Compare commits

...

109 Commits

Author SHA1 Message Date
Jan Bouwhuis
bd095ebf0a Merge branch 'dev' into homewizard-usage 2026-02-16 18:08:07 +01:00
epenet
d85040058f Improve type hints in aosmith water_heater (#163191) 2026-02-16 18:07:10 +01:00
epenet
a5c1ed593c Improve type hints in atag water_heater (#163192) 2026-02-16 18:06:40 +01:00
jbouwh
1edfd2da23 Do not purge deleted devices 2026-02-16 17:00:29 +00:00
Joost Lekkerkerker
977ee1a9d1 Add snapshot testing to SleepIQ (#163179) 2026-02-16 17:59:51 +01:00
epenet
6c433d0809 Improve type hints in roomba vacuum (#163184) 2026-02-16 17:53:38 +01:00
epenet
d370a730c2 Mark update method type hints as mandatory (#163182) 2026-02-16 17:51:12 +01:00
Markus Adrario
19aaaf6cc6 Add Lux to homee units (#163180) 2026-02-16 17:32:22 +01:00
Andrew Jackson
9e14a643c0 Add Mastodon reconfigure flow (#163178) 2026-02-16 17:15:29 +01:00
Perchun Pak
80fccaec56 minecraft_server: do not use mcstatus' internal objects (#163101) 2026-02-16 17:15:04 +01:00
Matthias Alphart
09b122e670 KNX Sensor: set device and state class for YAML entities based on DPT (#159465) 2026-02-16 17:12:47 +01:00
Kamil Breguła
2684f4b555 Update quality scale of WLED integration to platinum (#162680)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-16 17:03:14 +01:00
epenet
cbc2928c4a Rename devolo test variables and aliases (#163175) 2026-02-16 16:53:22 +01:00
Kamil Breguła
aab4f57580 Add missing native_unit_of_measurement in WLED (#157802)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-16 16:49:24 +01:00
epenet
fed9ed615e Rename DOMAIN aliases in tests (#163176) 2026-02-16 16:47:53 +01:00
On Freund
97df38f1da Add MTA New York City Transit integration (#156846)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-16 16:47:24 +01:00
Josef Zweck
be228dbe47 Fix title for onedrive for business (#163134) 2026-02-16 16:45:47 +01:00
Brett Adams
0292a8cd7e Add quality scale to Advantage Air integration (#160476)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-16 16:44:40 +01:00
epenet
a308b84f15 Use hardware/usb domain constant in tests (#162934) 2026-02-16 16:39:28 +01:00
doggyben
fdc264cf71 Change Facebook notify tag from ACCOUNT_UPDATE to HUMAN_AGENT (#162890) 2026-02-16 15:35:26 +00:00
Andrew Jackson
dfd61f85c2 Add reauth to Mastodon (#163148) 2026-02-16 16:29:20 +01:00
epenet
7ab4f2f431 Use HassKey in usb (#163138) 2026-02-16 16:21:29 +01:00
Daniel Hjelseth Høyer
be31f01fc2 Homevolt quality scale (#163038)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-16 16:21:15 +01:00
Brett Adams
8d228b6e6a Add battery health sensors to Tessie (#162908)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-16 15:57:05 +01:00
Franck Nijhof
46a1dda8d8 Fix CI partial run glob expansion without reintroducing template injection (#163170) 2026-02-16 15:50:50 +01:00
Franck Nijhof
8a5d5a8468 Fix flaky fritz update tests caused by class attribute pollution in test fixtures (#163169) 2026-02-16 15:25:08 +01:00
Manu
6e48172654 Improve typing in HTML5 webpush integration (#163162) 2026-02-16 15:21:25 +01:00
Franck Nijhof
1e6196c6e8 Add zizmor as a CI check for GitHub Actions workflows (#163161) 2026-02-16 15:18:55 +01:00
Manu
726870b829 Add py_vapid to requirements in HTML5 integration (#163165) 2026-02-16 15:09:07 +01:00
Ludovic BOUÉ
c5b1b4482d Fix device class for Matter Nitrogen Dioxide Sensor (#162965) 2026-02-16 15:00:52 +01:00
Franck Nijhof
e88be6bdeb Fix dependabot cooldown config for github-actions ecosystem (#163166) 2026-02-16 14:56:33 +01:00
AlexSp
3a0bde5d3e Add dependabot cooldown (#163082)
Co-authored-by: Franck Nijhof <git@frenck.dev>
2026-02-16 14:29:49 +01:00
epenet
8dc9937ba4 Prefer explicit parametrize in litterrobot tests (#163155) 2026-02-16 14:27:58 +01:00
hanwg
2d2ea3d31c Cleanup unused code for Telegram bot (#163147) 2026-02-16 14:24:24 +01:00
epenet
26f852d934 Fix incorrect use of Platform enum in homematicip_cloud tests (#163149) 2026-02-16 14:11:44 +01:00
epenet
9977c58aaa Fix incorrect use of Platform enum in wsdot tests (#163151) 2026-02-16 13:59:37 +01:00
Jan Bouwhuis
b664f2ca9a Remove unused MQTT CONF_COLOR_MODE const and abbreviation (#163146) 2026-02-16 13:56:54 +01:00
epenet
6bbe80da72 Fix incorrect use of Platform enum in threshold tests (#163154) 2026-02-16 13:56:28 +01:00
Glenn de Haan
5f3cb37ee6 Fix HDFury volt symbol (#163160) 2026-02-16 13:55:08 +01:00
epenet
27d715e26a Fix incorrect use of Platform enum in zha tests (#163150) 2026-02-16 13:47:29 +01:00
Ludovic BOUÉ
3ee20d5e5c Add ppm to NITROGEN_DIOXIDE units (#162983) 2026-02-16 13:32:39 +01:00
epenet
75b5248e2a Fix incorrect use of Platform enum in utility_meter tests (#163153) 2026-02-16 13:28:08 +01:00
Artur Pragacz
37af004a37 Deprecate async_listen in labs (#162648) 2026-02-16 13:20:44 +01:00
epenet
4510ca7994 Fix incorrect use of Platform enum in wmspro tests (#163152) 2026-02-16 13:08:43 +01:00
epenet
b8885791f7 Fix incorrect use of Platform enum in roborock tests (#163142) 2026-02-16 12:02:52 +01:00
epenet
9477fa4471 Fix incorrect use of Platform enum in flexit_bacnet tests (#163144) 2026-02-16 12:02:23 +01:00
epenet
d464806281 Fix incorrect use of Platform enum in huum tests (#163145) 2026-02-16 12:01:55 +01:00
epenet
3f00403c66 Fix incorrect use of Platform enum in evohome tests (#163143) 2026-02-16 11:54:02 +01:00
TheJulianJES
63f4653a3b Fix Matter translation key not set for primary entities (#161708) 2026-02-16 11:38:59 +01:00
Franck Nijhof
e48bd88581 Improve GitHub Actions workflow metadata and concurrency settings (#163117) 2026-02-16 11:38:40 +01:00
Erwin Douna
5d1cb4df94 Fix orphaned ignored typo (#163137) 2026-02-16 11:31:16 +01:00
Erwin Douna
6a49a25799 Handle orphaned ignored config entries (#153093)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2026-02-16 10:43:28 +01:00
Joakim Sørensen
206c4e38be Bump hass-nabucasa from 1.13.0 to 1.15.0 (#163129) 2026-02-16 09:35:50 +01:00
Jan Bouwhuis
98135a1968 Cleanup removed options from MQTT json light schema (#163119) 2026-02-16 09:27:06 +01:00
Matthias Alphart
eecfa68de6 Update xknx to 3.15.0 (#163111) 2026-02-16 09:26:22 +01:00
Petar Petrov
ffbb8c037e Migrate grid connections to single objects with import/export/power (#162200)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2026-02-16 09:55:42 +02:00
Allen Porter
4386b3d5cc Bump ical to 13.2.0 (#163123) 2026-02-16 08:44:36 +01:00
Franck Nijhof
3f755f1f0d CI security hardening: pin actions and images in builder and CI workflows (#163116) 2026-02-16 08:43:20 +01:00
dependabot[bot]
4cc9805a4b Bump github/codeql-action from 4.32.2 to 4.32.3 (#163126) 2026-02-16 08:39:56 +01:00
Manu
746461e59e Fix blocking call in Xbox config flow (#163122) 2026-02-16 08:16:18 +01:00
Artur Pragacz
ddb13b4ee7 Fix Z-Wave fan speed (#163093) 2026-02-15 22:55:06 +01:00
Manu
68b08a6147 Remove deprecated yaml import from HTML5 integration (#163094) 2026-02-15 22:26:10 +01:00
Franck Nijhof
2178c98ccc Assign no-stale to Tasks/Epic/Opportunity issue type (#163080) 2026-02-15 22:18:43 +01:00
mettolen
ebedb182c8 Bump pysaunum to 0.5.0 (#163021) 2026-02-15 22:15:46 +01:00
Christopher Fenner
335aa02f14 Bump PyViCare to 2.57.0 (#163071) 2026-02-15 22:12:42 +01:00
Klaas Schoute
2c6c2d09cc Update powerfox to v2.1.0 (#163095) 2026-02-15 22:07:50 +01:00
Artur Pragacz
c8308ad723 Remove extra friendly name from trend (#163105) 2026-02-15 20:59:31 +01:00
Andrea Turri
c65fa5b377 Add additional Miele fillingLevel sensors (#162104) 2026-02-15 20:30:21 +01:00
Ludovic BOUÉ
48ceb52ebb Bump python-roborock to version 4.14.0 in requirements files (#163098) 2026-02-15 10:13:35 -08:00
Jan Vaníček
49bea823f5 Add missing supported languages to Google Generative AI TTS (#163048) 2026-02-15 18:06:33 +01:00
Franck Nijhof
07dcc2eae0 CI security hardening: restrict permissions in AI issue detection workflows (#163068) 2026-02-15 16:33:23 +01:00
Franck Nijhof
8e1c6c2157 CI security hardening: prevent template injection in CI workflow (#163076) 2026-02-15 16:30:30 +01:00
Franck Nijhof
f10cb23aab CI security hardening: prevent template injection in builder workflow (#163075) 2026-02-15 16:27:46 +01:00
Franck Nijhof
7020bec262 CI security hardening: prevent template injection in translations workflow (#163074) 2026-02-15 16:26:13 +01:00
Franck Nijhof
980507480b CI security hardening: prevent template injection in wheels workflow (#163073) 2026-02-15 16:25:43 +01:00
Patrick Vorgers
7a52d71b40 Cloudflare R2 backup - Improved buffer handling (#162958) 2026-02-15 16:16:10 +01:00
Brett Adams
32092c73c6 Add energy history support to Tessie (#162976)
Co-authored-by: Claude Haiku 4.5 <noreply@anthropic.com>
2026-02-15 15:06:51 +01:00
Simone Chemelli
4846d51341 Improve coordinator coverage for Fritz (#163012) 2026-02-15 14:54:44 +01:00
Josef Zweck
75ddc3f9a1 Fix strings for onedrive for business (#163070)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-15 14:36:50 +01:00
Josef Zweck
11fe11cc03 Add reconfiguration to onedrive_for_business (#163054) 2026-02-15 13:32:05 +01:00
Manu
40890419bb Bump pywebpush to 2.3.0 (#163066) 2026-02-15 13:27:28 +01:00
Andrew Jackson
7e22a32dff Bump aiomealie to 1.2.1 (#163064) 2026-02-15 13:03:31 +01:00
Franck Nijhof
6cc2f835e4 CI security hardening: restrict permissions in CI workflow (#163063) 2026-02-15 12:58:48 +01:00
Franck Nijhof
b20959d938 CI security hardening: restrict permissions in builder workflow (#163062) 2026-02-15 12:58:24 +01:00
Josef Zweck
e456331062 Fix reauth flow for onedrive (#163061) 2026-02-15 12:02:55 +01:00
Franck Nijhof
e1194167cb CI security hardening: restrict permissions in translations workflow (#163057) 2026-02-15 12:00:10 +01:00
Franck Nijhof
3a6ca5ec17 CI security hardening: restrict permissions in wheels workflow (#163059) 2026-02-15 11:59:52 +01:00
Andrew Jackson
2850192068 Add get_account service to Mastodon (#161930)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2026-02-15 11:59:34 +01:00
Denis Shulyaka
49689ad677 Save failed intent results to chat log (#163031) 2026-02-15 11:52:13 +01:00
Josef Zweck
3408fc7520 Add reauth to onedrive_for_business (#163052) 2026-02-15 11:33:00 +01:00
Franck Nijhof
bf482a6b92 CI security hardening: restrict permissions in CodeQL workflow (#163053) 2026-02-15 11:28:58 +01:00
Franck Nijhof
7af63460ea CI security hardening: restrict permissions in restrict-task-creation workflow (#163051) 2026-02-15 11:22:25 +01:00
Franck Nijhof
755a3f82d4 CI security hardening: restrict permissions in lock workflow (#163050) 2026-02-15 11:22:06 +01:00
Franck Nijhof
71e9d54105 CI security hardening: restrict permissions in stale workflow (#163049) 2026-02-15 11:21:46 +01:00
Brett Adams
2208d7e92c Add island_status sensor and grid_status binary sensor to Tessie (#162975)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-15 11:18:26 +01:00
Kevin Stillhammer
ea281e14bf Fix default value of DurationSelector allow_negative (#162924) 2026-02-15 10:42:57 +01:00
TimL
fcdeaead6f Bump pysmlight v0.2.14 (#163035) 2026-02-15 10:29:36 +01:00
Christian Lackas
a264571ce3 Add ELV-SH-SMSI soil moisture sensor to homematicip_cloud (#161662) 2026-02-15 10:28:07 +01:00
Peter Kolbus
43988bf0f5 Add battery percentage sensor to weatherflow (#161200) 2026-02-15 10:19:02 +01:00
Andre Lengwenus
a9495f61a0 Bump pypck to 0.9.11 (#163043) 2026-02-15 10:08:07 +01:00
Xidorn Quan
1c19ddba55 Bump thermopro-ble to 1.1.3 (#163026) 2026-02-15 10:02:40 +01:00
Rezoran
99a07984fb Miele: add WASHER_DRYER to twindos compatibles (#162875) 2026-02-15 08:52:20 +01:00
Christian Lackas
6f17621957 Use suggested_display_precision for HmIP absolute humidity sensor (#162834) 2026-02-15 08:31:51 +01:00
mettolen
496f44e007 Fix authentication error handling in Liebherr coordinator (#163036) 2026-02-15 08:20:20 +01:00
Jan Bouwhuis
42308f8b68 Merge branch 'dev' into homewizard-usage 2026-02-13 19:07:51 +01:00
jbouwh
21bf96e1ad Add test cases for energy monitors without production energy 2026-02-12 17:15:53 +00:00
jbouwh
365bd95963 Test disabled sensors with usage option set 2026-02-11 08:20:40 +00:00
jbouwh
d889217944 Test setting up energy plug via v1 API 2026-02-09 16:27:47 +00:00
jbouwh
6b8915dcba Allow to configure usage to determine default sensors during homewizard power monitoring setup 2026-02-09 13:35:31 +00:00
274 changed files with 11742 additions and 2257 deletions

View File

@@ -9,3 +9,5 @@ updates:
labels:
- dependency
- github_actions
cooldown:
default-days: 7

View File

@@ -18,11 +18,19 @@ env:
BASE_IMAGE_VERSION: "2026.01.0"
ARCHITECTURES: '["amd64", "aarch64"]'
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
init:
name: Initialize build
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
permissions:
contents: read # To check out the repository
outputs:
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.version.outputs.channel }}
@@ -41,16 +49,16 @@ jobs:
- name: Get information
id: info
uses: home-assistant/actions/helpers/info@master
uses: home-assistant/actions/helpers/info@master # zizmor: ignore[unpinned-uses]
- name: Get version
id: version
uses: home-assistant/actions/helpers/version@master
uses: home-assistant/actions/helpers/version@master # zizmor: ignore[unpinned-uses]
with:
type: ${{ env.BUILD_TYPE }}
- name: Verify version
uses: home-assistant/actions/helpers/verify-version@master
uses: home-assistant/actions/helpers/verify-version@master # zizmor: ignore[unpinned-uses]
with:
ignore-dev: true
@@ -84,9 +92,9 @@ jobs:
needs: init
runs-on: ${{ matrix.os }}
permissions:
contents: read
packages: write
id-token: write
contents: read # To check out the repository
packages: write # To push to GHCR
id-token: write # For cosign signing
strategy:
fail-fast: false
matrix:
@@ -135,11 +143,12 @@ jobs:
shell: bash
env:
UV_PRERELEASE: allow
VERSION: ${{ needs.init.outputs.version }}
run: |
python3 -m pip install "$(grep '^uv' < requirements.txt)"
uv pip install packaging tomli
uv pip install .
python3 script/version_bump.py nightly --set-nightly-version "${{ needs.init.outputs.version }}"
python3 script/version_bump.py nightly --set-nightly-version "${VERSION}"
if [[ "$(ls home_assistant_frontend*.whl)" =~ ^home_assistant_frontend-(.*)-py3-none-any.whl$ ]]; then
echo "Found frontend wheel, setting version to: ${BASH_REMATCH[1]}"
@@ -185,7 +194,7 @@ jobs:
- name: Write meta info file
shell: bash
run: |
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
echo "${GITHUB_SHA};${GITHUB_REF};${GITHUB_EVENT_NAME};${GITHUB_ACTOR}" > rootfs/OFFICIAL_IMAGE
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -205,26 +214,32 @@ jobs:
- name: Build variables
id: vars
shell: bash
env:
ARCH: ${{ matrix.arch }}
run: |
echo "base_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ env.BASE_IMAGE_VERSION }}" >> "$GITHUB_OUTPUT"
echo "cache_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:latest" >> "$GITHUB_OUTPUT"
echo "base_image=ghcr.io/home-assistant/${ARCH}-homeassistant-base:${BASE_IMAGE_VERSION}" >> "$GITHUB_OUTPUT"
echo "cache_image=ghcr.io/home-assistant/${ARCH}-homeassistant:latest" >> "$GITHUB_OUTPUT"
echo "created=$(date --rfc-3339=seconds --utc)" >> "$GITHUB_OUTPUT"
- name: Verify base image signature
env:
BASE_IMAGE: ${{ steps.vars.outputs.base_image }}
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/docker/.*" \
"${{ steps.vars.outputs.base_image }}"
"${BASE_IMAGE}"
- name: Verify cache image signature
id: cache
continue-on-error: true
env:
CACHE_IMAGE: ${{ steps.vars.outputs.cache_image }}
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/core/.*" \
"${{ steps.vars.outputs.cache_image }}"
"${CACHE_IMAGE}"
- name: Build base image
id: build
@@ -246,8 +261,12 @@ jobs:
org.opencontainers.image.version=${{ needs.init.outputs.version }}
- name: Sign image
env:
ARCH: ${{ matrix.arch }}
VERSION: ${{ needs.init.outputs.version }}
DIGEST: ${{ steps.build.outputs.digest }}
run: |
cosign sign --yes "ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}@${{ steps.build.outputs.digest }}"
cosign sign --yes "ghcr.io/home-assistant/${ARCH}-homeassistant:${VERSION}@${DIGEST}"
build_machine:
name: Build ${{ matrix.machine }} machine core image
@@ -255,9 +274,9 @@ jobs:
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
contents: read # To check out the repository
packages: write # To push to GHCR
id-token: write # For cosign signing
strategy:
matrix:
machine:
@@ -282,11 +301,13 @@ jobs:
persist-credentials: false
- name: Set build additional args
env:
VERSION: ${{ needs.init.outputs.version }}
run: |
# Create general tags
if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
if [[ "${VERSION}" =~ d ]]; then
echo "BUILD_ARGS=--additional-tag dev" >> $GITHUB_ENV
elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
elif [[ "${VERSION}" =~ b ]]; then
echo "BUILD_ARGS=--additional-tag beta" >> $GITHUB_ENV
else
echo "BUILD_ARGS=--additional-tag stable" >> $GITHUB_ENV
@@ -299,9 +320,8 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
# home-assistant/builder doesn't support sha pinning
- name: Build base image
uses: home-assistant/builder@2025.11.0
uses: home-assistant/builder@21bc64d76dad7a5184c67826aab41c6b6f89023a # 2025.11.0
with:
args: |
$BUILD_ARGS \
@@ -315,6 +335,8 @@ jobs:
if: github.repository_owner == 'home-assistant'
needs: ["init", "build_machine"]
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -322,14 +344,14 @@ jobs:
persist-credentials: false
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
uses: home-assistant/actions/helpers/git-init@master # zizmor: ignore[unpinned-uses]
with:
name: ${{ secrets.GIT_NAME }}
email: ${{ secrets.GIT_EMAIL }}
token: ${{ secrets.GIT_TOKEN }}
- name: Update version file
uses: home-assistant/actions/helpers/version-push@master
uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
with:
key: "homeassistant[]"
key-description: "Home Assistant Core"
@@ -339,7 +361,7 @@ jobs:
- name: Update version file (stable -> beta)
if: needs.init.outputs.channel == 'stable'
uses: home-assistant/actions/helpers/version-push@master
uses: home-assistant/actions/helpers/version-push@master # zizmor: ignore[unpinned-uses]
with:
key: "homeassistant[]"
key-description: "Home Assistant Core"
@@ -354,9 +376,9 @@ jobs:
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
contents: read # To check out the repository
packages: write # To push to GHCR
id-token: write # For cosign signing
strategy:
fail-fast: false
matrix:
@@ -383,14 +405,17 @@ jobs:
- name: Verify architecture image signatures
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
VERSION: ${{ needs.init.outputs.version }}
run: |
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
for arch in $ARCHS; do
echo "Verifying ${arch} image signature..."
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp https://github.com/home-assistant/core/.* \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
"ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"
done
echo "✓ All images verified successfully"
@@ -421,16 +446,19 @@ jobs:
- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
VERSION: ${{ needs.init.outputs.version }}
run: |
# Use imagetools to copy image blobs directly between registries
# This preserves provenance/attestations and seems to be much faster than pull/push
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
for arch in $ARCHS; do
echo "Copying ${arch} image to DockerHub..."
for attempt in 1 2 3; do
if docker buildx imagetools create \
--tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"; then
--tag "docker.io/homeassistant/${arch}-homeassistant:${VERSION}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${VERSION}"; then
break
fi
echo "Attempt ${attempt} failed, retrying in 10 seconds..."
@@ -440,23 +468,28 @@ jobs:
exit 1
fi
done
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${VERSION}"
done
- name: Create and push multi-arch manifests
shell: bash
env:
ARCHITECTURES: ${{ needs.init.outputs.architectures }}
REGISTRY: ${{ matrix.registry }}
VERSION: ${{ needs.init.outputs.version }}
META_TAGS: ${{ steps.meta.outputs.tags }}
run: |
# Build list of architecture images dynamically
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
ARCHS=$(echo "${ARCHITECTURES}" | jq -r '.[]')
ARCH_IMAGES=()
for arch in $ARCHS; do
ARCH_IMAGES+=("${{ matrix.registry }}/${arch}-homeassistant:${{ needs.init.outputs.version }}")
ARCH_IMAGES+=("${REGISTRY}/${arch}-homeassistant:${VERSION}")
done
# Build list of all tags for single manifest creation
# Note: Using sep-tags=',' in metadata-action for easier parsing
TAG_ARGS=()
IFS=',' read -ra TAGS <<< "${{ steps.meta.outputs.tags }}"
IFS=',' read -ra TAGS <<< "${META_TAGS}"
for tag in "${TAGS[@]}"; do
TAG_ARGS+=("--tag" "${tag}")
done
@@ -480,8 +513,8 @@ jobs:
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
contents: read # To check out the repository
id-token: write # For PyPI trusted publishing
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
@@ -521,10 +554,10 @@ jobs:
name: Build and test hassfest image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
attestations: write
id-token: write
contents: read # To check out the repository
packages: write # To push to GHCR
attestations: write # For build provenance attestation
id-token: write # For build provenance attestation
needs: ["init"]
if: github.repository_owner == 'home-assistant'
env:
@@ -552,7 +585,7 @@ jobs:
tags: ${{ env.HASSFEST_IMAGE_TAG }}
- name: Run hassfest against core
run: docker run --rm -v ${{ github.workspace }}:/github/workspace ${{ env.HASSFEST_IMAGE_TAG }} --core-path=/github/workspace
run: docker run --rm -v "${GITHUB_WORKSPACE}":/github/workspace "${HASSFEST_IMAGE_TAG}" --core-path=/github/workspace
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'

View File

@@ -67,6 +67,8 @@ env:
PYTHONASYNCIODEBUG: 1
HASS_CI: 1
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
@@ -75,6 +77,9 @@ jobs:
info:
name: Collect information & changes data
runs-on: ubuntu-24.04
permissions:
contents: read # To check out the repository
pull-requests: read # For paths-filter to detect changed files
outputs:
# In case of issues with the partial run, use the following line instead:
# test_full_suite: 'true'
@@ -101,19 +106,20 @@ jobs:
persist-credentials: false
- name: Generate partial Python venv restore key
id: generate_python_cache_key
env:
HASH_REQUIREMENTS_TEST: ${{ hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}
HASH_REQUIREMENTS: ${{ hashFiles('requirements.txt') }}
HASH_REQUIREMENTS_ALL: ${{ hashFiles('requirements_all.txt') }}
HASH_PACKAGE_CONSTRAINTS: ${{ hashFiles('homeassistant/package_constraints.txt') }}
HASH_GEN_REQUIREMENTS: ${{ hashFiles('script/gen_requirements_all.py') }}
run: |
# Include HA_SHORT_VERSION to force the immediate creation
# of a new uv cache entry after a version bump.
echo "key=venv-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}-${{
hashFiles('requirements_test.txt', 'requirements_test_pre_commit.txt') }}-${{
hashFiles('requirements.txt') }}-${{
hashFiles('requirements_all.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}-${{
hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT
echo "key=venv-${CACHE_VERSION}-${HA_SHORT_VERSION}-${HASH_REQUIREMENTS_TEST}-${HASH_REQUIREMENTS}-${HASH_REQUIREMENTS_ALL}-${HASH_PACKAGE_CONSTRAINTS}-${HASH_GEN_REQUIREMENTS}" >> $GITHUB_OUTPUT
- name: Generate partial apt restore key
id: generate_apt_cache_key
run: |
echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT
echo "key=$(lsb_release -rs)-apt-${CACHE_VERSION}-${HA_SHORT_VERSION}" >> $GITHUB_OUTPUT
- name: Filter for core changes
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
id: core
@@ -136,6 +142,18 @@ jobs:
filters: .integration_paths.yaml
- name: Collect additional information
id: info
env:
INTEGRATION_CHANGES: ${{ steps.integrations.outputs.changes }}
CORE_ANY: ${{ steps.core.outputs.any }}
INPUT_FULL: ${{ github.event.inputs.full }}
HAS_CI_FULL_RUN_LABEL: ${{ contains(github.event.pull_request.labels.*.name, 'ci-full-run') }}
INPUT_LINT_ONLY: ${{ github.event.inputs.lint-only }}
INPUT_PYLINT_ONLY: ${{ github.event.inputs.pylint-only }}
INPUT_MYPY_ONLY: ${{ github.event.inputs.mypy-only }}
INPUT_AUDIT_LICENSES_ONLY: ${{ github.event.inputs.audit-licenses-only }}
REPO_FULL_NAME: ${{ github.event.repository.full_name }}
INPUT_SKIP_COVERAGE: ${{ github.event.inputs.skip-coverage }}
HAS_CI_SKIP_COVERAGE_LABEL: ${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}
run: |
# Defaults
integrations_glob=""
@@ -149,14 +167,13 @@ jobs:
lint_only=""
skip_coverage=""
if [[ "${{ steps.integrations.outputs.changes }}" != "[]" ]];
if [[ "${INTEGRATION_CHANGES}" != "[]" ]];
then
# Create a file glob for the integrations
integrations_glob=$(echo '${{ steps.integrations.outputs.changes }}' | jq -cSr '. | join(",")')
[[ "${integrations_glob}" == *","* ]] && integrations_glob="{${integrations_glob}}"
# Create a space-separated list of integrations
integrations_glob=$(echo "${INTEGRATION_CHANGES}" | jq -r '. | join(" ")')
# Create list of testable integrations
possible_integrations=$(echo '${{ steps.integrations.outputs.changes }}' | jq -cSr '.[]')
possible_integrations=$(echo "${INTEGRATION_CHANGES}" | jq -cSr '.[]')
tests=$(
for integration in ${possible_integrations};
do
@@ -172,9 +189,8 @@ jobs:
# Test group count should be 1, we don't split partial tests
test_group_count=1
# Create a file glob for the integrations tests
tests_glob=$(echo "${tests}" | jq -cSr '. | join(",")')
[[ "${tests_glob}" == *","* ]] && tests_glob="{${tests_glob}}"
# Create a space-separated list of test integrations
tests_glob=$(echo "${tests}" | jq -r '. | join(" ")')
mariadb_groups="[]"
postgresql_groups="[]"
@@ -183,12 +199,12 @@ jobs:
# We need to run the full suite on certain branches.
# Or, in case core files are touched, for the full suite as well.
if [[ "${{ github.ref }}" == "refs/heads/dev" ]] \
|| [[ "${{ github.ref }}" == "refs/heads/master" ]] \
|| [[ "${{ github.ref }}" == "refs/heads/rc" ]] \
|| [[ "${{ steps.core.outputs.any }}" == "true" ]] \
|| [[ "${{ github.event.inputs.full }}" == "true" ]] \
|| [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-full-run') }}" == "true" ]];
if [[ "${GITHUB_REF}" == "refs/heads/dev" ]] \
|| [[ "${GITHUB_REF}" == "refs/heads/master" ]] \
|| [[ "${GITHUB_REF}" == "refs/heads/rc" ]] \
|| [[ "${CORE_ANY}" == "true" ]] \
|| [[ "${INPUT_FULL}" == "true" ]] \
|| [[ "${HAS_CI_FULL_RUN_LABEL}" == "true" ]];
then
mariadb_groups=${MARIADB_VERSIONS}
postgresql_groups=${POSTGRESQL_VERSIONS}
@@ -197,19 +213,19 @@ jobs:
test_full_suite="true"
fi
if [[ "${{ github.event.inputs.lint-only }}" == "true" ]] \
|| [[ "${{ github.event.inputs.pylint-only }}" == "true" ]] \
|| [[ "${{ github.event.inputs.mypy-only }}" == "true" ]] \
|| [[ "${{ github.event.inputs.audit-licenses-only }}" == "true" ]] \
|| [[ "${{ github.event_name }}" == "push" \
&& "${{ github.event.repository.full_name }}" != "home-assistant/core" ]];
if [[ "${INPUT_LINT_ONLY}" == "true" ]] \
|| [[ "${INPUT_PYLINT_ONLY}" == "true" ]] \
|| [[ "${INPUT_MYPY_ONLY}" == "true" ]] \
|| [[ "${INPUT_AUDIT_LICENSES_ONLY}" == "true" ]] \
|| [[ "${GITHUB_EVENT_NAME}" == "push" \
&& "${REPO_FULL_NAME}" != "home-assistant/core" ]];
then
lint_only="true"
skip_coverage="true"
fi
if [[ "${{ github.event.inputs.skip-coverage }}" == "true" ]] \
|| [[ "${{ contains(github.event.pull_request.labels.*.name, 'ci-skip-coverage') }}" == "true" ]];
if [[ "${INPUT_SKIP_COVERAGE}" == "true" ]] \
|| [[ "${HAS_CI_SKIP_COVERAGE_LABEL}" == "true" ]];
then
skip_coverage="true"
fi
@@ -241,6 +257,8 @@ jobs:
prek:
name: Run prek checks
runs-on: ubuntu-24.04
permissions:
contents: read
needs: [info]
if: |
github.event.inputs.pylint-only != 'true'
@@ -260,12 +278,34 @@ jobs:
- name: Run prek
uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # v1.1.1
env:
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config,zizmor
RUFF_OUTPUT_FORMAT: github
zizmor:
name: Check GitHub Actions workflows
runs-on: ubuntu-24.04
permissions:
contents: read # To check out the repository
needs: [info]
if: |
github.event.inputs.pylint-only != 'true'
&& github.event.inputs.mypy-only != 'true'
&& github.event.inputs.audit-licenses-only != 'true'
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Run zizmor
uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # v1.1.1
with:
extra-args: --all-files zizmor
lint-hadolint:
name: Check ${{ matrix.file }}
runs-on: ubuntu-24.04
permissions:
contents: read
needs: [info]
if: |
github.event.inputs.pylint-only != 'true'
@@ -287,13 +327,15 @@ jobs:
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
- name: Check ${{ matrix.file }}
uses: docker://hadolint/hadolint:v2.12.0
uses: docker://hadolint/hadolint:v2.12.0@sha256:30a8fd2e785ab6176eed53f74769e04f125afb2f74a6c52aef7d463583b6d45e
with:
args: hadolint ${{ matrix.file }}
base:
name: Prepare dependencies
runs-on: ubuntu-24.04
permissions:
contents: read
needs: [info]
timeout-minutes: 60
strategy:
@@ -315,8 +357,7 @@ jobs:
run: |
uv_version=$(cat requirements.txt | grep uv | cut -d '=' -f 3)
echo "version=${uv_version}" >> $GITHUB_OUTPUT
echo "key=uv-${{ env.UV_CACHE_VERSION }}-${uv_version}-${{
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
echo "key=uv-${UV_CACHE_VERSION}-${uv_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
@@ -352,19 +393,21 @@ jobs:
steps.cache-venv.outputs.cache-hit != 'true'
|| steps.cache-apt-check.outputs.cache-hit != 'true'
timeout-minutes: 10
env:
APT_CACHE_HIT: ${{ steps.cache-apt-check.outputs.cache-hit }}
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
mkdir -p ${{ env.APT_CACHE_DIR }}
mkdir -p ${{ env.APT_LIST_CACHE_DIR }}
if [[ "${APT_CACHE_HIT}" != 'true' ]]; then
mkdir -p ${APT_CACHE_DIR}
mkdir -p ${APT_LIST_CACHE_DIR}
fi
sudo apt-get update \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR}
sudo apt-get -y install \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
bluez \
ffmpeg \
libturbojpeg \
@@ -378,8 +421,8 @@ jobs:
libswscale-dev \
libudev-dev
if [[ "${{ steps.cache-apt-check.outputs.cache-hit }}" != 'true' ]]; then
sudo chmod -R 755 ${{ env.APT_CACHE_BASE }}
if [[ "${APT_CACHE_HIT}" != 'true' ]]; then
sudo chmod -R 755 ${APT_CACHE_BASE}
fi
- name: Save apt cache
if: steps.cache-apt-check.outputs.cache-hit != 'true'
@@ -426,6 +469,8 @@ jobs:
hassfest:
name: Check hassfest
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- base
@@ -448,11 +493,11 @@ jobs:
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR}
sudo apt-get -y install \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
libturbojpeg
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -481,6 +526,8 @@ jobs:
gen-requirements-all:
name: Check all requirements
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- base
@@ -516,6 +563,8 @@ jobs:
gen-copilot-instructions:
name: Check copilot instructions
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
if: |
@@ -540,6 +589,8 @@ jobs:
dependency-review:
name: Dependency review
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- base
@@ -561,6 +612,8 @@ jobs:
audit-licenses:
name: Audit licenses
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- base
@@ -594,22 +647,28 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Extract license data
env:
PYTHON_VERSION: ${{ matrix.python-version }}
run: |
. venv/bin/activate
python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
python -m script.licenses extract --output-file=licenses-${PYTHON_VERSION}.json
- name: Upload licenses
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
path: licenses-${{ matrix.python-version }}.json
- name: Check licenses
env:
PYTHON_VERSION: ${{ matrix.python-version }}
run: |
. venv/bin/activate
python -m script.licenses check licenses-${{ matrix.python-version }}.json
python -m script.licenses check licenses-${PYTHON_VERSION}.json
pylint:
name: Check pylint
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- base
@@ -650,14 +709,18 @@ jobs:
- name: Run pylint (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
env:
INTEGRATIONS_GLOB: ${{ needs.info.outputs.integrations_glob }}
run: |
. venv/bin/activate
python --version
pylint --ignore-missing-annotations=y homeassistant/components/${{ needs.info.outputs.integrations_glob }}
pylint --ignore-missing-annotations=y $(printf "homeassistant/components/%s " ${INTEGRATIONS_GLOB})
pylint-tests:
name: Check pylint on tests
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- base
@@ -699,14 +762,18 @@ jobs:
- name: Run pylint (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
env:
TESTS_GLOB: ${{ needs.info.outputs.tests_glob }}
run: |
. venv/bin/activate
python --version
pylint tests/components/${{ needs.info.outputs.tests_glob }}
pylint $(printf "tests/components/%s " ${TESTS_GLOB})
mypy:
name: Check mypy
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- base
@@ -729,9 +796,8 @@ jobs:
id: generate-mypy-key
run: |
mypy_version=$(cat requirements_test.txt | grep 'mypy.*=' | cut -d '=' -f 3)
echo "version=$mypy_version" >> $GITHUB_OUTPUT
echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-${{
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
echo "version=${mypy_version}" >> $GITHUB_OUTPUT
echo "key=mypy-${MYPY_CACHE_VERSION}-${mypy_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
@@ -764,14 +830,18 @@ jobs:
- name: Run mypy (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
env:
INTEGRATIONS_GLOB: ${{ needs.info.outputs.integrations_glob }}
run: |
. venv/bin/activate
python --version
mypy homeassistant/components/${{ needs.info.outputs.integrations_glob }}
mypy $(printf "homeassistant/components/%s " ${INTEGRATIONS_GLOB})
prepare-pytest-full:
name: Split tests for full run
runs-on: ubuntu-24.04
permissions:
contents: read
if: |
needs.info.outputs.lint_only != 'true'
&& needs.info.outputs.test_full_suite == 'true'
@@ -797,11 +867,11 @@ jobs:
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR}
sudo apt-get -y install \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
bluez \
ffmpeg \
libturbojpeg
@@ -825,9 +895,11 @@ jobs:
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.python_cache_key }}
- name: Run split_tests.py
env:
TEST_GROUP_COUNT: ${{ needs.info.outputs.test_group_count }}
run: |
. venv/bin/activate
python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
python -m script.split_tests ${TEST_GROUP_COUNT} tests
- name: Upload pytest_buckets
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
@@ -838,6 +910,8 @@ jobs:
pytest-full:
name: Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- base
@@ -869,11 +943,11 @@ jobs:
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR}
sudo apt-get -y install \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
bluez \
ffmpeg \
libturbojpeg \
@@ -916,18 +990,21 @@ jobs:
id: pytest-full
env:
PYTHONDONTWRITEBYTECODE: 1
SKIP_COVERAGE: ${{ needs.info.outputs.skip_coverage }}
TEST_GROUP: ${{ matrix.group }}
PYTHON_VERSION: ${{ matrix.python-version }}
run: |
. venv/bin/activate
python --version
set -o pipefail
cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
if [[ "${SKIP_COVERAGE}" != "true" ]]; then
cov_params+=(--cov="homeassistant")
cov_params+=(--cov-report=xml)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
fi
echo "Test group ${{ matrix.group }}: $(sed -n "${{ matrix.group }},1p" pytest_buckets.txt)"
echo "Test group ${TEST_GROUP}: $(sed -n "${TEST_GROUP},1p" pytest_buckets.txt)"
python3 -b -X dev -m pytest \
-qq \
--timeout=9 \
@@ -939,8 +1016,8 @@ jobs:
-o console_output_style=count \
-p no:sugar \
--exclude-warning-annotations \
$(sed -n "${{ matrix.group }},1p" pytest_buckets.txt) \
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
$(sed -n "${TEST_GROUP},1p" pytest_buckets.txt) \
2>&1 | tee pytest-${PYTHON_VERSION}-${TEST_GROUP}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
@@ -976,9 +1053,11 @@ jobs:
pytest-mariadb:
name: Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}
runs-on: ubuntu-24.04
permissions:
contents: read
services:
mariadb:
image: ${{ matrix.mariadb-group }}
image: ${{ matrix.mariadb-group }} # zizmor: ignore[unpinned-images]
ports:
- 3306:3306
env:
@@ -1014,11 +1093,11 @@ jobs:
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR}
sudo apt-get -y install \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
bluez \
ffmpeg \
libturbojpeg \
@@ -1063,14 +1142,17 @@ jobs:
shell: bash
env:
PYTHONDONTWRITEBYTECODE: 1
MARIADB_GROUP: ${{ matrix.mariadb-group }}
SKIP_COVERAGE: ${{ needs.info.outputs.skip_coverage }}
PYTHON_VERSION: ${{ matrix.python-version }}
run: |
. venv/bin/activate
python --version
set -o pipefail
mariadb=$(echo "${{ matrix.mariadb-group }}" | sed "s/:/-/g")
mariadb=$(echo "${MARIADB_GROUP}" | sed "s/:/-/g")
echo "mariadb=${mariadb}" >> $GITHUB_OUTPUT
cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
if [[ "${SKIP_COVERAGE}" != "true" ]]; then
cov_params+=(--cov="homeassistant.components.recorder")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
@@ -1092,7 +1174,7 @@ jobs:
tests/components/logbook \
tests/components/recorder \
tests/components/sensor \
2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
2>&1 | tee pytest-${PYTHON_VERSION}-${mariadb}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
@@ -1129,9 +1211,11 @@ jobs:
pytest-postgres:
name: Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}
runs-on: ubuntu-24.04
permissions:
contents: read
services:
postgres:
image: ${{ matrix.postgresql-group }}
image: ${{ matrix.postgresql-group }} # zizmor: ignore[unpinned-images]
ports:
- 5432:5432
env:
@@ -1167,11 +1251,11 @@ jobs:
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR}
sudo apt-get -y install \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
bluez \
ffmpeg \
libturbojpeg \
@@ -1218,14 +1302,17 @@ jobs:
shell: bash
env:
PYTHONDONTWRITEBYTECODE: 1
POSTGRESQL_GROUP: ${{ matrix.postgresql-group }}
SKIP_COVERAGE: ${{ needs.info.outputs.skip_coverage }}
PYTHON_VERSION: ${{ matrix.python-version }}
run: |
. venv/bin/activate
python --version
set -o pipefail
postgresql=$(echo "${{ matrix.postgresql-group }}" | sed "s/:/-/g")
postgresql=$(echo "${POSTGRESQL_GROUP}" | sed "s/:/-/g")
echo "postgresql=${postgresql}" >> $GITHUB_OUTPUT
cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
if [[ "${SKIP_COVERAGE}" != "true" ]]; then
cov_params+=(--cov="homeassistant.components.recorder")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
@@ -1248,7 +1335,7 @@ jobs:
tests/components/logbook \
tests/components/recorder \
tests/components/sensor \
2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
2>&1 | tee pytest-${PYTHON_VERSION}-${postgresql}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
@@ -1285,6 +1372,8 @@ jobs:
coverage-full:
name: Upload test coverage to Codecov (full suite)
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- pytest-full
@@ -1312,6 +1401,8 @@ jobs:
pytest-partial:
name: Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})
runs-on: ubuntu-24.04
permissions:
contents: read
needs:
- info
- base
@@ -1343,11 +1434,11 @@ jobs:
run: |
sudo rm /etc/apt/sources.list.d/microsoft-prod.list
sudo apt-get update \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR}
sudo apt-get -y install \
-o Dir::Cache=${{ env.APT_CACHE_DIR }} \
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
-o Dir::Cache=${APT_CACHE_DIR} \
-o Dir::State::Lists=${APT_LIST_CACHE_DIR} \
bluez \
ffmpeg \
libturbojpeg \
@@ -1387,19 +1478,22 @@ jobs:
shell: bash
env:
PYTHONDONTWRITEBYTECODE: 1
TEST_GROUP: ${{ matrix.group }}
SKIP_COVERAGE: ${{ needs.info.outputs.skip_coverage }}
PYTHON_VERSION: ${{ matrix.python-version }}
run: |
. venv/bin/activate
python --version
set -o pipefail
if [[ ! -f "tests/components/${{ matrix.group }}/__init__.py" ]]; then
echo "::error:: missing file tests/components/${{ matrix.group }}/__init__.py"
if [[ ! -f "tests/components/${TEST_GROUP}/__init__.py" ]]; then
echo "::error:: missing file tests/components/${TEST_GROUP}/__init__.py"
exit 1
fi
cov_params=()
if [[ "${{ needs.info.outputs.skip_coverage }}" != "true" ]]; then
cov_params+=(--cov="homeassistant.components.${{ matrix.group }}")
if [[ "${SKIP_COVERAGE}" != "true" ]]; then
cov_params+=(--cov="homeassistant.components.${TEST_GROUP}")
cov_params+=(--cov-report=xml)
cov_params+=(--cov-report=term-missing)
cov_params+=(--junitxml=junit.xml -o junit_family=legacy)
@@ -1416,8 +1510,8 @@ jobs:
--durations-min=1 \
-p no:sugar \
--exclude-warning-annotations \
tests/components/${{ matrix.group }} \
2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
tests/components/${TEST_GROUP} \
2>&1 | tee pytest-${PYTHON_VERSION}-${TEST_GROUP}.txt
- name: Upload pytest output
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
@@ -1452,6 +1546,8 @@ jobs:
name: Upload test coverage to Codecov (partial suite)
if: needs.info.outputs.skip_coverage != 'true'
runs-on: ubuntu-24.04
permissions:
contents: read
timeout-minutes: 10
needs:
- info
@@ -1483,7 +1579,7 @@ jobs:
- pytest-mariadb
timeout-minutes: 10
permissions:
id-token: write
id-token: write # For Codecov OIDC upload
# codecov/test-results-action currently doesn't support tokenless uploads
# therefore we can't run it on forks
if: |

View File

@@ -5,6 +5,8 @@ on:
schedule:
- cron: "30 18 * * 4"
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
@@ -15,9 +17,9 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 360
permissions:
actions: read
contents: read
security-events: write
actions: read # To read workflow information for CodeQL
contents: read # To check out the repository
security-events: write # To upload CodeQL results
steps:
- name: Check out code from GitHub
@@ -26,11 +28,11 @@ jobs:
persist-credentials: false
- name: Initialize CodeQL
uses: github/codeql-action/init@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
uses: github/codeql-action/init@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2
uses: github/codeql-action/analyze@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
with:
category: "/language:python"

View File

@@ -5,13 +5,18 @@ on:
issues:
types: [labeled]
permissions:
issues: write
models: read
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}
jobs:
detect-duplicates:
name: Detect duplicate issues
runs-on: ubuntu-latest
permissions:
issues: write # To comment on and label issues
models: read # For AI-based duplicate detection
steps:
- name: Check if integration label was added and extract details

View File

@@ -5,13 +5,18 @@ on:
issues:
types: [opened]
permissions:
issues: write
models: read
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}
jobs:
detect-language:
name: Detect non-English issues
runs-on: ubuntu-latest
permissions:
issues: write # To comment on, label, and close issues
models: read # For AI-based language detection
steps:
- name: Check issue language

View File

@@ -5,10 +5,20 @@ on:
schedule:
- cron: "0 * * * *"
permissions: {}
concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true
jobs:
lock:
name: Lock inactive threads
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
permissions:
issues: write # To lock issues
pull-requests: write # To lock pull requests
steps:
- uses: dessant/lock-threads@7266a7ce5c1df01b1c6db85bf8cd86c737dadbe7 # v6.0.0
with:

View File

@@ -5,9 +5,39 @@ on:
issues:
types: [opened]
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.event.issue.number }}
jobs:
check-authorization:
add-no-stale:
name: Add no-stale label
runs-on: ubuntu-latest
permissions:
issues: write # To add labels to issues
if: >-
github.event.issue.type.name == 'Task'
|| github.event.issue.type.name == 'Epic'
|| github.event.issue.type.name == 'Opportunity'
steps:
- name: Add no-stale label
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
labels: ['no-stale']
});
check-authorization:
name: Check authorization
runs-on: ubuntu-latest
permissions:
contents: read # To read CODEOWNERS file
issues: write # To comment on, label, and close issues
# Only run if this is a Task issue type (from the issue form)
if: github.event.issue.type.name == 'Task'
steps:

View File

@@ -6,10 +6,20 @@ on:
- cron: "0 * * * *"
workflow_dispatch:
permissions: {}
concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true
jobs:
stale:
name: Mark stale issues and PRs
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
permissions:
issues: write # To label and close stale issues
pull-requests: write # To label and close stale PRs
steps:
# The 60 day stale policy for PRs
# Used for:

View File

@@ -9,6 +9,12 @@ on:
paths:
- "**strings.json"
permissions: {}
concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true
env:
DEFAULT_PYTHON: "3.14.2"
@@ -29,6 +35,7 @@ jobs:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Upload Translations
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
run: |
export LOKALISE_TOKEN="${{ secrets.LOKALISE_TOKEN }}"
python3 -m script.translations upload

View File

@@ -19,6 +19,8 @@ on:
env:
DEFAULT_PYTHON: "3.14.2"
permissions: {}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name}}
cancel-in-progress: true
@@ -51,7 +53,7 @@ jobs:
- name: Create requirements_diff file
run: |
if [[ ${{ github.event_name }} =~ (schedule|workflow_dispatch) ]]; then
if [[ "${GITHUB_EVENT_NAME}" =~ (schedule|workflow_dispatch) ]]; then
touch requirements_diff.txt
else
curl -s -o requirements_diff.txt https://raw.githubusercontent.com/home-assistant/core/master/requirements.txt

View File

@@ -17,6 +17,12 @@ repos:
- --quiet-level=2
exclude_types: [csv, json, html]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
- repo: https://github.com/zizmorcore/zizmor-pre-commit
rev: v1.22.0
hooks:
- id: zizmor
args:
- --pedantic
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
hooks:

2
CODEOWNERS generated
View File

@@ -1068,6 +1068,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
/tests/components/mqtt/ @emontnemery @jbouwh @bdraco
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mta/ @OnFreund
/tests/components/mta/ @OnFreund
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant @arturpragacz

View File

@@ -0,0 +1,108 @@
rules:
# Bronze
action-setup:
status: todo
comment: https://developers.home-assistant.io/blog/2025/09/25/entity-services-api-changes/
appropriate-polling: done
brands: done
common-modules:
status: todo
comment: |
Move coordinator from __init__.py to coordinator.py.
Consider using entity descriptions for binary_sensor and switch.
Consider simplifying climate supported features flow.
config-flow-test-coverage:
status: todo
comment: |
Add mock_setup_entry common fixture.
Test unique_id of the entry in happy flow.
Split duplicate entry test from happy flow, use mock_config_entry.
Error flow should end in CREATE_ENTRY to test recovery.
Add data_description for ip_address (and port) to strings.json - tests fail with:
"Translation not found for advantage_air: config.step.user.data_description.ip_address"
config-flow:
status: todo
comment: Data descriptions missing
dependency-transparency: done
docs-actions: done
docs-high-level-description: done
docs-installation-instructions: todo
docs-removal-instructions: todo
entity-event-setup:
status: exempt
comment: Entities do not explicitly subscribe to events.
entity-unique-id: done
has-entity-name: done
runtime-data:
status: done
comment: Consider extending coordinator to access API via coordinator and remove extra dataclass.
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: No options to be set.
docs-installation-parameters: done
entity-unavailable:
status: todo
comment: MyZone temp entity should be unavailable when MyZone is disabled rather than returning None.
integration-owner: done
log-when-unavailable: todo
parallel-updates: todo
reauthentication-flow:
status: exempt
comment: Integration connects to local device without authentication.
test-coverage:
status: todo
comment: |
Patch the library instead of mocking at integration level.
Split binary sensor tests into multiple tests (enable entities etc).
Split tests into Creation (right entities with right values), Actions (right library calls), and Other behaviors.
# Gold
devices:
status: todo
comment: Consider making every zone its own device for better naming and room assignment. Breaking change to split cover entities to separate devices.
diagnostics: done
discovery-update-info:
status: exempt
comment: Device is a generic Android device (android-xxxxxxxx) indistinguishable from other Android devices, not discoverable.
discovery:
status: exempt
comment: Check mDNS, DHCP, SSDP confirmed not feasible. Device is a generic Android device (android-xxxxxxxx) indistinguishable from other Android devices.
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: done
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: AC zones are static per unit and configured on the device itself.
entity-category: done
entity-device-class:
status: todo
comment: Consider using UPDATE device class for app update binary sensor instead of custom.
entity-disabled-by-default: done
entity-translations: todo
exception-translations:
status: todo
comment: UpdateFailed in the coordinator
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: Integration does not raise repair issues.
stale-devices:
status: exempt
comment: Zones are part of the AC unit, not separate removable devices.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -120,7 +120,7 @@ class AOSmithWaterHeaterEntity(AOSmithStatusEntity, WaterHeaterEntity):
return MODE_AOSMITH_TO_HA.get(self.device.status.current_mode, STATE_OFF)
@property
def is_away_mode_on(self):
def is_away_mode_on(self) -> bool:
"""Return True if away mode is on."""
return self.device.status.current_mode == AOSmithOperationMode.VACATION

View File

@@ -37,15 +37,15 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
_attr_temperature_unit = UnitOfTemperature.CELSIUS
@property
def current_temperature(self):
def current_temperature(self) -> float:
"""Return the current temperature."""
return self.coordinator.atag.dhw.temperature
@property
def current_operation(self):
def current_operation(self) -> str:
"""Return current operation."""
operation = self.coordinator.atag.dhw.current_operation
return operation if operation in self.operation_list else STATE_OFF
return operation if operation in OPERATION_LIST else STATE_OFF
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
@@ -53,7 +53,7 @@ class AtagWaterHeater(AtagEntity, WaterHeaterEntity):
self.async_write_ha_state()
@property
def target_temperature(self):
def target_temperature(self) -> float:
"""Return the setpoint if water demand, otherwise return base temp (comfort level)."""
return self.coordinator.atag.dhw.target_temperature

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.13.0", "openai==2.21.0"],
"requirements": ["hass-nabucasa==1.15.0", "openai==2.21.0"],
"single_config_entry": true
}

View File

@@ -5,7 +5,7 @@ import functools
import json
import logging
from time import time
from typing import Any
from typing import Any, cast
from botocore.exceptions import BotoCoreError
@@ -190,58 +190,77 @@ class R2BackupAgent(BackupAgent):
:param open_stream: A function returning an async iterator that yields bytes.
"""
_LOGGER.debug("Starting multipart upload for %s", tar_filename)
key = self._with_prefix(tar_filename)
multipart_upload = await self._client.create_multipart_upload(
Bucket=self._bucket,
Key=self._with_prefix(tar_filename),
Key=key,
)
upload_id = multipart_upload["UploadId"]
try:
parts: list[dict[str, Any]] = []
part_number = 1
buffer = bytearray() # bytes buffer to store the data
offset = 0 # start index of unread data inside buffer
stream = await open_stream()
async for chunk in stream:
buffer.extend(chunk)
# upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
# all non-trailing parts have the same size (required by S3/R2)
while len(buffer) >= MULTIPART_MIN_PART_SIZE_BYTES:
part_data = bytes(buffer[:MULTIPART_MIN_PART_SIZE_BYTES])
del buffer[:MULTIPART_MIN_PART_SIZE_BYTES]
# Upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
# all non-trailing parts have the same size (defensive implementation)
view = memoryview(buffer)
try:
while len(buffer) - offset >= MULTIPART_MIN_PART_SIZE_BYTES:
start = offset
end = offset + MULTIPART_MIN_PART_SIZE_BYTES
part_data = view[start:end]
offset = end
_LOGGER.debug(
"Uploading part number %d, size %d",
part_number,
len(part_data),
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=self._with_prefix(tar_filename),
PartNumber=part_number,
UploadId=upload_id,
Body=part_data,
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
_LOGGER.debug(
"Uploading part number %d, size %d",
part_number,
len(part_data),
)
part = await cast(Any, self._client).upload_part(
Bucket=self._bucket,
Key=key,
PartNumber=part_number,
UploadId=upload_id,
Body=part_data.tobytes(),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
finally:
view.release()
# Compact the buffer if the consumed offset has grown large enough. This
# avoids unnecessary memory copies when compacting after every part upload.
if offset and offset >= MULTIPART_MIN_PART_SIZE_BYTES:
buffer = bytearray(buffer[offset:])
offset = 0
# Upload the final buffer as the last part (no minimum size requirement)
if buffer:
# Offset should be 0 after the last compaction, but we use it as the start
# index to be defensive in case the buffer was not compacted.
if offset < len(buffer):
remaining_data = memoryview(buffer)[offset:]
_LOGGER.debug(
"Uploading final part number %d, size %d", part_number, len(buffer)
"Uploading final part number %d, size %d",
part_number,
len(remaining_data),
)
part = await self._client.upload_part(
part = await cast(Any, self._client).upload_part(
Bucket=self._bucket,
Key=self._with_prefix(tar_filename),
Key=key,
PartNumber=part_number,
UploadId=upload_id,
Body=bytes(buffer),
Body=remaining_data.tobytes(),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
await self._client.complete_multipart_upload(
await cast(Any, self._client).complete_multipart_upload(
Bucket=self._bucket,
Key=self._with_prefix(tar_filename),
Key=key,
UploadId=upload_id,
MultipartUpload={"Parts": parts},
)
@@ -250,7 +269,7 @@ class R2BackupAgent(BackupAgent):
try:
await self._client.abort_multipart_upload(
Bucket=self._bucket,
Key=self._with_prefix(tar_filename),
Key=key,
UploadId=upload_id,
)
except BotoCoreError:

View File

@@ -598,7 +598,7 @@ class DefaultAgent(ConversationEntity):
error_response_type, error_response_args = _get_match_error_response(
self.hass, match_error
)
return _make_error_result(
intent_response = _make_error_result(
language,
intent.IntentResponseErrorCode.NO_VALID_TARGETS,
self._get_error_text(
@@ -609,7 +609,7 @@ class DefaultAgent(ConversationEntity):
# Intent was valid and entities matched constraints, but an error
# occurred during handling.
_LOGGER.exception("Intent handling error")
return _make_error_result(
intent_response = _make_error_result(
language,
intent.IntentResponseErrorCode.FAILED_TO_HANDLE,
self._get_error_text(
@@ -618,7 +618,7 @@ class DefaultAgent(ConversationEntity):
)
except intent.IntentUnexpectedError:
_LOGGER.exception("Unexpected intent error")
return _make_error_result(
intent_response = _make_error_result(
language,
intent.IntentResponseErrorCode.UNKNOWN,
self._get_error_text(ErrorKey.HANDLE_ERROR, lang_intents),

View File

@@ -15,7 +15,7 @@ from homeassistant.helpers import config_validation as cv, singleton, storage
from .const import DOMAIN
STORAGE_VERSION = 1
STORAGE_MINOR_VERSION = 2
STORAGE_MINOR_VERSION = 3
STORAGE_KEY = DOMAIN
@@ -92,8 +92,11 @@ class GridPowerSourceType(TypedDict, total=False):
power_config: PowerConfig
class GridSourceType(TypedDict):
"""Dictionary holding the source of grid energy consumption."""
class LegacyGridSourceType(TypedDict):
"""Legacy dictionary holding the source of grid energy consumption.
This format is deprecated and will be migrated to GridSourceType.
"""
type: Literal["grid"]
@@ -104,6 +107,40 @@ class GridSourceType(TypedDict):
cost_adjustment_day: float
class GridSourceType(TypedDict):
"""Dictionary holding a unified grid connection (like batteries).
Each grid connection represents a single import/export pair with
optional power tracking. Multiple grid sources are allowed.
"""
type: Literal["grid"]
# Import meter - kWh consumed from grid
# Can be None for export-only or power-only grids migrated from legacy format
stat_energy_from: str | None
# Export meter (optional) - kWh returned to grid (solar/battery export)
stat_energy_to: str | None
# Cost tracking for import
stat_cost: str | None # statistic_id of costs ($) incurred
entity_energy_price: str | None # entity_id providing price ($/kWh)
number_energy_price: float | None # Fixed price ($/kWh)
# Compensation tracking for export
stat_compensation: str | None # statistic_id of compensation ($) received
entity_energy_price_export: str | None # entity_id providing export price ($/kWh)
number_energy_price_export: float | None # Fixed export price ($/kWh)
# Power measurement (optional)
# positive when consuming from grid, negative when exporting
stat_rate: NotRequired[str]
power_config: NotRequired[PowerConfig]
cost_adjustment_day: float
class SolarSourceType(TypedDict):
"""Dictionary holding the source of energy production."""
@@ -308,23 +345,77 @@ def _generate_unique_value_validator(key: str) -> Callable[[list[dict]], list[di
return validate_uniqueness
GRID_SOURCE_SCHEMA = vol.Schema(
{
vol.Required("type"): "grid",
vol.Required("flow_from"): vol.All(
[FLOW_FROM_GRID_SOURCE_SCHEMA],
_generate_unique_value_validator("stat_energy_from"),
),
vol.Required("flow_to"): vol.All(
[FLOW_TO_GRID_SOURCE_SCHEMA],
_generate_unique_value_validator("stat_energy_to"),
),
vol.Optional("power"): vol.All(
[GRID_POWER_SOURCE_SCHEMA],
_generate_unique_value_validator("stat_rate"),
),
vol.Required("cost_adjustment_day"): vol.Coerce(float),
}
def _grid_ensure_single_price_import(
val: dict[str, Any],
) -> dict[str, Any]:
"""Ensure we use a single price source for import."""
if (
val.get("entity_energy_price") is not None
and val.get("number_energy_price") is not None
):
raise vol.Invalid("Define either an entity or a fixed number for import price")
return val
def _grid_ensure_single_price_export(
val: dict[str, Any],
) -> dict[str, Any]:
"""Ensure we use a single price source for export."""
if (
val.get("entity_energy_price_export") is not None
and val.get("number_energy_price_export") is not None
):
raise vol.Invalid("Define either an entity or a fixed number for export price")
return val
def _grid_ensure_at_least_one_stat(
val: dict[str, Any],
) -> dict[str, Any]:
"""Ensure at least one of import, export, or power is configured."""
if (
val.get("stat_energy_from") is None
and val.get("stat_energy_to") is None
and val.get("stat_rate") is None
and val.get("power_config") is None
):
raise vol.Invalid(
"Grid must have at least one of: import meter, export meter, or power sensor"
)
return val
GRID_SOURCE_SCHEMA = vol.All(
vol.Schema(
{
vol.Required("type"): "grid",
# Import meter (can be None for export-only grids from legacy migration)
vol.Optional("stat_energy_from", default=None): vol.Any(str, None),
# Export meter (optional)
vol.Optional("stat_energy_to", default=None): vol.Any(str, None),
# Import cost tracking
vol.Optional("stat_cost", default=None): vol.Any(str, None),
vol.Optional("entity_energy_price", default=None): vol.Any(str, None),
vol.Optional("number_energy_price", default=None): vol.Any(
vol.Coerce(float), None
),
# Export compensation tracking
vol.Optional("stat_compensation", default=None): vol.Any(str, None),
vol.Optional("entity_energy_price_export", default=None): vol.Any(
str, None
),
vol.Optional("number_energy_price_export", default=None): vol.Any(
vol.Coerce(float), None
),
# Power measurement (optional)
vol.Optional("stat_rate"): str,
vol.Optional("power_config"): POWER_CONFIG_SCHEMA,
vol.Required("cost_adjustment_day"): vol.Coerce(float),
}
),
_grid_ensure_single_price_import,
_grid_ensure_single_price_export,
_grid_ensure_at_least_one_stat,
)
SOLAR_SOURCE_SCHEMA = vol.Schema(
{
@@ -369,10 +460,46 @@ WATER_SOURCE_SCHEMA = vol.Schema(
def check_type_limits(value: list[SourceType]) -> list[SourceType]:
"""Validate that we don't have too many of certain types."""
types = Counter([val["type"] for val in value])
# Currently no type limits - multiple grid sources are allowed (like batteries)
return value
if types.get("grid", 0) > 1:
raise vol.Invalid("You cannot have more than 1 grid source")
def _validate_grid_stat_uniqueness(value: list[SourceType]) -> list[SourceType]:
"""Validate that grid statistics are unique across all sources."""
seen_import: set[str] = set()
seen_export: set[str] = set()
seen_rate: set[str] = set()
for source in value:
if source.get("type") != "grid":
continue
# Cast to GridSourceType since we've filtered for grid type
grid_source: GridSourceType = source # type: ignore[assignment]
# Check import meter uniqueness
if (stat_from := grid_source.get("stat_energy_from")) is not None:
if stat_from in seen_import:
raise vol.Invalid(
f"Import meter {stat_from} is used in multiple grid connections"
)
seen_import.add(stat_from)
# Check export meter uniqueness
if (stat_to := grid_source.get("stat_energy_to")) is not None:
if stat_to in seen_export:
raise vol.Invalid(
f"Export meter {stat_to} is used in multiple grid connections"
)
seen_export.add(stat_to)
# Check power stat uniqueness
if (stat_rate := grid_source.get("stat_rate")) is not None:
if stat_rate in seen_rate:
raise vol.Invalid(
f"Power stat {stat_rate} is used in multiple grid connections"
)
seen_rate.add(stat_rate)
return value
@@ -393,6 +520,7 @@ ENERGY_SOURCE_SCHEMA = vol.All(
]
),
check_type_limits,
_validate_grid_stat_uniqueness,
)
DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
@@ -405,6 +533,82 @@ DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
)
def _migrate_legacy_grid_to_unified(
old_grid: dict[str, Any],
) -> list[dict[str, Any]]:
"""Migrate legacy grid format (flow_from/flow_to/power arrays) to unified format.
Each grid connection can have any combination of import, export, and power -
all are optional as long as at least one is configured.
Migration pairs arrays by index position:
- flow_from[i], flow_to[i], and power[i] combine into grid connection i
- If arrays have different lengths, missing entries get None for that field
- The number of grid connections equals max(len(flow_from), len(flow_to), len(power))
"""
flow_from = old_grid.get("flow_from", [])
flow_to = old_grid.get("flow_to", [])
power_list = old_grid.get("power", [])
cost_adj = old_grid.get("cost_adjustment_day", 0.0)
new_sources: list[dict[str, Any]] = []
# Number of grid connections = max length across all three arrays
# If all arrays are empty, don't create any grid sources
max_len = max(len(flow_from), len(flow_to), len(power_list))
if max_len == 0:
return []
for i in range(max_len):
source: dict[str, Any] = {
"type": "grid",
"cost_adjustment_day": cost_adj,
}
# Import fields from flow_from
if i < len(flow_from):
ff = flow_from[i]
source["stat_energy_from"] = ff.get("stat_energy_from") or None
source["stat_cost"] = ff.get("stat_cost")
source["entity_energy_price"] = ff.get("entity_energy_price")
source["number_energy_price"] = ff.get("number_energy_price")
else:
# Export-only entry - set import to None (validation will flag this)
source["stat_energy_from"] = None
source["stat_cost"] = None
source["entity_energy_price"] = None
source["number_energy_price"] = None
# Export fields from flow_to
if i < len(flow_to):
ft = flow_to[i]
source["stat_energy_to"] = ft.get("stat_energy_to")
source["stat_compensation"] = ft.get("stat_compensation")
source["entity_energy_price_export"] = ft.get("entity_energy_price")
source["number_energy_price_export"] = ft.get("number_energy_price")
else:
source["stat_energy_to"] = None
source["stat_compensation"] = None
source["entity_energy_price_export"] = None
source["number_energy_price_export"] = None
# Power config at index i goes to grid connection at index i
if i < len(power_list):
power = power_list[i]
if "power_config" in power:
source["power_config"] = power["power_config"]
if "stat_rate" in power:
source["stat_rate"] = power["stat_rate"]
new_sources.append(source)
return new_sources
def _is_legacy_grid_format(source: dict[str, Any]) -> bool:
"""Check if a grid source is in the legacy format."""
return source.get("type") == "grid" and "flow_from" in source
class _EnergyPreferencesStore(storage.Store[EnergyPreferences]):
"""Energy preferences store with migration support."""
@@ -419,6 +623,18 @@ class _EnergyPreferencesStore(storage.Store[EnergyPreferences]):
if old_major_version == 1 and old_minor_version < 2:
# Add device_consumption_water field if it doesn't exist
data.setdefault("device_consumption_water", [])
if old_major_version == 1 and old_minor_version < 3:
# Migrate legacy grid format to unified format
new_sources: list[dict[str, Any]] = []
for source in data.get("energy_sources", []):
if _is_legacy_grid_format(source):
# Convert legacy grid to multiple unified grid sources
new_sources.extend(_migrate_legacy_grid_to_unified(source))
else:
new_sources.append(source)
data["energy_sources"] = new_sources
return data
@@ -516,27 +732,18 @@ class EnergyManager:
source: GridSourceType,
generate_entity_id: Callable[[str, PowerConfig], str],
) -> GridSourceType:
"""Set stat_rate for grid power sources if power_config is specified."""
if "power" not in source:
"""Set stat_rate for grid if power_config is specified."""
if "power_config" not in source:
return source
processed_power: list[GridPowerSourceType] = []
for power in source["power"]:
if "power_config" in power:
config = power["power_config"]
config = source["power_config"]
# If power_config has stat_rate (standard), just use it directly
if "stat_rate" in config:
processed_power.append({**power, "stat_rate": config["stat_rate"]})
else:
# For inverted or two-sensor config, set stat_rate to generated entity_id
processed_power.append(
{**power, "stat_rate": generate_entity_id("grid", config)}
)
else:
processed_power.append(power)
# If power_config has stat_rate (standard), just use it directly
if "stat_rate" in config:
return {**source, "stat_rate": config["stat_rate"]}
return {**source, "power": processed_power}
# For inverted or two-sensor config, set stat_rate to the generated entity_id
return {**source, "stat_rate": generate_entity_id("grid", config)}
@callback
def async_listen_updates(self, update_listener: Callable[[], Awaitable]) -> None:

View File

@@ -94,22 +94,15 @@ class SourceAdapter:
SOURCE_ADAPTERS: Final = (
# Grid import cost (unified format)
SourceAdapter(
"grid",
"flow_from",
None, # No flow_type - unified format
"stat_energy_from",
"stat_cost",
"Cost",
"cost",
),
SourceAdapter(
"grid",
"flow_to",
"stat_energy_to",
"stat_compensation",
"Compensation",
"compensation",
),
SourceAdapter(
"gas",
None,
@@ -128,6 +121,16 @@ SOURCE_ADAPTERS: Final = (
),
)
# Separate adapter for grid export compensation (needs different price field)
GRID_EXPORT_ADAPTER: Final = SourceAdapter(
"grid",
None, # No flow_type - unified format
"stat_energy_to",
"stat_compensation",
"Compensation",
"compensation",
)
class EntityNotFoundError(HomeAssistantError):
"""When a referenced entity was not found."""
@@ -183,22 +186,20 @@ class SensorManager:
if adapter.source_type != energy_source["type"]:
continue
if adapter.flow_type is None:
self._process_sensor_data(
adapter,
energy_source,
to_add,
to_remove,
)
continue
self._process_sensor_data(
adapter,
energy_source,
to_add,
to_remove,
)
for flow in energy_source[adapter.flow_type]: # type: ignore[typeddict-item]
self._process_sensor_data(
adapter,
flow,
to_add,
to_remove,
)
# Handle grid export compensation (unified format uses different price fields)
if energy_source["type"] == "grid":
self._process_grid_export_sensor(
energy_source,
to_add,
to_remove,
)
# Process power sensors for battery and grid sources
self._process_power_sensor_data(
@@ -222,11 +223,16 @@ class SensorManager:
if config.get(adapter.total_money_key) is not None:
return
key = (adapter.source_type, adapter.flow_type, config[adapter.stat_energy_key])
# Skip if the energy stat is not configured (e.g., export-only or power-only grids)
stat_energy = config.get(adapter.stat_energy_key)
if not stat_energy:
return
key = (adapter.source_type, adapter.flow_type, stat_energy)
# Make sure the right data is there
# If the entity existed, we don't pop it from to_remove so it's removed
if not valid_entity_id(config[adapter.stat_energy_key]) or (
if not valid_entity_id(stat_energy) or (
config.get("entity_energy_price") is None
and config.get("number_energy_price") is None
):
@@ -242,6 +248,56 @@ class SensorManager:
)
to_add.append(self.current_entities[key])
@callback
def _process_grid_export_sensor(
self,
config: Mapping[str, Any],
to_add: list[EnergyCostSensor | EnergyPowerSensor],
to_remove: dict[tuple[str, str | None, str], EnergyCostSensor],
) -> None:
"""Process grid export compensation sensor (unified format).
The unified grid format uses different field names for export pricing:
- entity_energy_price_export instead of entity_energy_price
- number_energy_price_export instead of number_energy_price
"""
# No export meter configured
stat_energy_to = config.get("stat_energy_to")
if stat_energy_to is None:
return
# Already have a compensation stat
if config.get("stat_compensation") is not None:
return
key = ("grid", None, stat_energy_to)
# Check for export pricing fields (different names in unified format)
if not valid_entity_id(stat_energy_to) or (
config.get("entity_energy_price_export") is None
and config.get("number_energy_price_export") is None
):
return
# Create a config wrapper that maps the sell price fields to standard names
# so EnergyCostSensor can use them
export_config: dict[str, Any] = {
"stat_energy_to": stat_energy_to,
"stat_compensation": config.get("stat_compensation"),
"entity_energy_price": config.get("entity_energy_price_export"),
"number_energy_price": config.get("number_energy_price_export"),
}
if current_entity := to_remove.pop(key, None):
current_entity.update_config(export_config)
return
self.current_entities[key] = EnergyCostSensor(
GRID_EXPORT_ADAPTER,
export_config,
)
to_add.append(self.current_entities[key])
@callback
def _process_power_sensor_data(
self,
@@ -252,21 +308,14 @@ class SensorManager:
"""Process power sensor data for battery and grid sources."""
source_type = energy_source.get("type")
if source_type == "battery":
if source_type in ("battery", "grid"):
# Both battery and grid now use unified format with power_config at top level
power_config = energy_source.get("power_config")
if power_config and self._needs_power_sensor(power_config):
self._create_or_keep_power_sensor(
source_type, power_config, to_add, to_remove
)
elif source_type == "grid":
for power in energy_source.get("power", []):
power_config = power.get("power_config")
if power_config and self._needs_power_sensor(power_config):
self._create_or_keep_power_sensor(
source_type, power_config, to_add, to_remove
)
@staticmethod
def _needs_power_sensor(power_config: PowerConfig) -> bool:
"""Check if power_config needs a transform sensor."""
@@ -312,6 +361,17 @@ class EnergyCostSensor(SensorEntity):
This is intended as a fallback for when no specific cost sensor is available for the
utility.
Expected config fields (from adapter or export_config wrapper):
- stat_energy_key (via adapter): Key to get the energy statistic ID
- total_money_key (via adapter): Key to get the existing cost/compensation stat
- entity_energy_price: Entity ID providing price per unit (e.g., $/kWh)
- number_energy_price: Fixed price per unit
Note: For grid export compensation, the unified format uses different field names
(entity_energy_price_export, number_energy_price_export). The _process_grid_export_sensor
method in SensorManager creates a wrapper config that maps these to the standard
field names (entity_energy_price, number_energy_price) so this class can use them.
"""
_attr_entity_registry_visible_default = False

View File

@@ -401,16 +401,20 @@ def _validate_grid_source(
source_result: ValidationIssues,
validate_calls: list[functools.partial[None]],
) -> None:
"""Validate grid energy source."""
flow_from: data.FlowFromGridSourceType
for flow_from in source["flow_from"]:
wanted_statistics_metadata.add(flow_from["stat_energy_from"])
"""Validate grid energy source (unified format)."""
stat_energy_from = source.get("stat_energy_from")
stat_energy_to = source.get("stat_energy_to")
stat_rate = source.get("stat_rate")
# Validate import meter (optional)
if stat_energy_from:
wanted_statistics_metadata.add(stat_energy_from)
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
flow_from["stat_energy_from"],
stat_energy_from,
ENERGY_USAGE_DEVICE_CLASSES,
ENERGY_USAGE_UNITS,
ENERGY_UNIT_ERROR,
@@ -418,7 +422,8 @@ def _validate_grid_source(
)
)
if (stat_cost := flow_from.get("stat_cost")) is not None:
# Validate import cost tracking (only if import meter exists)
if (stat_cost := source.get("stat_cost")) is not None:
wanted_statistics_metadata.add(stat_cost)
validate_calls.append(
functools.partial(
@@ -429,7 +434,7 @@ def _validate_grid_source(
source_result,
)
)
elif (entity_energy_price := flow_from.get("entity_energy_price")) is not None:
elif (entity_energy_price := source.get("entity_energy_price")) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
@@ -442,27 +447,27 @@ def _validate_grid_source(
)
if (
flow_from.get("entity_energy_price") is not None
or flow_from.get("number_energy_price") is not None
source.get("entity_energy_price") is not None
or source.get("number_energy_price") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
flow_from["stat_energy_from"],
stat_energy_from,
source_result,
)
)
flow_to: data.FlowToGridSourceType
for flow_to in source["flow_to"]:
wanted_statistics_metadata.add(flow_to["stat_energy_to"])
# Validate export meter (optional)
if stat_energy_to:
wanted_statistics_metadata.add(stat_energy_to)
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
flow_to["stat_energy_to"],
stat_energy_to,
ENERGY_USAGE_DEVICE_CLASSES,
ENERGY_USAGE_UNITS,
ENERGY_UNIT_ERROR,
@@ -470,7 +475,8 @@ def _validate_grid_source(
)
)
if (stat_compensation := flow_to.get("stat_compensation")) is not None:
# Validate export compensation tracking
if (stat_compensation := source.get("stat_compensation")) is not None:
wanted_statistics_metadata.add(stat_compensation)
validate_calls.append(
functools.partial(
@@ -481,12 +487,14 @@ def _validate_grid_source(
source_result,
)
)
elif (entity_energy_price := flow_to.get("entity_energy_price")) is not None:
elif (
entity_price_export := source.get("entity_energy_price_export")
) is not None:
validate_calls.append(
functools.partial(
_async_validate_price_entity,
hass,
entity_energy_price,
entity_price_export,
source_result,
ENERGY_PRICE_UNITS,
ENERGY_PRICE_UNIT_ERROR,
@@ -494,26 +502,27 @@ def _validate_grid_source(
)
if (
flow_to.get("entity_energy_price") is not None
or flow_to.get("number_energy_price") is not None
source.get("entity_energy_price_export") is not None
or source.get("number_energy_price_export") is not None
):
validate_calls.append(
functools.partial(
_async_validate_auto_generated_cost_entity,
hass,
flow_to["stat_energy_to"],
stat_energy_to,
source_result,
)
)
for power_stat in source.get("power", []):
wanted_statistics_metadata.add(power_stat["stat_rate"])
# Validate power sensor (optional)
if stat_rate:
wanted_statistics_metadata.add(stat_rate)
validate_calls.append(
functools.partial(
_async_validate_power_stat,
hass,
statistics_metadata,
power_stat["stat_rate"],
stat_rate,
POWER_USAGE_DEVICE_CLASSES,
POWER_USAGE_UNITS,
POWER_UNIT_ERROR,

View File

@@ -77,7 +77,7 @@ class FacebookNotificationService(BaseNotificationService):
"recipient": recipient,
"message": body_message,
"messaging_type": "MESSAGE_TAG",
"tag": "ACCOUNT_UPDATE",
"tag": "HUMAN_AGENT",
}
resp = requests.post(
BASE_URL,

View File

@@ -8,5 +8,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["googleapiclient"],
"requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==12.1.3"]
"requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==13.2.0"]
}

View File

@@ -51,31 +51,86 @@ class GoogleGenerativeAITextToSpeechEntity(
# Note the documentation might not be up to date, e.g. el-GR is not listed
# there but is supported.
_attr_supported_languages = [
"af-ZA",
"am-ET",
"ar-EG",
"az-AZ",
"be-BY",
"bg-BG",
"bn-BD",
"ca-ES",
"ceb-PH",
"cmn-CN",
"cs-CZ",
"da-DK",
"de-DE",
"el-GR",
"en-IN",
"en-US",
"es-ES",
"es-US",
"et-EE",
"eu-ES",
"fa-IR",
"fi-FI",
"fil-PH",
"fr-FR",
"gl-ES",
"gu-IN",
"he-IL",
"hi-IN",
"hr-HR",
"ht-HT",
"hu-HU",
"hy-AM",
"id-ID",
"is-IS",
"it-IT",
"ja-JP",
"jv-ID",
"ka-GE",
"kn-IN",
"ko-KR",
"kok-IN",
"la-VA",
"lb-LU",
"lo-LA",
"lt-LT",
"lv-LV",
"mai-IN",
"mg-MG",
"mk-MK",
"ml-IN",
"mn-MN",
"mr-IN",
"ms-MY",
"my-MM",
"nb-NO",
"ne-NP",
"nl-NL",
"nn-NO",
"or-IN",
"pa-IN",
"pl-PL",
"ps-AF",
"pt-BR",
"pt-PT",
"ro-RO",
"ru-RU",
"sd-PK",
"si-LK",
"sk-SK",
"sl-SI",
"sq-AL",
"sr-RS",
"sv-SE",
"sw-KE",
"ta-IN",
"te-IN",
"th-TH",
"tr-TR",
"uk-UA",
"ur-PK",
"vi-VN",
]
# Unused, but required by base class.

View File

@@ -164,10 +164,10 @@
"name": "Relay"
},
"tx0plus5": {
"name": "TX0 force +5v"
"name": "TX0 force +5V"
},
"tx1plus5": {
"name": "TX1 force +5v"
"name": "TX1 force +5V"
}
}
},

View File

@@ -50,6 +50,44 @@ class IntegrationNotFoundFlow(RepairsFlow):
)
class OrphanedConfigEntryFlow(RepairsFlow):
"""Handler for an issue fixing flow."""
def __init__(self, data: dict[str, str]) -> None:
"""Initialize."""
self.entry_id = data["entry_id"]
self.description_placeholders = data
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> FlowResult:
"""Handle the first step of a fix flow."""
return self.async_show_menu(
step_id="init",
menu_options=["confirm", "ignore"],
description_placeholders=self.description_placeholders,
)
async def async_step_confirm(
self, user_input: dict[str, str] | None = None
) -> FlowResult:
"""Handle the confirm step of a fix flow."""
await self.hass.config_entries.async_remove(self.entry_id)
return self.async_create_entry(data={})
async def async_step_ignore(
self, user_input: dict[str, str] | None = None
) -> FlowResult:
"""Handle the ignore step of a fix flow."""
ir.async_get(self.hass).async_ignore(
DOMAIN, f"orphaned_ignored_entry.{self.entry_id}", True
)
return self.async_abort(
reason="issue_ignored",
description_placeholders=self.description_placeholders,
)
async def async_create_fix_flow(
hass: HomeAssistant, issue_id: str, data: dict[str, str] | None
) -> RepairsFlow:
@@ -58,4 +96,7 @@ async def async_create_fix_flow(
if issue_id.split(".", maxsplit=1)[0] == "integration_not_found":
assert data
return IntegrationNotFoundFlow(data)
if issue_id.split(".", maxsplit=1)[0] == "orphaned_ignored_entry":
assert data
return OrphanedConfigEntryFlow(data)
return ConfirmRepairFlow()

View File

@@ -162,6 +162,24 @@
"description": "It's not possible to configure {platform} {domain} by adding `{platform_key}` to the {domain} configuration. Please check the documentation for more information on how to set up this integration.\n\nTo resolve this:\n1. Remove `{platform_key}` occurrences from the `{domain}:` configuration in your YAML configuration file.\n2. Restart Home Assistant.\n\nExample that should be removed:\n{yaml_example}",
"title": "Unused YAML configuration for the {platform} integration"
},
"orphaned_ignored_config_entry": {
"fix_flow": {
"abort": {
"issue_ignored": "Non-existent integration {domain} ignored."
},
"step": {
"init": {
"description": "There is an ignored orphaned config entry for the `{domain}` integration. This can happen when an integration is removed, but the config entry is still present in Home Assistant.\n\nTo resolve this, press **Remove** to clean up the orphaned entry.",
"menu_options": {
"confirm": "Remove",
"ignore": "Ignore"
},
"title": "[%key:component::homeassistant::issues::orphaned_ignored_config_entry::title%]"
}
}
},
"title": "Orphaned ignored config entry for {domain}"
},
"platform_only": {
"description": "The {domain} integration does not support configuration under its own key, it must be configured under its supported platforms.\n\nTo resolve this:\n\n1. Remove `{domain}:` from your YAML configuration file.\n\n2. Restart Home Assistant.",
"title": "The {domain} integration does not support YAML configuration under its own key"

View File

@@ -31,6 +31,7 @@ HOMEE_UNIT_TO_HA_UNIT = {
"n/a": None,
"text": None,
"%": PERCENTAGE,
"Lux": LIGHT_LUX,
"lx": LIGHT_LUX,
"klx": LIGHT_LUX,
"1/min": REVOLUTIONS_PER_MINUTE,

View File

@@ -30,6 +30,7 @@ from homematicip.device import (
PresenceDetectorIndoor,
RoomControlDeviceAnalog,
SmokeDetector,
SoilMoistureSensorInterface,
SwitchMeasuring,
TemperatureDifferenceSensor2,
TemperatureHumiditySensorDisplay,
@@ -285,6 +286,10 @@ def get_device_handlers(hap: HomematicipHAP) -> dict[type, Callable]:
EnergySensorsInterface: lambda device: _handle_energy_sensor_interface(
hap, device
),
SoilMoistureSensorInterface: lambda device: [
HomematicipSoilMoistureSensor(hap, device),
HomematicipSoilTemperatureSensor(hap, device),
],
}
@@ -622,6 +627,7 @@ class HomematicipAbsoluteHumiditySensor(HomematicipGenericEntity, SensorEntity):
_attr_device_class = SensorDeviceClass.ABSOLUTE_HUMIDITY
_attr_native_unit_of_measurement = CONCENTRATION_GRAMS_PER_CUBIC_METER
_attr_suggested_display_precision = 1
_attr_suggested_unit_of_measurement = CONCENTRATION_MILLIGRAMS_PER_CUBIC_METER
_attr_state_class = SensorStateClass.MEASUREMENT
@@ -636,7 +642,7 @@ class HomematicipAbsoluteHumiditySensor(HomematicipGenericEntity, SensorEntity):
if value is None or value == "":
return None
return round(value, 3)
return value
class HomematicipIlluminanceSensor(HomematicipGenericEntity, SensorEntity):
@@ -1031,6 +1037,48 @@ class HmipSmokeDetectorSensor(HomematicipGenericEntity, SensorEntity):
return self.entity_description.value_fn(self._device)
class HomematicipSoilMoistureSensor(HomematicipGenericEntity, SensorEntity):
"""Representation of the HomematicIP soil moisture sensor."""
_attr_device_class = SensorDeviceClass.MOISTURE
_attr_native_unit_of_measurement = PERCENTAGE
_attr_state_class = SensorStateClass.MEASUREMENT
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize the soil moisture sensor device."""
super().__init__(
hap, device, post="Soil Moisture", channel=1, is_multi_channel=True
)
@property
def native_value(self) -> int | None:
"""Return the state."""
if self.functional_channel is None:
return None
return self.functional_channel.soilMoisture
class HomematicipSoilTemperatureSensor(HomematicipGenericEntity, SensorEntity):
"""Representation of the HomematicIP soil temperature sensor."""
_attr_device_class = SensorDeviceClass.TEMPERATURE
_attr_native_unit_of_measurement = UnitOfTemperature.CELSIUS
_attr_state_class = SensorStateClass.MEASUREMENT
def __init__(self, hap: HomematicipHAP, device) -> None:
"""Initialize the soil temperature sensor device."""
super().__init__(
hap, device, post="Soil Temperature", channel=1, is_multi_channel=True
)
@property
def native_value(self) -> float | None:
"""Return the state."""
if self.functional_channel is None:
return None
return self.functional_channel.soilTemperature
def _get_wind_direction(wind_direction_degree: float) -> str:
"""Convert wind direction degree to named direction."""
if 11.25 <= wind_direction_degree < 33.75:

View File

@@ -6,7 +6,7 @@
"documentation": "https://www.home-assistant.io/integrations/homevolt",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "bronze",
"quality_scale": "silver",
"requirements": ["homevolt==0.4.4"],
"zeroconf": [
{

View File

@@ -33,13 +33,13 @@ rules:
docs-configuration-parameters:
status: exempt
comment: Integration does not have an options flow.
docs-installation-parameters: todo
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: todo
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: done
test-coverage: todo
test-coverage: done
# Gold
devices: done

View File

@@ -27,15 +27,36 @@ from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import instance_id
from homeassistant.helpers.selector import TextSelector
from homeassistant.helpers.selector import (
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
TextSelector,
)
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from .const import CONF_PRODUCT_NAME, CONF_PRODUCT_TYPE, CONF_SERIAL, DOMAIN, LOGGER
from .const import (
CONF_PRODUCT_NAME,
CONF_PRODUCT_TYPE,
CONF_SERIAL,
CONF_USAGE,
DOMAIN,
ENERGY_MONITORING_DEVICES,
LOGGER,
)
USAGE_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=["consumption", "generation"],
translation_key="usage",
mode=SelectSelectorMode.LIST,
)
)
class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for P1 meter."""
"""Handle a config flow for HomeWizard devices."""
VERSION = 1
@@ -43,6 +64,8 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
product_name: str | None = None
product_type: str | None = None
serial: str | None = None
token: str | None = None
usage: str | None = None
async def async_step_user(
self, user_input: dict[str, Any] | None = None
@@ -64,6 +87,12 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
f"{device_info.product_type}_{device_info.serial}"
)
self._abort_if_unique_id_configured(updates=user_input)
if device_info.product_type in ENERGY_MONITORING_DEVICES:
self.ip_address = user_input[CONF_IP_ADDRESS]
self.product_name = device_info.product_name
self.product_type = device_info.product_type
self.serial = device_info.serial
return await self.async_step_usage()
return self.async_create_entry(
title=f"{device_info.product_name}",
data=user_input,
@@ -82,6 +111,45 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
)
async def async_step_usage(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Step where we ask how the energy monitor is used."""
assert self.ip_address
assert self.product_name
assert self.product_type
assert self.serial
data: dict[str, Any] = {CONF_IP_ADDRESS: self.ip_address}
if self.token:
data[CONF_TOKEN] = self.token
if user_input is not None:
return self.async_create_entry(
title=f"{self.product_name}",
data=data | user_input,
)
return self.async_show_form(
step_id="usage",
data_schema=vol.Schema(
{
vol.Required(
CONF_USAGE,
default=user_input.get(CONF_USAGE)
if user_input is not None
else "consumption",
): USAGE_SELECTOR,
}
),
description_placeholders={
CONF_PRODUCT_NAME: self.product_name,
CONF_PRODUCT_TYPE: self.product_type,
CONF_SERIAL: self.serial,
CONF_IP_ADDRESS: self.ip_address,
},
)
async def async_step_authorize(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
@@ -101,8 +169,7 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
# Now we got a token, we can ask for some more info
async with HomeWizardEnergyV2(self.ip_address, token=token) as api:
device_info = await api.device()
device_info = await HomeWizardEnergyV2(self.ip_address, token=token).device()
data = {
CONF_IP_ADDRESS: self.ip_address,
@@ -113,6 +180,14 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
f"{device_info.product_type}_{device_info.serial}"
)
self._abort_if_unique_id_configured(updates=data)
self.product_name = device_info.product_name
self.product_type = device_info.product_type
self.serial = device_info.serial
if device_info.product_type in ENERGY_MONITORING_DEVICES:
self.token = token
return await self.async_step_usage()
return self.async_create_entry(
title=f"{device_info.product_name}",
data=data,
@@ -139,6 +214,8 @@ class HomeWizardConfigFlow(ConfigFlow, domain=DOMAIN):
self._abort_if_unique_id_configured(
updates={CONF_IP_ADDRESS: discovery_info.host}
)
if self.product_type in ENERGY_MONITORING_DEVICES:
return await self.async_step_usage()
return await self.async_step_discovery_confirm()

View File

@@ -5,6 +5,8 @@ from __future__ import annotations
from datetime import timedelta
import logging
from homewizard_energy.const import Model
from homeassistant.const import Platform
DOMAIN = "homewizard"
@@ -22,5 +24,14 @@ LOGGER = logging.getLogger(__package__)
CONF_PRODUCT_NAME = "product_name"
CONF_PRODUCT_TYPE = "product_type"
CONF_SERIAL = "serial"
CONF_USAGE = "usage"
UPDATE_INTERVAL = timedelta(seconds=5)
ENERGY_MONITORING_DEVICES = (
Model.ENERGY_SOCKET,
Model.ENERGY_METER_1_PHASE,
Model.ENERGY_METER_3_PHASE,
Model.ENERGY_METER_EASTRON_SDM230,
Model.ENERGY_METER_EASTRON_SDM630,
)

View File

@@ -39,7 +39,7 @@ from homeassistant.helpers.typing import StateType
from homeassistant.util.dt import utcnow
from homeassistant.util.variance import ignore_variance
from .const import DOMAIN
from .const import CONF_USAGE, DOMAIN, ENERGY_MONITORING_DEVICES
from .coordinator import HomeWizardConfigEntry, HWEnergyDeviceUpdateCoordinator
from .entity import HomeWizardEntity
@@ -267,15 +267,6 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
enabled_fn=lambda data: data.measurement.energy_export_t4_kwh != 0,
value_fn=lambda data: data.measurement.energy_export_t4_kwh or None,
),
HomeWizardSensorEntityDescription(
key="active_power_w",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
has_fn=lambda data: data.measurement.power_w is not None,
value_fn=lambda data: data.measurement.power_w,
),
HomeWizardSensorEntityDescription(
key="active_power_l1_w",
translation_key="active_power_phase_w",
@@ -700,22 +691,30 @@ async def async_setup_entry(
entry: HomeWizardConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Initialize sensors."""
# Initialize default sensors
"""Cleanup deleted entrity registry item."""
entities: list = [
HomeWizardSensorEntity(entry.runtime_data, description)
for description in SENSORS
if description.has_fn(entry.runtime_data.data)
]
active_power_sensor_description = HomeWizardSensorEntityDescription(
key="active_power_w",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=0,
entity_registry_enabled_default=(
entry.runtime_data.data.device.product_type != Model.BATTERY
and entry.data.get(CONF_USAGE, "consumption") == "consumption"
),
has_fn=lambda x: True,
value_fn=lambda data: data.measurement.power_w,
)
# Add optional production power sensor for supported energy monitoring devices
# or plug-in battery
if entry.runtime_data.data.device.product_type in (
Model.ENERGY_SOCKET,
Model.ENERGY_METER_1_PHASE,
Model.ENERGY_METER_3_PHASE,
Model.ENERGY_METER_EASTRON_SDM230,
Model.ENERGY_METER_EASTRON_SDM630,
*ENERGY_MONITORING_DEVICES,
Model.BATTERY,
):
active_prodution_power_sensor_description = HomeWizardSensorEntityDescription(
@@ -735,17 +734,27 @@ async def async_setup_entry(
is not None
and total_export > 0
)
or entry.data.get(CONF_USAGE, "consumption") == "generation"
),
has_fn=lambda x: True,
value_fn=lambda data: (
power_w * -1 if (power_w := data.measurement.power_w) else power_w
),
)
entities.append(
HomeWizardSensorEntity(
entry.runtime_data, active_prodution_power_sensor_description
entities.extend(
(
HomeWizardSensorEntity(
entry.runtime_data, active_power_sensor_description
),
HomeWizardSensorEntity(
entry.runtime_data, active_prodution_power_sensor_description
),
)
)
elif (data := entry.runtime_data.data) and data.measurement.power_w is not None:
entities.append(
HomeWizardSensorEntity(entry.runtime_data, active_power_sensor_description)
)
# Initialize external devices
measurement = entry.runtime_data.data.measurement

View File

@@ -41,6 +41,16 @@
},
"description": "Update configuration for {title}."
},
"usage": {
"data": {
"usage": "Usage"
},
"data_description": {
"usage": "This will enable either a power consumption or power production sensor the first time this device is set up."
},
"description": "What are you going to monitor with your {product_name} ({product_type} {serial} at {ip_address})?",
"title": "Usage"
},
"user": {
"data": {
"ip_address": "[%key:common::config_flow::data::ip%]"
@@ -199,5 +209,13 @@
},
"title": "Update the authentication method for {title}"
}
},
"selector": {
"usage": {
"options": {
"consumption": "Monitoring consumed energy",
"generation": "Monitoring generated energy"
}
}
}
}

View File

@@ -3,14 +3,18 @@
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import discovery
from homeassistant.helpers import config_validation as cv, discovery
from .const import DOMAIN
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up HTML5 from a config entry."""
await discovery.async_load_platform(
hass, Platform.NOTIFY, DOMAIN, dict(entry.data), {}
hass.async_create_task(
discovery.async_load_platform(
hass, Platform.NOTIFY, DOMAIN, dict(entry.data), {}
)
)
return True

View File

@@ -17,7 +17,6 @@ from homeassistant.const import CONF_NAME
from homeassistant.core import callback
from .const import ATTR_VAPID_EMAIL, ATTR_VAPID_PRV_KEY, ATTR_VAPID_PUB_KEY, DOMAIN
from .issues import async_create_html5_issue
def vapid_generate_private_key() -> str:
@@ -92,14 +91,3 @@ class HTML5ConfigFlow(ConfigFlow, domain=DOMAIN):
),
errors=errors,
)
async def async_step_import(
self: HTML5ConfigFlow, import_config: dict
) -> ConfigFlowResult:
"""Handle config import from yaml."""
_, flow_result = self._async_create_html5_entry(import_config)
if not flow_result:
async_create_html5_issue(self.hass, False)
return self.async_abort(reason="invalid_config")
async_create_html5_issue(self.hass, True)
return flow_result

View File

@@ -1,50 +0,0 @@
"""Issues utility for HTML5."""
import logging
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
SUCCESSFUL_IMPORT_TRANSLATION_KEY = "deprecated_yaml"
FAILED_IMPORT_TRANSLATION_KEY = "deprecated_yaml_import_issue"
INTEGRATION_TITLE = "HTML5 Push Notifications"
@callback
def async_create_html5_issue(hass: HomeAssistant, import_success: bool) -> None:
"""Create issues for HTML5."""
if import_success:
async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2025.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)
else:
async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2025.4.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)

View File

@@ -7,6 +7,6 @@
"documentation": "https://www.home-assistant.io/integrations/html5",
"iot_class": "cloud_push",
"loggers": ["http_ece", "py_vapid", "pywebpush"],
"requirements": ["pywebpush==1.14.1"],
"requirements": ["pywebpush==2.3.0", "py_vapid==1.9.4"],
"single_config_entry": true
}

View File

@@ -9,10 +9,11 @@ from http import HTTPStatus
import json
import logging
import time
from typing import Any
from typing import TYPE_CHECKING, Any, NotRequired, TypedDict, cast
from urllib.parse import urlparse
import uuid
from aiohttp import web
from aiohttp.hdrs import AUTHORIZATION
import jwt
from py_vapid import Vapid
@@ -27,18 +28,17 @@ from homeassistant.components.notify import (
ATTR_TARGET,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.components.websocket_api import ActiveConnection
from homeassistant.const import ATTR_NAME, URL_ROOT
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.json import save_json
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import ensure_unique_string
from homeassistant.util.json import JsonObjectType, load_json_object
from homeassistant.util.json import load_json_object
from .const import (
ATTR_VAPID_EMAIL,
@@ -47,23 +47,12 @@ from .const import (
DOMAIN,
SERVICE_DISMISS,
)
from .issues import async_create_html5_issue
_LOGGER = logging.getLogger(__name__)
REGISTRATIONS_FILE = "html5_push_registrations.conf"
PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend(
{
vol.Optional("gcm_sender_id"): cv.string,
vol.Optional("gcm_api_key"): cv.string,
vol.Required(ATTR_VAPID_PUB_KEY): cv.string,
vol.Required(ATTR_VAPID_PRV_KEY): cv.string,
vol.Required(ATTR_VAPID_EMAIL): cv.string,
}
)
ATTR_SUBSCRIPTION = "subscription"
ATTR_BROWSER = "browser"
@@ -159,6 +148,29 @@ HTML5_SHOWNOTIFICATION_PARAMETERS = (
)
class Keys(TypedDict):
"""Types for keys."""
p256dh: str
auth: str
class Subscription(TypedDict):
"""Types for subscription."""
endpoint: str
expirationTime: int | None
keys: Keys
class Registration(TypedDict):
"""Types for registration."""
subscription: Subscription
browser: str
name: NotRequired[str]
async def async_get_service(
hass: HomeAssistant,
config: ConfigType,
@@ -166,17 +178,7 @@ async def async_get_service(
) -> HTML5NotificationService | None:
"""Get the HTML5 push notification service."""
if config:
existing_config_entry = hass.config_entries.async_entries(DOMAIN)
if existing_config_entry:
async_create_html5_issue(hass, True)
return None
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
)
return None
if discovery_info is None:
return None
@@ -184,11 +186,14 @@ async def async_get_service(
registrations = await hass.async_add_executor_job(_load_config, json_path)
vapid_pub_key = discovery_info[ATTR_VAPID_PUB_KEY]
vapid_prv_key = discovery_info[ATTR_VAPID_PRV_KEY]
vapid_email = discovery_info[ATTR_VAPID_EMAIL]
vapid_pub_key: str = discovery_info[ATTR_VAPID_PUB_KEY]
vapid_prv_key: str = discovery_info[ATTR_VAPID_PRV_KEY]
vapid_email: str = discovery_info[ATTR_VAPID_EMAIL]
def websocket_appkey(_hass, connection, msg):
@callback
def websocket_appkey(
_hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
connection.send_message(websocket_api.result_message(msg["id"], vapid_pub_key))
websocket_api.async_register_command(
@@ -203,10 +208,10 @@ async def async_get_service(
)
def _load_config(filename: str) -> JsonObjectType:
def _load_config(filename: str) -> dict[str, Registration]:
"""Load configuration."""
with suppress(HomeAssistantError):
return load_json_object(filename)
return cast(dict[str, Registration], load_json_object(filename))
return {}
@@ -216,19 +221,20 @@ class HTML5PushRegistrationView(HomeAssistantView):
url = "/api/notify.html5"
name = "api:notify.html5"
def __init__(self, registrations, json_path):
def __init__(self, registrations: dict[str, Registration], json_path: str) -> None:
"""Init HTML5PushRegistrationView."""
self.registrations = registrations
self.json_path = json_path
async def post(self, request):
async def post(self, request: web.Request) -> web.Response:
"""Accept the POST request for push registrations from a browser."""
try:
data = await request.json()
data: Registration = await request.json()
except ValueError:
return self.json_message("Invalid JSON", HTTPStatus.BAD_REQUEST)
try:
data = REGISTER_SCHEMA(data)
data = cast(Registration, REGISTER_SCHEMA(data))
except vol.Invalid as ex:
return self.json_message(humanize_error(data, ex), HTTPStatus.BAD_REQUEST)
@@ -257,28 +263,32 @@ class HTML5PushRegistrationView(HomeAssistantView):
"Error saving registration.", HTTPStatus.INTERNAL_SERVER_ERROR
)
def find_registration_name(self, data, suggested=None):
def find_registration_name(
self,
data: Registration,
suggested: str | None = None,
):
"""Find a registration name matching data or generate a unique one."""
endpoint = data.get(ATTR_SUBSCRIPTION).get(ATTR_ENDPOINT)
endpoint = data["subscription"]["endpoint"]
for key, registration in self.registrations.items():
subscription = registration.get(ATTR_SUBSCRIPTION)
subscription = registration["subscription"]
if subscription.get(ATTR_ENDPOINT) == endpoint:
return key
return ensure_unique_string(suggested or "unnamed device", self.registrations)
async def delete(self, request):
async def delete(self, request: web.Request):
"""Delete a registration."""
try:
data = await request.json()
data: dict[str, Any] = await request.json()
except ValueError:
return self.json_message("Invalid JSON", HTTPStatus.BAD_REQUEST)
subscription = data.get(ATTR_SUBSCRIPTION)
subscription: dict[str, Any] = data[ATTR_SUBSCRIPTION]
found = None
for key, registration in self.registrations.items():
if registration.get(ATTR_SUBSCRIPTION) == subscription:
if registration["subscription"] == subscription:
found = key
break
@@ -310,11 +320,11 @@ class HTML5PushCallbackView(HomeAssistantView):
url = "/api/notify.html5/callback"
name = "api:notify.html5/callback"
def __init__(self, registrations):
def __init__(self, registrations: dict[str, Registration]) -> None:
"""Init HTML5PushCallbackView."""
self.registrations = registrations
def decode_jwt(self, token):
def decode_jwt(self, token: str) -> web.Response | dict[str, Any]:
"""Find the registration that signed this JWT and return it."""
# 1. Check claims w/o verifying to see if a target is in there.
@@ -322,12 +332,12 @@ class HTML5PushCallbackView(HomeAssistantView):
# 2a. If decode is successful, return the payload.
# 2b. If decode is unsuccessful, return a 401.
target_check = jwt.decode(
target_check: dict[str, Any] = jwt.decode(
token, algorithms=["ES256", "HS256"], options={"verify_signature": False}
)
if target_check.get(ATTR_TARGET) in self.registrations:
possible_target = self.registrations[target_check[ATTR_TARGET]]
key = possible_target[ATTR_SUBSCRIPTION][ATTR_KEYS][ATTR_AUTH]
key = possible_target["subscription"]["keys"]["auth"]
with suppress(jwt.exceptions.DecodeError):
return jwt.decode(token, key, algorithms=["ES256", "HS256"])
@@ -337,7 +347,9 @@ class HTML5PushCallbackView(HomeAssistantView):
# The following is based on code from Auth0
# https://auth0.com/docs/quickstart/backend/python
def check_authorization_header(self, request):
def check_authorization_header(
self, request: web.Request
) -> web.Response | dict[str, Any]:
"""Check the authorization header."""
if not (auth := request.headers.get(AUTHORIZATION)):
return self.json_message(
@@ -366,18 +378,18 @@ class HTML5PushCallbackView(HomeAssistantView):
)
return payload
async def post(self, request):
async def post(self, request: web.Request) -> web.Response:
"""Accept the POST request for push registrations event callback."""
auth_check = self.check_authorization_header(request)
if not isinstance(auth_check, dict):
return auth_check
try:
data = await request.json()
data: dict[str, str] = await request.json()
except ValueError:
return self.json_message("Invalid JSON", HTTPStatus.BAD_REQUEST)
event_payload = {
event_payload: dict[str, Any] = {
ATTR_TAG: data.get(ATTR_TAG),
ATTR_TYPE: data[ATTR_TYPE],
ATTR_TARGET: auth_check[ATTR_TARGET],
@@ -405,7 +417,14 @@ class HTML5PushCallbackView(HomeAssistantView):
class HTML5NotificationService(BaseNotificationService):
"""Implement the notification service for HTML5."""
def __init__(self, hass, vapid_prv, vapid_email, registrations, json_path):
def __init__(
self,
hass: HomeAssistant,
vapid_prv: str,
vapid_email: str,
registrations: dict[str, Registration],
json_path: str,
) -> None:
"""Initialize the service."""
self._vapid_prv = vapid_prv
self._vapid_email = vapid_email
@@ -414,7 +433,7 @@ class HTML5NotificationService(BaseNotificationService):
async def async_dismiss_message(service: ServiceCall) -> None:
"""Handle dismissing notification message service calls."""
kwargs = {}
kwargs: dict[str, Any] = {}
if self.targets is not None:
kwargs[ATTR_TARGET] = self.targets
@@ -433,19 +452,19 @@ class HTML5NotificationService(BaseNotificationService):
)
@property
def targets(self):
def targets(self) -> dict[str, str]:
"""Return a dictionary of registered targets."""
return {registration: registration for registration in self.registrations}
def dismiss(self, **kwargs):
def dismiss(self, **kwargs: Any) -> None:
"""Dismisses a notification."""
data = kwargs.get(ATTR_DATA)
tag = data.get(ATTR_TAG) if data else ""
data: dict[str, Any] | None = kwargs.get(ATTR_DATA)
tag: str = data.get(ATTR_TAG, "") if data else ""
payload = {ATTR_TAG: tag, ATTR_DISMISS: True, ATTR_DATA: {}}
self._push_message(payload, **kwargs)
async def async_dismiss(self, **kwargs):
async def async_dismiss(self, **kwargs) -> None:
"""Dismisses a notification.
This method must be run in the event loop.
@@ -455,7 +474,7 @@ class HTML5NotificationService(BaseNotificationService):
def send_message(self, message: str = "", **kwargs: Any) -> None:
"""Send a message to a user."""
tag = str(uuid.uuid4())
payload = {
payload: dict[str, Any] = {
"badge": "/static/images/notification-badge.png",
"body": message,
ATTR_DATA: {},
@@ -463,12 +482,12 @@ class HTML5NotificationService(BaseNotificationService):
ATTR_TAG: tag,
ATTR_TITLE: kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT),
}
if data := kwargs.get(ATTR_DATA):
data: dict[str, Any] | None = kwargs.get(ATTR_DATA)
if data:
# Pick out fields that should go into the notification directly vs
# into the notification data dictionary.
data_tmp = {}
data_tmp: dict[str, Any] = {}
for key, val in data.items():
if key in HTML5_SHOWNOTIFICATION_PARAMETERS:
@@ -486,12 +505,12 @@ class HTML5NotificationService(BaseNotificationService):
self._push_message(payload, **kwargs)
def _push_message(self, payload, **kwargs):
def _push_message(self, payload: dict[str, Any], **kwargs: Any) -> None:
"""Send the message."""
timestamp = int(time.time())
ttl = int(kwargs.get(ATTR_TTL, DEFAULT_TTL))
priority = kwargs.get(ATTR_PRIORITY, DEFAULT_PRIORITY)
priority: str = kwargs.get(ATTR_PRIORITY, DEFAULT_PRIORITY)
if priority not in ["normal", "high"]:
priority = DEFAULT_PRIORITY
payload["timestamp"] = timestamp * 1000 # Javascript ms since epoch
@@ -502,22 +521,23 @@ class HTML5NotificationService(BaseNotificationService):
for target in list(targets):
info = self.registrations.get(target)
try:
info = REGISTER_SCHEMA(info)
info = cast(Registration, REGISTER_SCHEMA(info))
except vol.Invalid:
_LOGGER.error(
"%s is not a valid HTML5 push notification target", target
)
continue
subscription = info[ATTR_SUBSCRIPTION]
subscription = info["subscription"]
payload[ATTR_DATA][ATTR_JWT] = add_jwt(
timestamp,
target,
payload[ATTR_TAG],
subscription[ATTR_KEYS][ATTR_AUTH],
subscription["keys"]["auth"],
)
webpusher = WebPusher(info[ATTR_SUBSCRIPTION])
endpoint = urlparse(subscription[ATTR_ENDPOINT])
webpusher = WebPusher(cast(dict[str, Any], info["subscription"]))
endpoint = urlparse(subscription["endpoint"])
vapid_claims = {
"sub": f"mailto:{self._vapid_email}",
"aud": f"{endpoint.scheme}://{endpoint.netloc}",
@@ -529,7 +549,10 @@ class HTML5NotificationService(BaseNotificationService):
data=json.dumps(payload), headers=vapid_headers, ttl=ttl
)
if response.status_code == 410:
if TYPE_CHECKING:
assert not isinstance(response, str)
if response.status_code == HTTPStatus.GONE:
_LOGGER.info("Notification channel has expired")
reg = self.registrations.pop(target)
try:
@@ -539,7 +562,7 @@ class HTML5NotificationService(BaseNotificationService):
_LOGGER.error("Error saving registration")
else:
_LOGGER.info("Configuration saved")
elif response.status_code > 399:
elif response.status_code >= HTTPStatus.BAD_REQUEST:
_LOGGER.error(
"There was an issue sending the notification %s: %s",
response.status_code,
@@ -547,7 +570,7 @@ class HTML5NotificationService(BaseNotificationService):
)
def add_jwt(timestamp, target, tag, jwt_secret):
def add_jwt(timestamp: int, target: str, tag: str, jwt_secret: str) -> str:
"""Create JWT json to put into payload."""
jwt_exp = datetime.fromtimestamp(timestamp) + timedelta(days=JWT_VALID_DAYS)

View File

@@ -7,11 +7,16 @@ incorrect behavior, and are thus not wanted in the demo integration.
from __future__ import annotations
import datetime
from functools import partial
from random import random
import voluptuous as vol
from homeassistant.components.labs import async_is_preview_feature_enabled, async_listen
from homeassistant.components.labs import (
EventLabsUpdatedData,
async_is_preview_feature_enabled,
async_subscribe_preview_feature,
)
from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance
from homeassistant.components.recorder.models import (
StatisticData,
@@ -128,16 +133,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# Subscribe to labs feature updates for kitchen_sink preview repair
entry.async_on_unload(
async_listen(
async_subscribe_preview_feature(
hass,
domain=DOMAIN,
preview_feature="special_repair",
listener=lambda: _async_update_special_repair(hass),
listener=partial(_async_update_special_repair, hass),
)
)
# Check if lab feature is currently enabled and create repair if so
_async_update_special_repair(hass)
await _async_update_special_repair(hass)
return True
@@ -166,15 +171,22 @@ async def async_remove_config_entry_device(
return True
@callback
def _async_update_special_repair(hass: HomeAssistant) -> None:
async def _async_update_special_repair(
hass: HomeAssistant,
event_data: EventLabsUpdatedData | None = None,
) -> None:
"""Create or delete the special repair issue.
Creates a repair issue when the special_repair lab feature is enabled,
and deletes it when disabled. This demonstrates how lab features can interact
with Home Assistant's repair system.
"""
if async_is_preview_feature_enabled(hass, DOMAIN, "special_repair"):
enabled = (
event_data["enabled"]
if event_data is not None
else async_is_preview_feature_enabled(hass, DOMAIN, "special_repair")
)
if enabled:
async_create_issue(
hass,
DOMAIN,

View File

@@ -11,7 +11,7 @@
"loggers": ["xknx", "xknxproject"],
"quality_scale": "platinum",
"requirements": [
"xknx==3.14.0",
"xknx==3.15.0",
"xknxproject==3.8.2",
"knx-frontend==2026.2.13.222258"
],

View File

@@ -22,7 +22,7 @@ from homeassistant.components.cover import (
)
from homeassistant.components.number import NumberMode
from homeassistant.components.sensor import (
CONF_STATE_CLASS,
CONF_STATE_CLASS as CONF_SENSOR_STATE_CLASS,
DEVICE_CLASSES_SCHEMA as SENSOR_DEVICE_CLASSES_SCHEMA,
STATE_CLASSES_SCHEMA,
)
@@ -64,6 +64,7 @@ from .const import (
NumberConf,
SceneConf,
)
from .dpt import get_supported_dpts
from .validation import (
backwards_compatible_xknx_climate_enum_member,
dpt_base_type_validator,
@@ -74,6 +75,7 @@ from .validation import (
string_type_validator,
sync_state_validator,
validate_number_attributes,
validate_sensor_attributes,
)
@@ -143,6 +145,13 @@ def select_options_sub_validator(entity_config: OrderedDict) -> OrderedDict:
return entity_config
def _sensor_attribute_sub_validator(config: dict) -> dict:
"""Validate that state_class is compatible with device_class and unit_of_measurement."""
transcoder: type[DPTBase] = DPTBase.parse_transcoder(config[CONF_TYPE]) # type: ignore[assignment] # already checked in sensor_type_validator
dpt_metadata = get_supported_dpts()[transcoder.dpt_number_str()]
return validate_sensor_attributes(dpt_metadata, config)
#########
# EVENT
#########
@@ -848,17 +857,20 @@ class SensorSchema(KNXPlatformSchema):
CONF_SYNC_STATE = CONF_SYNC_STATE
DEFAULT_NAME = "KNX Sensor"
ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_ALWAYS_CALLBACK, default=False): cv.boolean,
vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
vol.Required(CONF_TYPE): sensor_type_validator,
vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
ENTITY_SCHEMA = vol.All(
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): sync_state_validator,
vol.Optional(CONF_ALWAYS_CALLBACK, default=False): cv.boolean,
vol.Optional(CONF_SENSOR_STATE_CLASS): STATE_CLASSES_SCHEMA,
vol.Required(CONF_TYPE): sensor_type_validator,
vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
}
),
_sensor_attribute_sub_validator,
)

View File

@@ -213,18 +213,22 @@ class KnxYamlSensor(_KnxSensor, KnxYamlEntity):
value_type=config[CONF_TYPE],
),
)
dpt_string = self._device.sensor_value.dpt_class.dpt_number_str()
dpt_info = get_supported_dpts()[dpt_string]
if device_class := config.get(CONF_DEVICE_CLASS):
self._attr_device_class = device_class
else:
self._attr_device_class = try_parse_enum(
SensorDeviceClass, self._device.ha_device_class()
)
self._attr_device_class = dpt_info["sensor_device_class"]
self._attr_state_class = (
config.get(CONF_STATE_CLASS) or dpt_info["sensor_state_class"]
)
self._attr_native_unit_of_measurement = dpt_info["unit"]
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
self._attr_native_unit_of_measurement = self._device.unit_of_measurement()
self._attr_state_class = config.get(CONF_STATE_CLASS)
self._attr_extra_state_attributes = {}

View File

@@ -13,9 +13,7 @@ from homeassistant.components.number import (
)
from homeassistant.components.sensor import (
CONF_STATE_CLASS as CONF_SENSOR_STATE_CLASS,
DEVICE_CLASS_STATE_CLASSES,
DEVICE_CLASS_UNITS as SENSOR_DEVICE_CLASS_UNITS,
STATE_CLASS_UNITS,
SensorDeviceClass,
SensorStateClass,
)
@@ -52,7 +50,7 @@ from ..const import (
SceneConf,
)
from ..dpt import get_supported_dpts
from ..validation import validate_number_attributes
from ..validation import validate_number_attributes, validate_sensor_attributes
from .const import (
CONF_ALWAYS_CALLBACK,
CONF_COLOR,
@@ -684,62 +682,11 @@ CLIMATE_KNX_SCHEMA = vol.Schema(
)
def _validate_sensor_attributes(config: dict) -> dict:
def _sensor_attribute_sub_validator(config: dict) -> dict:
"""Validate that state_class is compatible with device_class and unit_of_measurement."""
dpt = config[CONF_GA_SENSOR][CONF_DPT]
dpt_metadata = get_supported_dpts()[dpt]
state_class = config.get(
CONF_SENSOR_STATE_CLASS,
dpt_metadata["sensor_state_class"],
)
device_class = config.get(
CONF_DEVICE_CLASS,
dpt_metadata["sensor_device_class"],
)
unit_of_measurement = config.get(
CONF_UNIT_OF_MEASUREMENT,
dpt_metadata["unit"],
)
if (
state_class
and device_class
and (state_classes := DEVICE_CLASS_STATE_CLASSES.get(device_class)) is not None
and state_class not in state_classes
):
raise vol.Invalid(
f"State class '{state_class}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, state_classes), key=str.casefold))}",
path=[CONF_SENSOR_STATE_CLASS],
)
if (
device_class
and (d_c_units := SENSOR_DEVICE_CLASS_UNITS.get(device_class)) is not None
and unit_of_measurement not in d_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, d_c_units), key=str.casefold))}",
path=(
[CONF_DEVICE_CLASS]
if CONF_DEVICE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
if (
state_class
and (s_c_units := STATE_CLASS_UNITS.get(state_class)) is not None
and unit_of_measurement not in s_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for state class '{state_class}'. "
f"Valid options are: {', '.join(sorted(map(str, s_c_units), key=str.casefold))}",
path=(
[CONF_SENSOR_STATE_CLASS]
if CONF_SENSOR_STATE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
return config
return validate_sensor_attributes(dpt_metadata, config)
SENSOR_KNX_SCHEMA = AllSerializeFirst(
@@ -788,7 +735,7 @@ SENSOR_KNX_SCHEMA = AllSerializeFirst(
),
},
),
_validate_sensor_attributes,
_sensor_attribute_sub_validator,
)
KNX_SCHEMA_FOR_PLATFORM = {

View File

@@ -45,6 +45,7 @@ class TelegramDict(DecodedTelegramPayload):
"""Represent a Telegram as a dict."""
# this has to be in sync with the frontend implementation
data_secure: bool | None
destination: str
destination_name: str
direction: str
@@ -153,6 +154,7 @@ class Telegrams:
value = _serializable_decoded_data(telegram.decoded_data.value)
return TelegramDict(
data_secure=telegram.data_secure,
destination=f"{telegram.destination_address}",
destination_name=dst_name,
direction=telegram.direction.value,

View File

@@ -14,11 +14,17 @@ from xknx.telegram.address import IndividualAddress, parse_device_group_address
from homeassistant.components.number import (
DEVICE_CLASS_UNITS as NUMBER_DEVICE_CLASS_UNITS,
)
from homeassistant.components.sensor import (
CONF_STATE_CLASS as CONF_SENSOR_STATE_CLASS,
DEVICE_CLASS_STATE_CLASSES,
DEVICE_CLASS_UNITS,
STATE_CLASS_UNITS,
)
from homeassistant.const import CONF_DEVICE_CLASS, CONF_UNIT_OF_MEASUREMENT
from homeassistant.helpers import config_validation as cv
from .const import NumberConf
from .dpt import get_supported_dpts
from .dpt import DPTInfo, get_supported_dpts
def dpt_subclass_validator(dpt_base_class: type[DPTBase]) -> Callable[[Any], str | int]:
@@ -219,3 +225,65 @@ def validate_number_attributes(
)
return config
def validate_sensor_attributes(
dpt_info: DPTInfo, config: dict[str, Any]
) -> dict[str, Any]:
"""Validate that state_class is compatible with device_class and unit_of_measurement.
Works for both, UI and YAML configuration schema since they
share same names for all tested attributes.
"""
state_class = config.get(
CONF_SENSOR_STATE_CLASS,
dpt_info["sensor_state_class"],
)
device_class = config.get(
CONF_DEVICE_CLASS,
dpt_info["sensor_device_class"],
)
unit_of_measurement = config.get(
CONF_UNIT_OF_MEASUREMENT,
dpt_info["unit"],
)
if (
state_class
and device_class
and (state_classes := DEVICE_CLASS_STATE_CLASSES.get(device_class)) is not None
and state_class not in state_classes
):
raise vol.Invalid(
f"State class '{state_class}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, state_classes), key=str.casefold))}",
path=[CONF_SENSOR_STATE_CLASS],
)
if (
device_class
and (d_c_units := DEVICE_CLASS_UNITS.get(device_class)) is not None
and unit_of_measurement not in d_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for device class '{device_class}'. "
f"Valid options are: {', '.join(sorted(map(str, d_c_units), key=str.casefold))}",
path=(
[CONF_DEVICE_CLASS]
if CONF_DEVICE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
if (
state_class
and (s_c_units := STATE_CLASS_UNITS.get(state_class)) is not None
and unit_of_measurement not in s_c_units
):
raise vol.Invalid(
f"Unit of measurement '{unit_of_measurement}' is not valid for state class '{state_class}'. "
f"Valid options are: {', '.join(sorted(map(str, s_c_units), key=str.casefold))}",
path=(
[CONF_SENSOR_STATE_CLASS]
if CONF_SENSOR_STATE_CLASS in config
else [CONF_UNIT_OF_MEASUREMENT]
),
)
return config

View File

@@ -7,6 +7,7 @@ from typing import Any
from homeassistant.const import EVENT_LABS_UPDATED
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers.frame import report_usage
from .const import LABS_DATA
from .models import EventLabsUpdatedData
@@ -79,6 +80,8 @@ def async_listen(
) -> Callable[[], None]:
"""Listen for changes to a specific preview feature.
Deprecated: use async_subscribe_preview_feature instead.
Args:
hass: HomeAssistant instance
domain: Integration domain
@@ -88,6 +91,11 @@ def async_listen(
Returns:
Callable to unsubscribe from the listener
"""
report_usage(
"calls `async_listen` which is deprecated, "
"use `async_subscribe_preview_feature` instead",
breaks_in_ha_version="2027.3.0",
)
async def _listener(_event_data: EventLabsUpdatedData) -> None:
listener()

View File

@@ -10,5 +10,5 @@
"iot_class": "local_polling",
"loggers": ["pypck"],
"quality_scale": "silver",
"requirements": ["pypck==0.9.10", "lcn-frontend==0.2.7"]
"requirements": ["pypck==0.9.11", "lcn-frontend==0.2.7"]
}

View File

@@ -15,11 +15,7 @@ from pyliebherrhomeapi import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
ConfigEntryNotReady,
)
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
@@ -57,7 +53,7 @@ class LiebherrCoordinator(DataUpdateCoordinator[DeviceState]):
try:
await self.client.get_device(self.device_id)
except LiebherrAuthenticationError as err:
raise ConfigEntryError("Invalid API key") from err
raise ConfigEntryAuthFailed("Invalid API key") from err
except LiebherrConnectionError as err:
raise ConfigEntryNotReady(
f"Failed to connect to device {self.device_id}: {err}"

View File

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/local_calendar",
"iot_class": "local_polling",
"loggers": ["ical"],
"requirements": ["ical==12.1.3"]
"requirements": ["ical==13.2.0"]
}

View File

@@ -5,5 +5,5 @@
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/local_todo",
"iot_class": "local_polling",
"requirements": ["ical==12.1.3"]
"requirements": ["ical==13.2.0"]
}

View File

@@ -9,6 +9,7 @@ from mastodon.Mastodon import (
Mastodon,
MastodonError,
MastodonNotFoundError,
MastodonUnauthorizedError,
)
from homeassistant.const import (
@@ -18,7 +19,7 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import slugify
@@ -48,6 +49,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: MastodonConfigEntry) ->
entry,
)
except MastodonUnauthorizedError as error:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_failed",
) from error
except MastodonError as ex:
raise ConfigEntryNotReady("Failed to connect") from ex

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from mastodon.Mastodon import (
@@ -43,6 +44,28 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
}
)
REAUTH_SCHEMA = vol.Schema(
{
vol.Required(
CONF_ACCESS_TOKEN,
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
}
)
STEP_RECONFIGURE_SCHEMA = vol.Schema(
{
vol.Required(
CONF_CLIENT_ID,
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
vol.Required(
CONF_CLIENT_SECRET,
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
vol.Required(
CONF_ACCESS_TOKEN,
): TextSelector(TextSelectorConfig(type=TextSelectorType.PASSWORD)),
}
)
EXAMPLE_URL = "https://mastodon.social"
def base_url_from_url(url: str) -> str:
@@ -50,18 +73,26 @@ def base_url_from_url(url: str) -> str:
return str(URL(url).origin())
def remove_email_link(account_name: str) -> str:
"""Remove email link from account name."""
# Replaces the @ with a HTML entity to prevent mailto links.
return account_name.replace("@", "&#64;")
class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
VERSION = 1
MINOR_VERSION = 2
base_url: str
client_id: str
client_secret: str
access_token: str
def check_connection(
self,
base_url: str,
client_id: str,
client_secret: str,
access_token: str,
) -> tuple[
InstanceV2 | Instance | None,
Account | None,
@@ -70,10 +101,10 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
"""Check connection to the Mastodon instance."""
try:
client = create_mastodon_client(
base_url,
client_id,
client_secret,
access_token,
self.base_url,
self.client_id,
self.client_secret,
self.access_token,
)
try:
instance = client.instance_v2()
@@ -117,12 +148,13 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
if user_input:
user_input[CONF_BASE_URL] = base_url_from_url(user_input[CONF_BASE_URL])
self.base_url = user_input[CONF_BASE_URL]
self.client_id = user_input[CONF_CLIENT_ID]
self.client_secret = user_input[CONF_CLIENT_SECRET]
self.access_token = user_input[CONF_ACCESS_TOKEN]
instance, account, errors = await self.hass.async_add_executor_job(
self.check_connection,
user_input[CONF_BASE_URL],
user_input[CONF_CLIENT_ID],
user_input[CONF_CLIENT_SECRET],
user_input[CONF_ACCESS_TOKEN],
self.check_connection
)
if not errors:
@@ -137,5 +169,81 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN):
return self.show_user_form(
user_input,
errors,
description_placeholders={"example_url": "https://mastodon.social"},
description_placeholders={"example_url": EXAMPLE_URL},
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error."""
self.base_url = entry_data[CONF_BASE_URL]
self.client_id = entry_data[CONF_CLIENT_ID]
self.client_secret = entry_data[CONF_CLIENT_SECRET]
self.access_token = entry_data[CONF_ACCESS_TOKEN]
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauth dialog."""
errors: dict[str, str] = {}
if user_input:
self.access_token = user_input[CONF_ACCESS_TOKEN]
instance, account, errors = await self.hass.async_add_executor_job(
self.check_connection
)
if not errors:
name = construct_mastodon_username(instance, account)
await self.async_set_unique_id(slugify(name))
self._abort_if_unique_id_mismatch(reason="wrong_account")
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates={CONF_ACCESS_TOKEN: user_input[CONF_ACCESS_TOKEN]},
)
account_name = self._get_reauth_entry().title
return self.async_show_form(
step_id="reauth_confirm",
data_schema=REAUTH_SCHEMA,
errors=errors,
description_placeholders={
"account_name": remove_email_link(account_name),
},
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of the integration."""
errors: dict[str, str] = {}
reconfigure_entry = self._get_reconfigure_entry()
if user_input:
self.base_url = reconfigure_entry.data[CONF_BASE_URL]
self.client_id = user_input[CONF_CLIENT_ID]
self.client_secret = user_input[CONF_CLIENT_SECRET]
self.access_token = user_input[CONF_ACCESS_TOKEN]
instance, account, errors = await self.hass.async_add_executor_job(
self.check_connection
)
if not errors:
name = construct_mastodon_username(instance, account)
await self.async_set_unique_id(slugify(name))
self._abort_if_unique_id_mismatch(reason="wrong_account")
return self.async_update_reload_and_abort(
reconfigure_entry,
data_updates={
CONF_CLIENT_ID: user_input[CONF_CLIENT_ID],
CONF_CLIENT_SECRET: user_input[CONF_CLIENT_SECRET],
CONF_ACCESS_TOKEN: user_input[CONF_ACCESS_TOKEN],
},
)
account_name = reconfigure_entry.title
return self.async_show_form(
step_id="reconfigure",
data_schema=STEP_RECONFIGURE_SCHEMA,
errors=errors,
description_placeholders={
"account_name": remove_email_link(account_name),
},
)

View File

@@ -12,6 +12,7 @@ DATA_HASS_CONFIG = "mastodon_hass_config"
DEFAULT_URL: Final = "https://mastodon.social"
DEFAULT_NAME: Final = "Mastodon"
ATTR_ACCOUNT_NAME = "account_name"
ATTR_STATUS = "status"
ATTR_VISIBILITY = "visibility"
ATTR_IDEMPOTENCY_KEY = "idempotency_key"

View File

@@ -32,6 +32,9 @@
}
},
"services": {
"get_account": {
"service": "mdi:account-search"
},
"post": {
"service": "mdi:message-text"
}

View File

@@ -7,6 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["mastodon"],
"quality_scale": "bronze",
"quality_scale": "silver",
"requirements": ["Mastodon.py==2.1.2"]
}

View File

@@ -34,10 +34,7 @@ rules:
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow:
status: todo
comment: |
Waiting to move to oAuth.
reauthentication-flow: done
test-coverage: done
# Gold
devices: done
@@ -67,10 +64,7 @@ rules:
entity-translations: done
exception-translations: done
icon-translations: done
reconfiguration-flow:
status: todo
comment: |
Waiting to move to OAuth.
reconfiguration-flow: done
repair-issues: done
stale-devices:
status: exempt

View File

@@ -5,15 +5,22 @@ from functools import partial
from typing import Any
from mastodon import Mastodon
from mastodon.Mastodon import MastodonAPIError, MediaAttachment
from mastodon.Mastodon import Account, MastodonAPIError, MediaAttachment
import voluptuous as vol
from homeassistant.const import ATTR_CONFIG_ENTRY_ID
from homeassistant.core import HomeAssistant, ServiceCall, ServiceResponse, callback
from homeassistant.core import (
HomeAssistant,
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import service
from .const import (
ATTR_ACCOUNT_NAME,
ATTR_CONTENT_WARNING,
ATTR_IDEMPOTENCY_KEY,
ATTR_LANGUAGE,
@@ -37,6 +44,13 @@ class StatusVisibility(StrEnum):
DIRECT = "direct"
SERVICE_GET_ACCOUNT = "get_account"
SERVICE_GET_ACCOUNT_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY_ID): str,
vol.Required(ATTR_ACCOUNT_NAME): str,
}
)
SERVICE_POST = "post"
SERVICE_POST_SCHEMA = vol.Schema(
{
@@ -56,95 +70,127 @@ SERVICE_POST_SCHEMA = vol.Schema(
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up the services for the Mastodon integration."""
hass.services.async_register(
DOMAIN,
SERVICE_GET_ACCOUNT,
_async_get_account,
schema=SERVICE_GET_ACCOUNT_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN, SERVICE_POST, _async_post, schema=SERVICE_POST_SCHEMA
)
async def async_post(call: ServiceCall) -> ServiceResponse:
"""Post a status."""
entry: MastodonConfigEntry = service.async_get_config_entry(
hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
async def _async_get_account(call: ServiceCall) -> ServiceResponse:
"""Get account information."""
entry: MastodonConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
)
client = entry.runtime_data.client
account_name: str = call.data[ATTR_ACCOUNT_NAME]
try:
account: Account = await call.hass.async_add_executor_job(
partial(client.account_lookup, acct=account_name)
)
client = entry.runtime_data.client
except MastodonAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unable_to_get_account",
translation_placeholders={"account_name": account_name},
) from err
status: str = call.data[ATTR_STATUS]
return {"account": account}
visibility: str | None = (
StatusVisibility(call.data[ATTR_VISIBILITY])
if ATTR_VISIBILITY in call.data
else None
async def _async_post(call: ServiceCall) -> ServiceResponse:
"""Post a status."""
entry: MastodonConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY_ID]
)
client = entry.runtime_data.client
status: str = call.data[ATTR_STATUS]
visibility: str | None = (
StatusVisibility(call.data[ATTR_VISIBILITY])
if ATTR_VISIBILITY in call.data
else None
)
idempotency_key: str | None = call.data.get(ATTR_IDEMPOTENCY_KEY)
spoiler_text: str | None = call.data.get(ATTR_CONTENT_WARNING)
language: str | None = call.data.get(ATTR_LANGUAGE)
media_path: str | None = call.data.get(ATTR_MEDIA)
media_description: str | None = call.data.get(ATTR_MEDIA_DESCRIPTION)
media_warning: str | None = call.data.get(ATTR_MEDIA_WARNING)
if idempotency_key and len(idempotency_key) < 4:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="idempotency_key_too_short",
)
idempotency_key: str | None = call.data.get(ATTR_IDEMPOTENCY_KEY)
spoiler_text: str | None = call.data.get(ATTR_CONTENT_WARNING)
language: str | None = call.data.get(ATTR_LANGUAGE)
media_path: str | None = call.data.get(ATTR_MEDIA)
media_description: str | None = call.data.get(ATTR_MEDIA_DESCRIPTION)
media_warning: str | None = call.data.get(ATTR_MEDIA_WARNING)
if idempotency_key and len(idempotency_key) < 4:
raise ServiceValidationError(
await call.hass.async_add_executor_job(
partial(
_post,
hass=call.hass,
client=client,
status=status,
visibility=visibility,
idempotency_key=idempotency_key,
spoiler_text=spoiler_text,
language=language,
media_path=media_path,
media_description=media_description,
sensitive=media_warning,
)
)
return None
def _post(hass: HomeAssistant, client: Mastodon, **kwargs: Any) -> None:
"""Post to Mastodon."""
media_data: MediaAttachment | None = None
media_path = kwargs.get("media_path")
if media_path:
if not hass.config.is_allowed_path(media_path):
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="idempotency_key_too_short",
translation_key="not_whitelisted_directory",
translation_placeholders={"media": media_path},
)
await hass.async_add_executor_job(
partial(
_post,
client=client,
status=status,
visibility=visibility,
idempotency_key=idempotency_key,
spoiler_text=spoiler_text,
language=language,
media_path=media_path,
media_description=media_description,
sensitive=media_warning,
)
)
return None
def _post(client: Mastodon, **kwargs: Any) -> None:
"""Post to Mastodon."""
media_data: MediaAttachment | None = None
media_path = kwargs.get("media_path")
if media_path:
if not hass.config.is_allowed_path(media_path):
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="not_whitelisted_directory",
translation_placeholders={"media": media_path},
)
media_type = get_media_type(media_path)
media_description = kwargs.get("media_description")
try:
media_data = client.media_post(
media_file=media_path,
mime_type=media_type,
description=media_description,
)
except MastodonAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unable_to_upload_image",
translation_placeholders={"media_path": media_path},
) from err
kwargs.pop("media_path", None)
kwargs.pop("media_description", None)
media_type = get_media_type(media_path)
media_description = kwargs.get("media_description")
try:
media_ids: str | None = None
if media_data:
media_ids = media_data.id
client.status_post(media_ids=media_ids, **kwargs)
media_data = client.media_post(
media_file=media_path,
mime_type=media_type,
description=media_description,
)
except MastodonAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unable_to_send_message",
translation_key="unable_to_upload_image",
translation_placeholders={"media_path": media_path},
) from err
hass.services.async_register(
DOMAIN, SERVICE_POST, async_post, schema=SERVICE_POST_SCHEMA
)
kwargs.pop("media_path", None)
kwargs.pop("media_description", None)
media_ids: str | None = None
if media_data:
media_ids = media_data.id
try:
client.status_post(media_ids=media_ids, **kwargs)
except MastodonAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="unable_to_send_message",
) from err

View File

@@ -1,3 +1,14 @@
get_account:
fields:
config_entry_id:
required: true
selector:
config_entry:
integration: mastodon
account_name:
required: true
selector:
text:
post:
fields:
config_entry_id:

View File

@@ -1,7 +1,11 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"wrong_account": "You have to use the same account that was used to configure the integration."
},
"error": {
"network_error": "The Mastodon instance was not found.",
@@ -9,6 +13,28 @@
"unknown": "Unknown error occurred when connecting to the Mastodon instance."
},
"step": {
"reauth_confirm": {
"data": {
"access_token": "[%key:common::config_flow::data::access_token%]"
},
"data_description": {
"access_token": "[%key:component::mastodon::config::step::user::data_description::access_token%]"
},
"description": "Please reauthenticate {account_name} with Mastodon."
},
"reconfigure": {
"data": {
"access_token": "[%key:common::config_flow::data::access_token%]",
"client_id": "[%key:component::mastodon::config::step::user::data::client_id%]",
"client_secret": "[%key:component::mastodon::config::step::user::data::client_secret%]"
},
"data_description": {
"access_token": "[%key:component::mastodon::config::step::user::data_description::access_token%]",
"client_id": "[%key:component::mastodon::config::step::user::data_description::client_id%]",
"client_secret": "[%key:component::mastodon::config::step::user::data_description::client_secret%]"
},
"description": "Reconfigure {account_name} with Mastodon."
},
"user": {
"data": {
"access_token": "[%key:common::config_flow::data::access_token%]",
@@ -69,12 +95,18 @@
}
},
"exceptions": {
"auth_failed": {
"message": "Authentication failed, please reauthenticate with Mastodon."
},
"idempotency_key_too_short": {
"message": "Idempotency key must be at least 4 characters long."
},
"not_whitelisted_directory": {
"message": "{media} is not a whitelisted directory."
},
"unable_to_get_account": {
"message": "Unable to get account \"{account_name}\"."
},
"unable_to_send_message": {
"message": "Unable to send message."
},
@@ -93,6 +125,20 @@
}
},
"services": {
"get_account": {
"description": "Gets information about a Mastodon account.",
"fields": {
"account_name": {
"description": "The Mastodon account username (e.g. @user@instance).",
"name": "Account name"
},
"config_entry_id": {
"description": "Select the Mastodon instance to use to search.",
"name": "Mastodon instance"
}
},
"name": "Get account"
},
"post": {
"description": "Posts a status on your Mastodon account.",
"fields": {

View File

@@ -124,8 +124,13 @@ class MatterEntity(Entity):
and ep.has_attribute(None, entity_info.primary_attribute)
):
self._name_postfix = str(self._endpoint.endpoint_id)
if self._platform_translation_key and not self.translation_key:
self._attr_translation_key = self._platform_translation_key
# Always set translation_key for state_attributes translations.
# For primary entities (no postfix), suppress the translated name,
# so only the device name is used.
if self._platform_translation_key and not self.translation_key:
self._attr_translation_key = self._platform_translation_key
if not self._name_postfix:
self._attr_name = None
# Matter labels can be used to modify the entity name
# by appending the text.

View File

@@ -722,8 +722,8 @@ DISCOVERY_SCHEMAS = [
platform=Platform.SENSOR,
entity_description=MatterSensorEntityDescription(
key="NitrogenDioxideSensor",
translation_key="nitrogen_dioxide",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
device_class=SensorDeviceClass.NITROGEN_DIOXIDE,
state_class=SensorStateClass.MEASUREMENT,
),
entity_class=MatterSensor,

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["aiomealie==1.2.0"]
"requirements": ["aiomealie==1.2.1"]
}

View File

@@ -32,6 +32,12 @@
"core_temperature": {
"default": "mdi:thermometer-probe"
},
"degreasing_counter": {
"default": "mdi:hydro-power"
},
"descaling_counter": {
"default": "mdi:water-alert-outline"
},
"drying_step": {
"default": "mdi:water-outline"
},
@@ -44,6 +50,9 @@
"finish": {
"default": "mdi:clock-end"
},
"milk_cleaning_counter": {
"default": "mdi:pipe"
},
"plate": {
"default": "mdi:circle-outline",
"state": {

View File

@@ -704,7 +704,10 @@ SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleDevice], ...]] = (
POLLED_SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleFillingLevel], ...]] = (
MieleSensorDefinition(
types=(MieleAppliance.WASHING_MACHINE,),
types=(
MieleAppliance.WASHING_MACHINE,
MieleAppliance.WASHER_DRYER,
),
description=MieleSensorDescription[MieleFillingLevel](
key="twin_dos_1_level",
translation_key="twin_dos_1_level",
@@ -714,7 +717,10 @@ POLLED_SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleFillingLevel], ...]]
),
),
MieleSensorDefinition(
types=(MieleAppliance.WASHING_MACHINE,),
types=(
MieleAppliance.WASHING_MACHINE,
MieleAppliance.WASHER_DRYER,
),
description=MieleSensorDescription[MieleFillingLevel](
key="twin_dos_2_level",
translation_key="twin_dos_2_level",
@@ -753,6 +759,36 @@ POLLED_SENSOR_TYPES: Final[tuple[MieleSensorDefinition[MieleFillingLevel], ...]]
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.COFFEE_SYSTEM,),
description=MieleSensorDescription[MieleFillingLevel](
key="descaling_counter",
translation_key="descaling_counter",
value_fn=lambda value: value.descaling_counter,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.COFFEE_SYSTEM,),
description=MieleSensorDescription[MieleFillingLevel](
key="degreasing_counter",
translation_key="degreasing_counter",
value_fn=lambda value: value.degreasing_counter,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
MieleSensorDefinition(
types=(MieleAppliance.COFFEE_SYSTEM,),
description=MieleSensorDescription[MieleFillingLevel](
key="milk_cleaning_counter",
translation_key="milk_cleaning_counter",
value_fn=lambda value: value.milk_cleaning_counter,
state_class=SensorStateClass.TOTAL_INCREASING,
entity_category=EntityCategory.DIAGNOSTIC,
),
),
)

View File

@@ -206,6 +206,12 @@
"core_temperature": {
"name": "Core temperature"
},
"degreasing_counter": {
"name": "Degreasing cycles"
},
"descaling_counter": {
"name": "Descaling cycles"
},
"drying_step": {
"name": "Drying step",
"state": {
@@ -231,6 +237,9 @@
"finish": {
"name": "Finish"
},
"milk_cleaning_counter": {
"name": "Milk pipework cleaning cycles"
},
"plate": {
"name": "Plate {plate_no}",
"state": {

View File

@@ -19,7 +19,6 @@ ABBREVIATIONS = {
"bri_tpl": "brightness_template",
"bri_val_tpl": "brightness_value_template",
"clr_temp_cmd_tpl": "color_temp_command_template",
"clrm": "color_mode",
"clrm_stat_t": "color_mode_state_topic",
"clrm_val_tpl": "color_mode_value_template",
"clr_temp_cmd_t": "color_temp_command_topic",

View File

@@ -71,7 +71,6 @@ CONF_BRIGHTNESS_SCALE = "brightness_scale"
CONF_BRIGHTNESS_STATE_TOPIC = "brightness_state_topic"
CONF_BRIGHTNESS_TEMPLATE = "brightness_template"
CONF_BRIGHTNESS_VALUE_TEMPLATE = "brightness_value_template"
CONF_COLOR_MODE = "color_mode"
CONF_COLOR_MODE_STATE_TOPIC = "color_mode_state_topic"
CONF_COLOR_MODE_VALUE_TEMPLATE = "color_mode_value_template"
CONF_COLOR_TEMP_COMMAND_TEMPLATE = "color_temp_command_template"

View File

@@ -35,13 +35,9 @@ from homeassistant.components.light import (
)
from homeassistant.const import (
CONF_BRIGHTNESS,
CONF_COLOR_TEMP,
CONF_EFFECT,
CONF_HS,
CONF_NAME,
CONF_OPTIMISTIC,
CONF_RGB,
CONF_XY,
STATE_ON,
)
from homeassistant.core import callback
@@ -55,7 +51,6 @@ from homeassistant.util.json import json_loads_object
from .. import subscription
from ..config import DEFAULT_QOS, DEFAULT_RETAIN, MQTT_RW_SCHEMA
from ..const import (
CONF_COLOR_MODE,
CONF_COLOR_TEMP_KELVIN,
CONF_COMMAND_TOPIC,
CONF_EFFECT_LIST,
@@ -96,7 +91,7 @@ DEFAULT_NAME = "MQTT JSON Light"
DEFAULT_FLASH = True
DEFAULT_TRANSITION = True
_PLATFORM_SCHEMA_BASE = (
PLATFORM_SCHEMA_MODERN_JSON = (
MQTT_RW_SCHEMA.extend(
{
vol.Optional(CONF_BRIGHTNESS, default=DEFAULT_BRIGHTNESS): cv.boolean,
@@ -139,24 +134,8 @@ _PLATFORM_SCHEMA_BASE = (
.extend(MQTT_LIGHT_SCHEMA_SCHEMA.schema)
)
# Support for legacy color_mode handling was removed with HA Core 2025.3
# The removed attributes can be removed from the schema's from HA Core 2026.3
DISCOVERY_SCHEMA_JSON = vol.All(
cv.removed(CONF_COLOR_MODE, raise_if_present=False),
cv.removed(CONF_COLOR_TEMP, raise_if_present=False),
cv.removed(CONF_HS, raise_if_present=False),
cv.removed(CONF_RGB, raise_if_present=False),
cv.removed(CONF_XY, raise_if_present=False),
_PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA),
)
PLATFORM_SCHEMA_MODERN_JSON = vol.All(
cv.removed(CONF_COLOR_MODE),
cv.removed(CONF_COLOR_TEMP),
cv.removed(CONF_HS),
cv.removed(CONF_RGB),
cv.removed(CONF_XY),
_PLATFORM_SCHEMA_BASE,
PLATFORM_SCHEMA_MODERN_JSON.extend({}, extra=vol.REMOVE_EXTRA),
)

View File

@@ -0,0 +1,28 @@
"""The MTA New York City Transit integration."""
from __future__ import annotations
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from .const import DOMAIN as DOMAIN
from .coordinator import MTAConfigEntry, MTADataUpdateCoordinator
PLATFORMS = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: MTAConfigEntry) -> bool:
"""Set up MTA from a config entry."""
coordinator = MTADataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: MTAConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -0,0 +1,151 @@
"""Config flow for MTA New York City Transit integration."""
from __future__ import annotations
import logging
from typing import Any
from pymta import LINE_TO_FEED, MTAFeedError, SubwayFeed
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from .const import CONF_LINE, CONF_STOP_ID, CONF_STOP_NAME, DOMAIN
_LOGGER = logging.getLogger(__name__)
class MTAConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for MTA."""
VERSION = 1
MINOR_VERSION = 1
def __init__(self) -> None:
"""Initialize the config flow."""
self.data: dict[str, Any] = {}
self.stops: dict[str, str] = {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self.data[CONF_LINE] = user_input[CONF_LINE]
return await self.async_step_stop()
lines = sorted(LINE_TO_FEED.keys())
line_options = [SelectOptionDict(value=line, label=line) for line in lines]
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_LINE): SelectSelector(
SelectSelectorConfig(
options=line_options,
mode=SelectSelectorMode.DROPDOWN,
)
),
}
),
errors=errors,
)
async def async_step_stop(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the stop step."""
errors: dict[str, str] = {}
if user_input is not None:
stop_id = user_input[CONF_STOP_ID]
self.data[CONF_STOP_ID] = stop_id
stop_name = self.stops.get(stop_id, stop_id)
self.data[CONF_STOP_NAME] = stop_name
unique_id = f"{self.data[CONF_LINE]}_{stop_id}"
await self.async_set_unique_id(unique_id)
self._abort_if_unique_id_configured()
# Test connection to real-time GTFS-RT feed (different from static GTFS used by get_stops)
try:
await self._async_test_connection()
except MTAFeedError:
errors["base"] = "cannot_connect"
else:
title = f"{self.data[CONF_LINE]} Line - {stop_name}"
return self.async_create_entry(
title=title,
data=self.data,
)
try:
self.stops = await self._async_get_stops(self.data[CONF_LINE])
except MTAFeedError:
_LOGGER.exception("Error fetching stops for line %s", self.data[CONF_LINE])
return self.async_abort(reason="cannot_connect")
if not self.stops:
_LOGGER.error("No stops found for line %s", self.data[CONF_LINE])
return self.async_abort(reason="no_stops")
stop_options = [
SelectOptionDict(value=stop_id, label=stop_name)
for stop_id, stop_name in sorted(self.stops.items(), key=lambda x: x[1])
]
return self.async_show_form(
step_id="stop",
data_schema=vol.Schema(
{
vol.Required(CONF_STOP_ID): SelectSelector(
SelectSelectorConfig(
options=stop_options,
mode=SelectSelectorMode.DROPDOWN,
)
),
}
),
errors=errors,
description_placeholders={"line": self.data[CONF_LINE]},
)
async def _async_get_stops(self, line: str) -> dict[str, str]:
"""Get stops for a line from the library."""
feed_id = SubwayFeed.get_feed_id_for_route(line)
session = aiohttp_client.async_get_clientsession(self.hass)
subway_feed = SubwayFeed(feed_id=feed_id, session=session)
stops_list = await subway_feed.get_stops(route_id=line)
stops = {}
for stop in stops_list:
stop_id = stop["stop_id"]
stop_name = stop["stop_name"]
# Add direction label (stop_id always ends in N or S)
direction = stop_id[-1]
stops[stop_id] = f"{stop_name} ({direction} direction)"
return stops
async def _async_test_connection(self) -> None:
"""Test connection to MTA feed."""
feed_id = SubwayFeed.get_feed_id_for_route(self.data[CONF_LINE])
session = aiohttp_client.async_get_clientsession(self.hass)
subway_feed = SubwayFeed(feed_id=feed_id, session=session)
await subway_feed.get_arrivals(
route_id=self.data[CONF_LINE],
stop_id=self.data[CONF_STOP_ID],
max_arrivals=1,
)

View File

@@ -0,0 +1,11 @@
"""Constants for the MTA New York City Transit integration."""
from datetime import timedelta
DOMAIN = "mta"
CONF_LINE = "line"
CONF_STOP_ID = "stop_id"
CONF_STOP_NAME = "stop_name"
UPDATE_INTERVAL = timedelta(seconds=30)

View File

@@ -0,0 +1,110 @@
"""Data update coordinator for MTA New York City Transit."""
from __future__ import annotations
from dataclasses import dataclass
from datetime import datetime
import logging
from pymta import MTAFeedError, SubwayFeed
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from .const import CONF_LINE, CONF_STOP_ID, DOMAIN, UPDATE_INTERVAL
_LOGGER = logging.getLogger(__name__)
@dataclass
class MTAArrival:
"""Represents a single train arrival."""
arrival_time: datetime
minutes_until: int
route_id: str
destination: str
@dataclass
class MTAData:
"""Data for MTA arrivals."""
arrivals: list[MTAArrival]
type MTAConfigEntry = ConfigEntry[MTADataUpdateCoordinator]
class MTADataUpdateCoordinator(DataUpdateCoordinator[MTAData]):
"""Class to manage fetching MTA data."""
config_entry: MTAConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: MTAConfigEntry) -> None:
"""Initialize."""
self.line = config_entry.data[CONF_LINE]
self.stop_id = config_entry.data[CONF_STOP_ID]
self.feed_id = SubwayFeed.get_feed_id_for_route(self.line)
session = async_get_clientsession(hass)
self.subway_feed = SubwayFeed(feed_id=self.feed_id, session=session)
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=UPDATE_INTERVAL,
)
async def _async_update_data(self) -> MTAData:
"""Fetch data from MTA."""
_LOGGER.debug(
"Fetching data for line=%s, stop=%s, feed=%s",
self.line,
self.stop_id,
self.feed_id,
)
try:
library_arrivals = await self.subway_feed.get_arrivals(
route_id=self.line,
stop_id=self.stop_id,
max_arrivals=3,
)
except MTAFeedError as err:
raise UpdateFailed(f"Error fetching MTA data: {err}") from err
now = dt_util.now()
arrivals: list[MTAArrival] = []
for library_arrival in library_arrivals:
# Convert UTC arrival time to local time
arrival_time = dt_util.as_local(library_arrival.arrival_time)
minutes_until = int((arrival_time - now).total_seconds() / 60)
_LOGGER.debug(
"Stop %s: arrival_time=%s, minutes_until=%d, route=%s",
library_arrival.stop_id,
arrival_time,
minutes_until,
library_arrival.route_id,
)
arrivals.append(
MTAArrival(
arrival_time=arrival_time,
minutes_until=minutes_until,
route_id=library_arrival.route_id,
destination=library_arrival.destination,
)
)
_LOGGER.debug("Returning %d arrivals", len(arrivals))
return MTAData(arrivals=arrivals)

View File

@@ -0,0 +1,12 @@
{
"domain": "mta",
"name": "MTA New York City Transit",
"codeowners": ["@OnFreund"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/mta",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pymta"],
"quality_scale": "silver",
"requirements": ["py-nymta==0.3.4"]
}

View File

@@ -0,0 +1,88 @@
rules:
# Bronze
action-setup:
status: exempt
comment: Integration does not register custom actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: Integration does not register custom actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: Integration does not explicitly subscribe to events in async_added_to_hass.
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: Integration does not register custom actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: No configuration options.
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow:
status: exempt
comment: No authentication required.
test-coverage: done
# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: No discovery.
discovery:
status: exempt
comment: No discovery.
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices:
status: exempt
comment: No physical devices.
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: exempt
comment: Integration tracks a single configured stop, not dynamically discovered devices.
entity-category:
status: exempt
comment: All entities are primary entities without specific categories.
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: N/A
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: No repairs needed currently.
stale-devices:
status: exempt
comment: Integration tracks a single configured stop per entry, devices cannot become stale.
# Platinum
async-dependency: todo
inject-websession: done
strict-typing: todo

View File

@@ -0,0 +1,147 @@
"""Sensor platform for MTA New York City Transit."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import CONF_LINE, CONF_STOP_ID, CONF_STOP_NAME, DOMAIN
from .coordinator import MTAArrival, MTAConfigEntry, MTADataUpdateCoordinator
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class MTASensorEntityDescription(SensorEntityDescription):
    """Describes an MTA sensor entity."""

    # Index into the coordinator's arrivals list (0 = next train).
    arrival_index: int
    # Extracts this sensor's value from the selected arrival.
    value_fn: Callable[[MTAArrival], datetime | str]
# A timestamp, route, and destination sensor for each of the first three
# upcoming arrivals.  Generated from one table instead of written out by
# hand so the three arrival slots cannot drift apart.
SENSOR_DESCRIPTIONS: tuple[MTASensorEntityDescription, ...] = tuple(
    MTASensorEntityDescription(
        key=f"{ordinal}_arrival{suffix}",
        translation_key=f"{ordinal}_arrival{suffix}",
        device_class=device_class,
        arrival_index=slot,
        value_fn=value_fn,
    )
    for slot, ordinal in enumerate(("next", "second", "third"))
    for suffix, device_class, value_fn in (
        # The bare "<ordinal>_arrival" sensor carries the arrival timestamp.
        ("", SensorDeviceClass.TIMESTAMP, lambda arrival: arrival.arrival_time),
        ("_route", None, lambda arrival: arrival.route_id),
        ("_destination", None, lambda arrival: arrival.destination),
    )
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: MTAConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up MTA sensor based on a config entry."""
    coordinator = entry.runtime_data
    entities = [
        MTASensor(coordinator, entry, description)
        for description in SENSOR_DESCRIPTIONS
    ]
    async_add_entities(entities)
class MTASensor(CoordinatorEntity[MTADataUpdateCoordinator], SensorEntity):
    """Sensor exposing one attribute of one upcoming MTA train arrival."""

    _attr_has_entity_name = True
    entity_description: MTASensorEntityDescription

    def __init__(
        self,
        coordinator: MTADataUpdateCoordinator,
        entry: MTAConfigEntry,
        description: MTASensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
        self.entity_description = description
        entry_data = entry.data
        line = entry_data[CONF_LINE]
        stop_id = entry_data[CONF_STOP_ID]
        # Fall back to the raw stop ID when no friendly name was stored.
        stop_name = entry_data.get(CONF_STOP_NAME, stop_id)
        self._attr_unique_id = f"{entry.unique_id}-{description.key}"
        # All sensors of this entry share one service device for the stop.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, entry.entry_id)},
            name=f"{line} Line - {stop_name} ({stop_id})",
            manufacturer="MTA",
            model="Subway",
            entry_type=DeviceEntryType.SERVICE,
        )

    @property
    def native_value(self) -> datetime | str | None:
        """Return the state of the sensor."""
        description = self.entity_description
        try:
            arrival = self.coordinator.data.arrivals[description.arrival_index]
        except IndexError:
            # Fewer arrivals reported than this slot needs: no value.
            return None
        return description.value_fn(arrival)

View File

@@ -0,0 +1,65 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"no_stops": "No stops found for this line. The line may not be currently running."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
},
"step": {
"stop": {
"data": {
"stop_id": "Stop and direction"
},
"data_description": {
"stop_id": "Select the stop and direction you want to track"
},
"description": "Choose a stop on the {line} line. The direction is included with each stop.",
"title": "Select stop and direction"
},
"user": {
"data": {
"line": "Line"
},
"data_description": {
"line": "The subway line to track"
},
"description": "Choose the subway line you want to track.",
"title": "Select subway line"
}
}
},
"entity": {
"sensor": {
"next_arrival": {
"name": "Next arrival"
},
"next_arrival_destination": {
"name": "Next arrival destination"
},
"next_arrival_route": {
"name": "Next arrival route"
},
"second_arrival": {
"name": "Second arrival"
},
"second_arrival_destination": {
"name": "Second arrival destination"
},
"second_arrival_route": {
"name": "Second arrival route"
},
"third_arrival": {
"name": "Third arrival"
},
"third_arrival_destination": {
"name": "Third arrival destination"
},
"third_arrival_route": {
"name": "Third arrival route"
}
}
}
}

View File

@@ -272,7 +272,7 @@ class NumberDeviceClass(StrEnum):
NITROGEN_DIOXIDE = "nitrogen_dioxide"
"""Amount of NO2.
Unit of measurement: `ppb` (parts per billion), `μg/m³`
Unit of measurement: `ppb` (parts per billion), `ppm` (parts per million), `μg/m³`
"""
NITROGEN_MONOXIDE = "nitrogen_monoxide"
@@ -544,6 +544,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
NumberDeviceClass.MOISTURE: {PERCENTAGE},
NumberDeviceClass.NITROGEN_DIOXIDE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.NITROGEN_MONOXIDE: {

View File

@@ -102,7 +102,7 @@ class OneDriveConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
reauth_entry = self._get_reauth_entry()
return self.async_update_reload_and_abort(
entry=reauth_entry,
data=data,
data_updates=data,
)
if self.source != SOURCE_RECONFIGURE:

View File

@@ -17,7 +17,7 @@ from onedrive_personal_sdk.exceptions import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import (
OAuth2Session,
@@ -111,7 +111,7 @@ async def _handle_item_operation[T](func: Callable[[], Awaitable[T]], folder: st
except NotFoundError:
raise
except AuthenticationError as err:
raise ConfigEntryError(
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN, translation_key="authentication_failed"
) from err
except (OneDriveException, TimeoutError) as err:

View File

@@ -81,6 +81,7 @@ def handle_backup_errors[_R, **P](
try:
return await func(self, *args, **kwargs)
except AuthenticationError as err:
self._entry.async_start_reauth(self._hass)
raise BackupAgentError("Authentication error") from err
except OneDriveException as err:
_LOGGER.error(

View File

@@ -2,15 +2,20 @@
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any, cast
from onedrive_personal_sdk.clients.client import OneDriveClient
from onedrive_personal_sdk.exceptions import OneDriveException
from onedrive_personal_sdk.models.items import AppRoot
from onedrive_personal_sdk.models.items import Drive
import voluptuous as vol
from homeassistant.config_entries import ConfigFlowResult
from homeassistant.config_entries import (
SOURCE_REAUTH,
SOURCE_RECONFIGURE,
ConfigFlowResult,
)
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
@@ -33,7 +38,7 @@ class OneDriveForBusinessConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
DOMAIN = DOMAIN
client: OneDriveClient
approot: AppRoot
drive: Drive
@property
def logger(self) -> logging.Logger:
@@ -97,8 +102,7 @@ class OneDriveForBusinessConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
)
try:
self.approot = await self.client.get_approot()
drive = await self.client.get_drive()
self.drive = await self.client.get_drive()
except OneDriveException:
self.logger.exception("Failed to connect to OneDrive")
return self.async_abort(reason="connection_error")
@@ -106,11 +110,25 @@ class OneDriveForBusinessConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
self.logger.exception("Unknown error")
return self.async_abort(reason="unknown")
await self.async_set_unique_id(drive.id)
self._abort_if_unique_id_configured()
await self.async_set_unique_id(self.drive.id)
if self.source == SOURCE_REAUTH:
self._abort_if_unique_id_mismatch(reason="wrong_drive")
return self.async_update_reload_and_abort(
entry=self._get_reauth_entry(),
data_updates=data,
)
if self.source == SOURCE_RECONFIGURE:
self._abort_if_unique_id_mismatch(reason="wrong_drive")
else:
self._abort_if_unique_id_configured()
self._data.update(data)
if self.source == SOURCE_RECONFIGURE:
return await self.async_step_reconfigure_folder()
return await self.async_step_select_folder()
async def async_step_select_folder(
@@ -128,9 +146,11 @@ class OneDriveForBusinessConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
errors["base"] = "folder_creation_error"
if not errors:
title = (
f"{self.approot.created_by.user.display_name}'s OneDrive"
if self.approot.created_by.user
and self.approot.created_by.user.display_name
f"{self.drive.owner.user.display_name}'s OneDrive ({self.drive.owner.user.email})"
if self.drive.owner
and self.drive.owner.user
and self.drive.owner.user.display_name
and self.drive.owner.user.email
else "OneDrive"
)
return self.async_create_entry(
@@ -147,3 +167,60 @@ class OneDriveForBusinessConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
data_schema=FOLDER_NAME_SCHEMA,
errors=errors,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Reconfigure the entry."""
self._data[CONF_TENANT_ID] = self._get_reconfigure_entry().data[CONF_TENANT_ID]
with tenant_id_context(self._data[CONF_TENANT_ID]):
return await self.async_step_pick_implementation()
async def async_step_reconfigure_folder(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Step to ask for new folder path during reconfiguration."""
errors: dict[str, str] = {}
reconfigure_entry = self._get_reconfigure_entry()
if user_input is not None:
path = str(user_input[CONF_FOLDER_PATH]).lstrip("/")
try:
folder = await self.client.create_folder("root", path)
except OneDriveException:
self.logger.debug("Failed to create folder", exc_info=True)
errors["base"] = "folder_creation_error"
if not errors:
return self.async_update_reload_and_abort(
reconfigure_entry,
data={
**self._data,
CONF_FOLDER_ID: folder.id,
CONF_FOLDER_PATH: user_input[CONF_FOLDER_PATH],
},
)
return self.async_show_form(
step_id="reconfigure_folder",
data_schema=self.add_suggested_values_to_schema(
FOLDER_NAME_SCHEMA,
{CONF_FOLDER_PATH: reconfigure_entry.data[CONF_FOLDER_PATH]},
),
errors=errors,
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauth dialog."""
if user_input is None:
return self.async_show_form(step_id="reauth_confirm")
self._data[CONF_TENANT_ID] = self._get_reauth_entry().data[CONF_TENANT_ID]
with tenant_id_context(self._data[CONF_TENANT_ID]):
return await self.async_step_pick_implementation()

View File

@@ -55,7 +55,7 @@ rules:
status: exempt
comment: |
This integration does not create entities.
reauthentication-flow: todo
reauthentication-flow: done
test-coverage: done
# Gold
@@ -116,7 +116,7 @@ rules:
status: exempt
comment: |
This integration does not create entities.
reconfiguration-flow: todo
reconfiguration-flow: done
repair-issues:
status: exempt
comment: |

View File

@@ -11,8 +11,11 @@
"oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]",
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"unknown": "[%key:common::config_flow::error::unknown%]",
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]"
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
"wrong_drive": "[%key:component::onedrive::config::abort::wrong_drive%]"
},
"create_entry": {
"default": "[%key:common::config_flow::create_entry::authenticated%]"
@@ -35,11 +38,25 @@
"tenant_id": "Tenant ID"
},
"data_description": {
"tenant_id": "The tenant ID of your OneDrive for Business account's tenant."
"tenant_id": "The directory (tenant) ID from the Microsoft Entra admin center."
},
"description": "To set up Onedrive for Business you need to create an app registration in the [Microsoft Entra admin center]({entra_url}) and set the redirect URI to `{redirect_url}`. In this step enter the tenant ID of the tenant where you created the app registration. In the next step you will be asked to provide the client ID and client secret of the app registration.",
"description": "To set up OneDrive for Business, you need to create an app registration in the [Microsoft Entra admin center]({entra_url}) and set the redirect URI to `{redirect_url}`. In this step, enter the ID of the tenant where you created the app registration. In the next step, you will be asked to provide the client ID and client secret of the app registration.",
"title": "Select tenant"
},
"reauth_confirm": {
"description": "The OneDrive for Business integration needs to re-authenticate your account",
"title": "[%key:common::config_flow::title::reauth%]"
},
"reconfigure_folder": {
"data": {
"folder_path": "[%key:component::onedrive_for_business::config::step::select_folder::data::folder_path%]"
},
"data_description": {
"folder_path": "[%key:component::onedrive_for_business::config::step::select_folder::data_description::folder_path%]"
},
"description": "[%key:component::onedrive_for_business::config::step::select_folder::description%]",
"title": "[%key:component::onedrive_for_business::config::step::select_folder::title%]"
},
"select_folder": {
"data": {
"folder_path": "Folder path"

View File

@@ -7,7 +7,7 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "silver",
"requirements": ["powerfox==2.0.0"],
"requirements": ["powerfox==2.1.0"],
"zeroconf": [
{
"name": "powerfox*",

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["ical"],
"quality_scale": "silver",
"requirements": ["ical==12.1.3"]
"requirements": ["ical==13.2.0"]
}

View File

@@ -20,7 +20,7 @@
"loggers": ["roborock"],
"quality_scale": "silver",
"requirements": [
"python-roborock==4.12.0",
"python-roborock==4.14.0",
"vacuum-map-parser-roborock==0.1.4"
]
}

View File

@@ -125,7 +125,7 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity):
self._cap_position = self.vacuum_state.get("cap", {}).get("pose") == 1
@property
def activity(self):
def activity(self) -> VacuumActivity:
"""Return the state of the vacuum cleaner."""
clean_mission_status = self.vacuum_state.get("cleanMissionStatus", {})
cycle = clean_mission_status.get("cycle")
@@ -213,7 +213,7 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity):
else:
await self.hass.async_add_executor_job(self.vacuum.send_command, "start")
async def async_stop(self, **kwargs):
async def async_stop(self, **kwargs: Any) -> None:
"""Stop the vacuum cleaner."""
await self.hass.async_add_executor_job(self.vacuum.send_command, "stop")
@@ -221,7 +221,7 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity):
"""Pause the cleaning cycle."""
await self.hass.async_add_executor_job(self.vacuum.send_command, "pause")
async def async_return_to_base(self, **kwargs):
async def async_return_to_base(self, **kwargs: Any) -> None:
"""Set the vacuum cleaner to return to the dock."""
if self.state == VacuumActivity.CLEANING:
await self.async_pause()
@@ -231,11 +231,16 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity):
await asyncio.sleep(1)
await self.hass.async_add_executor_job(self.vacuum.send_command, "dock")
async def async_locate(self, **kwargs):
async def async_locate(self, **kwargs: Any) -> None:
"""Located vacuum."""
await self.hass.async_add_executor_job(self.vacuum.send_command, "find")
async def async_send_command(self, command, params=None, **kwargs):
async def async_send_command(
self,
command: str,
params: dict[str, Any] | list[Any] | None = None,
**kwargs: Any,
) -> None:
"""Send raw command."""
_LOGGER.debug("async_send_command %s (%s), %s", command, params, kwargs)
await self.hass.async_add_executor_job(
@@ -270,7 +275,7 @@ class RoombaVacuumCarpetBoost(RoombaVacuum):
_attr_supported_features = SUPPORT_ROOMBA_CARPET_BOOST
@property
def fan_speed(self):
def fan_speed(self) -> str | None:
"""Return the fan speed of the vacuum cleaner."""
fan_speed = None
carpet_boost = self.vacuum_state.get("carpetBoost")
@@ -284,7 +289,7 @@ class RoombaVacuumCarpetBoost(RoombaVacuum):
fan_speed = FAN_SPEED_ECO
return fan_speed
async def async_set_fan_speed(self, fan_speed, **kwargs):
async def async_set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None:
"""Set fan speed."""
if fan_speed.capitalize() in FAN_SPEEDS:
fan_speed = fan_speed.capitalize()
@@ -329,7 +334,7 @@ class BraavaJet(IRobotVacuum):
]
@property
def fan_speed(self):
def fan_speed(self) -> str:
"""Return the fan speed of the vacuum cleaner."""
# Mopping behavior and spray amount as fan speed
rank_overlap = self.vacuum_state.get("rankOverlap", {})
@@ -345,7 +350,7 @@ class BraavaJet(IRobotVacuum):
pad_wetness_value = pad_wetness.get("disposable")
return f"{behavior}-{pad_wetness_value}"
async def async_set_fan_speed(self, fan_speed, **kwargs):
async def async_set_fan_speed(self, fan_speed: str, **kwargs: Any) -> None:
"""Set fan speed."""
try:
split = fan_speed.split("-", 1)

View File

@@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["pysaunum"],
"quality_scale": "platinum",
"requirements": ["pysaunum==0.3.0"]
"requirements": ["pysaunum==0.5.0"]
}

View File

@@ -286,7 +286,7 @@ class SensorDeviceClass(StrEnum):
NITROGEN_DIOXIDE = "nitrogen_dioxide"
"""Amount of NO2.
Unit of measurement: `ppb` (parts per billion), `μg/m³`
Unit of measurement: `ppb` (parts per billion), `ppm` (parts per million), `μg/m³`
"""
NITROGEN_MONOXIDE = "nitrogen_monoxide"
@@ -639,6 +639,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
SensorDeviceClass.MOISTURE: {PERCENTAGE},
SensorDeviceClass.NITROGEN_DIOXIDE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
SensorDeviceClass.NITROGEN_MONOXIDE: {

View File

@@ -12,7 +12,7 @@
"integration_type": "device",
"iot_class": "local_push",
"quality_scale": "silver",
"requirements": ["pysmlight==0.2.13"],
"requirements": ["pysmlight==0.2.14"],
"zeroconf": [
{
"type": "_slzb-06._tcp.local."

View File

@@ -528,7 +528,7 @@ async def _call_service(
service_name = service.service
kwargs = dict(service.data)
kwargs[ATTR_TARGET] = chat_id
kwargs[ATTR_CHAT_ID] = chat_id
messages: dict[str, JsonValueType] | None = None
if service_name == SERVICE_SEND_MESSAGE:

View File

@@ -86,7 +86,6 @@ from .const import (
ATTR_REPLYMARKUP,
ATTR_RESIZE_KEYBOARD,
ATTR_STICKER_ID,
ATTR_TARGET,
ATTR_TEXT,
ATTR_TIMEOUT,
ATTR_TITLE,
@@ -332,35 +331,6 @@ class TelegramNotificationService:
inline_message_id = msg_data[ATTR_INLINE_MESSAGE_ID]
return message_id, inline_message_id
def get_target_chat_ids(self, target: int | list[int] | None) -> list[int]:
"""Validate chat_id targets or return default target (first).
:param target: optional list of integers ([12234, -12345])
:return list of chat_id targets (integers)
"""
allowed_chat_ids: list[int] = [
subentry.data[CONF_CHAT_ID] for subentry in self.config.subentries.values()
]
if target is None:
return [allowed_chat_ids[0]]
chat_ids = [target] if isinstance(target, int) else target
valid_chat_ids = [
chat_id for chat_id in chat_ids if chat_id in allowed_chat_ids
]
if not valid_chat_ids:
raise ServiceValidationError(
"Invalid chat IDs",
translation_domain=DOMAIN,
translation_key="invalid_chat_ids",
translation_placeholders={
"chat_ids": ", ".join(str(chat_id) for chat_id in chat_ids),
"bot_name": self.config.title,
},
)
return valid_chat_ids
def _get_msg_kwargs(self, data: dict[str, Any]) -> dict[str, Any]:
"""Get parameters in message data kwargs."""
@@ -476,7 +446,7 @@ class TelegramNotificationService:
:return: dict with chat_id keys and message_id values for successful sends
"""
chat_ids = self.get_target_chat_ids(kwargs_msg.pop(ATTR_TARGET, None))
chat_ids = [kwargs_msg.pop(ATTR_CHAT_ID)]
msg_ids: dict[str, JsonValueType] = {}
for chat_id in chat_ids:
_LOGGER.debug("%s to chat ID %s", func_send.__name__, chat_id)
@@ -561,8 +531,8 @@ class TelegramNotificationService:
async def send_message(
self,
message: str = "",
target: Any = None,
message: str,
chat_id: int,
context: Context | None = None,
**kwargs: dict[str, Any],
) -> dict[str, JsonValueType]:
@@ -575,7 +545,7 @@ class TelegramNotificationService:
"Error sending message",
params[ATTR_MESSAGE_TAG],
text,
target=target,
chat_id=chat_id,
parse_mode=params[ATTR_PARSER],
disable_web_page_preview=params[ATTR_DISABLE_WEB_PREV],
disable_notification=params[ATTR_DISABLE_NOTIF],
@@ -588,12 +558,11 @@ class TelegramNotificationService:
async def delete_message(
self,
chat_id: int | None = None,
chat_id: int,
context: Context | None = None,
**kwargs: dict[str, Any],
) -> bool:
"""Delete a previously sent message."""
chat_id = self.get_target_chat_ids(chat_id)[0]
message_id, _ = self._get_msg_ids(kwargs, chat_id)
_LOGGER.debug("Delete message %s in chat ID %s", message_id, chat_id)
deleted: bool = await self._send_msg(
@@ -613,12 +582,11 @@ class TelegramNotificationService:
async def edit_message_media(
self,
media_type: str,
chat_id: int | None = None,
chat_id: int,
context: Context | None = None,
**kwargs: Any,
) -> Any:
"Edit message media of a previously sent message."
chat_id = self.get_target_chat_ids(chat_id)[0]
message_id, inline_message_id = self._get_msg_ids(kwargs, chat_id)
params = self._get_msg_kwargs(kwargs)
_LOGGER.debug(
@@ -690,12 +658,11 @@ class TelegramNotificationService:
async def edit_message(
self,
type_edit: str,
chat_id: int | None = None,
chat_id: int,
context: Context | None = None,
**kwargs: dict[str, Any],
) -> Any:
"""Edit a previously sent message."""
chat_id = self.get_target_chat_ids(chat_id)[0]
message_id, inline_message_id = self._get_msg_ids(kwargs, chat_id)
params = self._get_msg_kwargs(kwargs)
_LOGGER.debug(
@@ -779,25 +746,24 @@ class TelegramNotificationService:
async def send_chat_action(
self,
chat_id: int,
chat_action: str = "",
target: Any = None,
context: Context | None = None,
**kwargs: Any,
) -> dict[str, JsonValueType]:
"""Send a chat action to pre-allowed chat IDs."""
result: dict[str, JsonValueType] = {}
for chat_id in self.get_target_chat_ids(target):
_LOGGER.debug("Send action %s in chat ID %s", chat_action, chat_id)
is_successful = await self._send_msg(
self.bot.send_chat_action,
"Error sending action",
None,
chat_id=chat_id,
action=chat_action,
message_thread_id=kwargs.get(ATTR_MESSAGE_THREAD_ID),
context=context,
)
result[str(chat_id)] = is_successful
_LOGGER.debug("Send action %s in chat ID %s", chat_action, chat_id)
is_successful = await self._send_msg(
self.bot.send_chat_action,
"Error sending action",
None,
chat_id=chat_id,
action=chat_action,
message_thread_id=kwargs.get(ATTR_MESSAGE_THREAD_ID),
context=context,
)
result[str(chat_id)] = is_successful
return result
async def send_file(
@@ -827,7 +793,7 @@ class TelegramNotificationService:
self.bot.send_photo,
"Error sending photo",
params[ATTR_MESSAGE_TAG],
target=kwargs.get(ATTR_TARGET),
chat_id=kwargs[ATTR_CHAT_ID],
photo=file_content,
caption=kwargs.get(ATTR_CAPTION),
disable_notification=params[ATTR_DISABLE_NOTIF],
@@ -844,7 +810,7 @@ class TelegramNotificationService:
self.bot.send_sticker,
"Error sending sticker",
params[ATTR_MESSAGE_TAG],
target=kwargs.get(ATTR_TARGET),
chat_id=kwargs[ATTR_CHAT_ID],
sticker=file_content,
disable_notification=params[ATTR_DISABLE_NOTIF],
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
@@ -859,7 +825,7 @@ class TelegramNotificationService:
self.bot.send_video,
"Error sending video",
params[ATTR_MESSAGE_TAG],
target=kwargs.get(ATTR_TARGET),
chat_id=kwargs[ATTR_CHAT_ID],
video=file_content,
caption=kwargs.get(ATTR_CAPTION),
disable_notification=params[ATTR_DISABLE_NOTIF],
@@ -876,7 +842,7 @@ class TelegramNotificationService:
self.bot.send_document,
"Error sending document",
params[ATTR_MESSAGE_TAG],
target=kwargs.get(ATTR_TARGET),
chat_id=kwargs[ATTR_CHAT_ID],
document=file_content,
caption=kwargs.get(ATTR_CAPTION),
disable_notification=params[ATTR_DISABLE_NOTIF],
@@ -893,7 +859,7 @@ class TelegramNotificationService:
self.bot.send_voice,
"Error sending voice",
params[ATTR_MESSAGE_TAG],
target=kwargs.get(ATTR_TARGET),
chat_id=kwargs[ATTR_CHAT_ID],
voice=file_content,
caption=kwargs.get(ATTR_CAPTION),
disable_notification=params[ATTR_DISABLE_NOTIF],
@@ -909,7 +875,7 @@ class TelegramNotificationService:
self.bot.send_animation,
"Error sending animation",
params[ATTR_MESSAGE_TAG],
target=kwargs.get(ATTR_TARGET),
chat_id=kwargs[ATTR_CHAT_ID],
animation=file_content,
caption=kwargs.get(ATTR_CAPTION),
disable_notification=params[ATTR_DISABLE_NOTIF],
@@ -935,7 +901,7 @@ class TelegramNotificationService:
self.bot.send_sticker,
"Error sending sticker",
params[ATTR_MESSAGE_TAG],
target=kwargs.get(ATTR_TARGET),
chat_id=kwargs[ATTR_CHAT_ID],
sticker=stickerid,
disable_notification=params[ATTR_DISABLE_NOTIF],
reply_to_message_id=params[ATTR_REPLY_TO_MSGID],
@@ -950,7 +916,6 @@ class TelegramNotificationService:
self,
latitude: Any,
longitude: Any,
target: Any = None,
context: Context | None = None,
**kwargs: dict[str, Any],
) -> dict[str, JsonValueType]:
@@ -962,7 +927,7 @@ class TelegramNotificationService:
self.bot.send_location,
"Error sending location",
params[ATTR_MESSAGE_TAG],
target=target,
chat_id=kwargs[ATTR_CHAT_ID],
latitude=latitude,
longitude=longitude,
disable_notification=params[ATTR_DISABLE_NOTIF],
@@ -978,7 +943,6 @@ class TelegramNotificationService:
options: Sequence[str | InputPollOption],
is_anonymous: bool | None,
allows_multiple_answers: bool | None,
target: Any = None,
context: Context | None = None,
**kwargs: dict[str, Any],
) -> dict[str, JsonValueType]:
@@ -989,7 +953,7 @@ class TelegramNotificationService:
self.bot.send_poll,
"Error sending poll",
params[ATTR_MESSAGE_TAG],
target=target,
chat_id=kwargs[ATTR_CHAT_ID],
question=question,
options=options,
is_anonymous=is_anonymous,
@@ -1004,12 +968,11 @@ class TelegramNotificationService:
async def leave_chat(
self,
chat_id: int | None = None,
chat_id: int,
context: Context | None = None,
**kwargs: dict[str, Any],
) -> Any:
"""Remove bot from chat."""
chat_id = self.get_target_chat_ids(chat_id)[0]
_LOGGER.debug("Leave from chat ID %s", chat_id)
return await self._send_msg(
self.bot.leave_chat, "Error leaving chat", None, chat_id, context=context
@@ -1018,13 +981,12 @@ class TelegramNotificationService:
async def set_message_reaction(
self,
reaction: str,
chat_id: int | None = None,
chat_id: int,
is_big: bool = False,
context: Context | None = None,
**kwargs: dict[str, Any],
) -> None:
"""Set the bot's reaction for a given message."""
chat_id = self.get_target_chat_ids(chat_id)[0]
message_id, _ = self._get_msg_ids(kwargs, chat_id)
params = self._get_msg_kwargs(kwargs)

View File

@@ -4,7 +4,6 @@ from __future__ import annotations
import asyncio
from collections.abc import Coroutine
from functools import partial
import logging
from typing import Any
@@ -13,7 +12,10 @@ from homeassistant.components.automation import (
DOMAIN as AUTOMATION_DOMAIN,
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
)
from homeassistant.components.labs import async_listen as async_labs_listen
from homeassistant.components.labs import (
EventLabsUpdatedData,
async_subscribe_preview_feature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_DEVICE_ID,
@@ -22,7 +24,7 @@ from homeassistant.const import (
CONF_UNIQUE_ID,
SERVICE_RELOAD,
)
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.core import Event, HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryError, HomeAssistantError
from homeassistant.helpers import discovery, issue_registry as ir
from homeassistant.helpers.device import (
@@ -99,18 +101,19 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async_register_admin_service(hass, DOMAIN, SERVICE_RELOAD, _reload_config)
@callback
def new_triggers_conditions_listener() -> None:
async def _handle_new_triggers_conditions(
_event_data: EventLabsUpdatedData,
) -> None:
"""Handle new_triggers_conditions flag change."""
hass.async_create_task(
_reload_config(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
)
async_labs_listen(
async_subscribe_preview_feature(
hass,
AUTOMATION_DOMAIN,
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
new_triggers_conditions_listener,
_handle_new_triggers_conditions,
)
return True
@@ -139,12 +142,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry, (entry.options["template_type"],)
)
async def _handle_entry_reload(_event_data: EventLabsUpdatedData) -> None:
hass.config_entries.async_schedule_reload(entry.entry_id)
entry.async_on_unload(
async_labs_listen(
async_subscribe_preview_feature(
hass,
AUTOMATION_DOMAIN,
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
partial(hass.config_entries.async_schedule_reload, entry.entry_id),
_handle_entry_reload,
)
)

View File

@@ -13,17 +13,23 @@ from tesla_fleet_api.exceptions import (
TeslaFleetError,
)
from tesla_fleet_api.tessie import Tessie
from tessie_api import get_state_of_all_vehicles
from tessie_api import get_battery, get_state_of_all_vehicles
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryError,
ConfigEntryNotReady,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceInfo
from .const import DOMAIN, MODELS
from .coordinator import (
TessieBatteryHealthCoordinator,
TessieEnergyHistoryCoordinator,
TessieEnergySiteInfoCoordinator,
TessieEnergySiteLiveCoordinator,
TessieStateUpdateCoordinator,
@@ -64,8 +70,26 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo
except ClientResponseError as e:
if e.status == HTTPStatus.UNAUTHORIZED:
raise ConfigEntryAuthFailed from e
_LOGGER.error("Setup failed, unable to connect to Tessie: %s", e)
return False
raise ConfigEntryError("Setup failed, unable to connect to Tessie") from e
except ClientError as e:
raise ConfigEntryNotReady from e
try:
batteries = await asyncio.gather(
*(
get_battery(
session=session,
api_key=api_key,
vin=vehicle["vin"],
)
for vehicle in state_of_all_vehicles["results"]
if vehicle["last_state"] is not None
)
)
except ClientResponseError as e:
if e.status == HTTPStatus.UNAUTHORIZED:
raise ConfigEntryAuthFailed from e
raise ConfigEntryError("Setup failed, unable to get battery data") from e
except ClientError as e:
raise ConfigEntryNotReady from e
@@ -79,6 +103,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo
vin=vehicle["vin"],
data=vehicle["last_state"],
),
battery_coordinator=TessieBatteryHealthCoordinator(
hass,
entry,
api_key=api_key,
vin=vehicle["vin"],
data=battery,
),
device=DeviceInfo(
identifiers={(DOMAIN, vehicle["vin"])},
manufacturer="Tesla",
@@ -95,8 +126,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo
serial_number=vehicle["vin"],
),
)
for vehicle in state_of_all_vehicles["results"]
if vehicle["last_state"] is not None
for vehicle, battery in zip(
(
v
for v in state_of_all_vehicles["results"]
if v["last_state"] is not None
),
batteries,
strict=True,
)
]
# Energy Sites
@@ -137,6 +175,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo
except TeslaFleetError as e:
raise ConfigEntryNotReady(e.message) from e
powerwall = (
product["components"]["battery"] or product["components"]["solar"]
)
energysites.append(
TessieEnergyData(
api=api,
@@ -151,6 +193,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo
info_coordinator=TessieEnergySiteInfoCoordinator(
hass, entry, api
),
history_coordinator=(
TessieEnergyHistoryCoordinator(hass, entry, api)
if powerwall
else None
),
device=DeviceInfo(
identifiers={(DOMAIN, str(site_id))},
manufacturer="Tesla",
@@ -170,6 +217,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: TessieConfigEntry) -> bo
energysite.info_coordinator.async_config_entry_first_refresh()
for energysite in energysites
),
*(
energysite.history_coordinator.async_config_entry_first_refresh()
for energysite in energysites
if energysite.history_coordinator is not None
),
)
entry.runtime_data = TessieData(vehicles, energysites)

View File

@@ -160,10 +160,16 @@ VEHICLE_DESCRIPTIONS: tuple[TessieBinarySensorEntityDescription, ...] = (
),
)
ENERGY_LIVE_DESCRIPTIONS: tuple[BinarySensorEntityDescription, ...] = (
BinarySensorEntityDescription(key="backup_capable"),
BinarySensorEntityDescription(key="grid_services_active"),
BinarySensorEntityDescription(key="storm_mode_active"),
ENERGY_LIVE_DESCRIPTIONS: tuple[TessieBinarySensorEntityDescription, ...] = (
TessieBinarySensorEntityDescription(key="backup_capable"),
TessieBinarySensorEntityDescription(key="grid_services_active"),
TessieBinarySensorEntityDescription(
key="grid_status",
is_on=lambda x: x == "Active",
device_class=BinarySensorDeviceClass.POWER,
entity_category=EntityCategory.DIAGNOSTIC,
),
TessieBinarySensorEntityDescription(key="storm_mode_active"),
)
@@ -225,21 +231,28 @@ class TessieBinarySensorEntity(TessieEntity, BinarySensorEntity):
class TessieEnergyLiveBinarySensorEntity(TessieEnergyEntity, BinarySensorEntity):
"""Base class for Tessie energy live binary sensors."""
entity_description: BinarySensorEntityDescription
entity_description: TessieBinarySensorEntityDescription
def __init__(
self,
data: TessieEnergyData,
description: BinarySensorEntityDescription,
description: TessieBinarySensorEntityDescription,
) -> None:
"""Initialize the binary sensor."""
self.entity_description = description
assert data.live_coordinator is not None
super().__init__(data, data.live_coordinator, description.key)
@property
def available(self) -> bool:
"""Return if entity is available."""
return super().available and self._attr_available
def _async_update_attrs(self) -> None:
"""Update the attributes of the binary sensor."""
self._attr_is_on = self._value
self._attr_available = self._value is not None
if self._attr_available:
self._attr_is_on = self.entity_description.is_on(self._value)
class TessieEnergyInfoBinarySensorEntity(TessieEnergyEntity, BinarySensorEntity):

View File

@@ -114,3 +114,28 @@ class TessieWallConnectorStates(IntEnum):
CHARGING_FINISHED = 8
WAITING_CAR = 9
CHARGING_REDUCED = 10
ENERGY_HISTORY_FIELDS = (
"solar_energy_exported",
"generator_energy_exported",
"grid_energy_imported",
"grid_services_energy_imported",
"grid_services_energy_exported",
"grid_energy_exported_from_solar",
"grid_energy_exported_from_generator",
"grid_energy_exported_from_battery",
"battery_energy_exported",
"battery_energy_imported_from_grid",
"battery_energy_imported_from_solar",
"battery_energy_imported_from_generator",
"consumer_energy_imported_from_grid",
"consumer_energy_imported_from_solar",
"consumer_energy_imported_from_battery",
"consumer_energy_imported_from_generator",
"total_home_usage",
"total_battery_charge",
"total_battery_discharge",
"total_solar_generation",
"total_grid_energy_exported",
)

View File

@@ -8,23 +8,26 @@ import logging
from typing import TYPE_CHECKING, Any
from aiohttp import ClientResponseError
from tesla_fleet_api.const import TeslaEnergyPeriod
from tesla_fleet_api.exceptions import InvalidToken, MissingToken, TeslaFleetError
from tesla_fleet_api.tessie import EnergySite
from tessie_api import get_state, get_status
from tessie_api import get_battery, get_state, get_status
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
if TYPE_CHECKING:
from . import TessieConfigEntry
from .const import TessieStatus
from .const import DOMAIN, ENERGY_HISTORY_FIELDS, TessieStatus
# This matches the update interval Tessie performs server side
TESSIE_SYNC_INTERVAL = 10
TESSIE_FLEET_API_SYNC_INTERVAL = timedelta(seconds=30)
TESSIE_ENERGY_HISTORY_INTERVAL = timedelta(seconds=60)
_LOGGER = logging.getLogger(__name__)
@@ -96,6 +99,48 @@ class TessieStateUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
return flatten(vehicle)
class TessieBatteryHealthCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Class to manage fetching battery health data from the Tessie API."""
config_entry: TessieConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: TessieConfigEntry,
api_key: str,
vin: str,
data: dict[str, Any],
) -> None:
"""Initialize Tessie Battery Health coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name="Tessie Battery Health",
update_interval=timedelta(seconds=TESSIE_SYNC_INTERVAL),
)
self.api_key = api_key
self.vin = vin
self.session = async_get_clientsession(hass)
self.data = data
async def _async_update_data(self) -> dict[str, Any]:
"""Update battery health data using Tessie API."""
try:
data = await get_battery(
session=self.session,
api_key=self.api_key,
vin=self.vin,
)
except ClientResponseError as e:
if e.status == HTTPStatus.UNAUTHORIZED:
raise ConfigEntryAuthFailed from e
raise UpdateFailed from e
return data
class TessieEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Class to manage fetching energy site live status from the Tessie API."""
@@ -171,3 +216,59 @@ class TessieEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]]):
raise UpdateFailed(e.message) from e
return flatten(data)
class TessieEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]):
"""Class to manage fetching energy history from the Tessie API."""
config_entry: TessieConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: TessieConfigEntry,
api: EnergySite,
) -> None:
"""Initialize Tessie Energy History coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name="Tessie Energy History",
update_interval=TESSIE_ENERGY_HISTORY_INTERVAL,
)
self.api = api
self.data = {}
async def _async_update_data(self) -> dict[str, Any]:
"""Update energy history data using Tessie API."""
try:
data = (await self.api.energy_history(TeslaEnergyPeriod.DAY))["response"]
except (InvalidToken, MissingToken) as e:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="auth_failed",
) from e
except TeslaFleetError as e:
raise UpdateFailed(e.message) from e
if (
not data
or not isinstance(data.get("time_series"), list)
or not data["time_series"]
):
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="invalid_energy_history_data",
)
time_series = data["time_series"]
output: dict[str, Any] = {}
for key in ENERGY_HISTORY_FIELDS:
values = [p[key] for p in time_series if key in p]
output[key] = sum(values) if values else None
output["_period_start"] = dt_util.parse_datetime(time_series[0]["timestamp"])
return output

Some files were not shown because too many files have changed in this diff Show More