Compare commits

..

150 Commits

Author SHA1 Message Date
Daniel Hjelseth Høyer
a9b984d705 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-16 16:47:20 +01:00
Daniel Hjelseth Høyer
886c0578e7 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 16:18:46 +01:00
Daniel Hjelseth Høyer
02e579c5ae tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 15:38:27 +01:00
Daniel Hjelseth Høyer
d47f3ca1d8 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 15:27:07 +01:00
Daniel Hjelseth Høyer
02e5f2c234 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 15:18:07 +01:00
Daniel Hjelseth Høyer
e42195bfed tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 14:39:42 +01:00
Daniel Hjelseth Høyer
b2944a6d66 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 13:55:08 +01:00
Daniel Hjelseth Høyer
03d15fb70c tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 13:46:40 +01:00
Daniel Hjelseth Høyer
01d57ddcf1 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 13:34:15 +01:00
Daniel Hjelseth Høyer
cfc85cfd29 clean up
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 12:37:13 +01:00
Daniel Hjelseth Høyer
ca2dc20709 Refactor Tibber
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 03:22:39 +01:00
Denis Shulyaka
3840f7a767 Bump openai to 2.21.0 (#163032) 2026-02-14 20:08:45 -05:00
Jordan Harvey
af2d2a857a Add bedtime end time entity Nintendo parental controls (#160927) 2026-02-14 22:51:20 +01:00
jameson_uk
31970255a2 Add air quality monitor sensors to Alexa Devices (#162095)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2026-02-14 22:29:11 +01:00
Daniel Hjelseth Høyer
f30397a11a Update homevolt quality scale (#163022)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-14 22:05:03 +01:00
Denis Shulyaka
cbcfc43c5a Add reauthentication to Anthropic (#163019) 2026-02-14 21:53:25 +01:00
mettolen
acaa2aeeee Add switch entities to Liebherr integration (#162688) 2026-02-14 21:41:06 +01:00
Denis Shulyaka
c67c19413b Improve Anthropic coverage (#163011) 2026-02-14 21:33:53 +01:00
Paul Tarjan
8840d2f0ef Add entity descriptions to Hikvision binary sensors (#160875)
Co-authored-by: Claude <noreply@anthropic.com>
2026-02-14 21:32:39 +01:00
Daniel Hjelseth Høyer
82fb3c35dc Add zeroconf support to Homevolt (#162897)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-14 21:24:16 +01:00
Franck Nijhof
4d0d5d6817 CI security hardening actions/checkout to not persist-credentials (#162991) 2026-02-14 21:11:43 +01:00
Denis Shulyaka
12584482a2 Add data descriptions for Anthropic data flow (#162961) 2026-02-14 22:34:33 +03:00
Denis Shulyaka
b47dd2f923 Enable strict typing check for Anthropic (#163013) 2026-02-14 19:04:29 +00:00
Ludovic BOUÉ
3d354da104 Added ppm support for the ozone device class in sensor (#162996) 2026-02-14 19:57:16 +01:00
wollew
89e900dca1 add switch platform for Velux on/off switches (#163002) 2026-02-14 15:36:51 +01:00
Patrick Vorgers
675884ad78 S3 backup - Improved buffer handling (#162955) 2026-02-14 15:26:08 +01:00
Franck Nijhof
efb6cdc17e Fix failing sftp_storage test (#163000) 2026-02-14 08:12:06 -06:00
Jan Bouwhuis
aca7fe530c Fix lingering test_waiting_for_client_not_loaded test (#162994) 2026-02-14 13:55:12 +01:00
Simone Chemelli
10fa02a36c Small test cleanup for Fritz (#162993) 2026-02-14 13:41:26 +01:00
jameson_uk
5344a874b0 fix: info skill reference (#162823) 2026-02-14 13:34:59 +01:00
Glenn de Haan
ad2fe0d4d0 Add HDFury CEC and 5v switches (#162988) 2026-02-14 13:20:24 +01:00
Ludovic BOUÉ
9c275acca9 Add Matter TVOC level entity (#162964)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-14 12:50:08 +01:00
Martin Hjelmare
225ecedc95 Fix Z-Wave climate set preset (#162728) 2026-02-14 12:45:36 +01:00
Artur Pragacz
f246c90073 Move DATA_MP_ENTITIES in Onkyo (#162674) 2026-02-14 12:35:40 +01:00
Denis Shulyaka
5bf7e83e76 Anthropic: Increase max iterations for AI Task (#162954) 2026-02-14 12:33:12 +01:00
Christian Lackas
3b3f4066c3 Fix HomematicIP entity recovery after access point cloud reconnect (#162575) 2026-02-14 12:23:16 +01:00
Glenn de Haan
30e484c292 Improve quality scale to platinum HDFury integration (#162985) 2026-02-14 12:17:32 +01:00
wollew
137377b50a Refactor Velux cover class (#162984) 2026-02-14 12:16:14 +01:00
Franck Nijhof
96b98c9cb9 Remove YAML anchors / expand GitHub CI workflows (#162987) 2026-02-14 12:01:29 +01:00
MoonDevLT
7d3601aa6f Replace the manufacturer name of lunatone (#162854) 2026-02-14 11:52:39 +01:00
Anders Ödlund
2ef7f6b317 Z-Wave lock service action modernization (#162967)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-14 10:33:23 +01:00
Matthias Alphart
7c8b181e6d Update knx-frontend to 2026.2.13.222258 (#162963) 2026-02-14 10:10:50 +01:00
Thomas Rupprecht
b5147d8afa Improve SpaceAPI Manifest (#160805) 2026-02-14 09:55:55 +01:00
hanwg
dc4bc6feea Set default file transfer timeouts for Telegram bot (#162978) 2026-02-14 09:36:25 +01:00
Thomas Rupprecht
4cea3b4aac Improve types and code in SpaceAPI (#162970) 2026-02-14 09:26:25 +01:00
Brett Adams
d633a69e07 Bump python-tesla-fleet-api to v1.4.3 (#162977)
Co-authored-by: Claude Haiku 4.5 <noreply@anthropic.com>
2026-02-14 09:25:20 +01:00
AlCalzone
3e8e95f95e Handle Z-Wave values (re-)added at runtime (#162921) 2026-02-13 23:46:59 +01:00
Michael Hansen
6d66df9346 Bump intents to 2026.2.13 (#162959) 2026-02-13 23:29:55 +01:00
TheJulianJES
ed15a01a6a Fix home-assistant-intents breaking nightly builds (#162957)
Co-authored-by: Robert Resch <robert@resch.dev>
2026-02-13 23:28:36 +01:00
Glenn de Haan
462d958b7e Bump hdfury to 1.5.0 (#162944) 2026-02-13 21:42:22 +01:00
Robin Lintermann
d888579cbd Bump pysmarlaapi to 1.0.1 and compatibility changes (#162911) 2026-02-13 21:41:05 +01:00
Erik Montnemery
e16a8ed20e Don't mock out filesystem operations in backup tests (#162877) 2026-02-13 21:39:34 +01:00
YogevBokobza
b11a75d438 Add Switcher heater support (#162588)
Co-authored-by: Shay Levy <levyshay1@gmail.com>
2026-02-13 22:32:55 +02:00
Glenn de Haan
95df5b9ec9 Fix incorrect type HDFury select platform (#162948) 2026-02-13 20:50:26 +01:00
epenet
a301a9c4b6 Always include homeassistant translations in tests (#162850) 2026-02-13 20:17:48 +01:00
Thomas55555
e80bb871e4 Bump ruff to 0.15.1 (#162903) 2026-02-13 19:43:37 +01:00
epenet
ff4ff98e54 Parametrize yeelight test_device_types test (#161838) 2026-02-13 19:43:07 +01:00
wollew
88c6cb3877 add OnOffLight without brightness control to velux integration (#162835) 2026-02-13 19:42:44 +01:00
Michael
6b3a7e4cd6 Fix handling when FRITZ!Box reboots in FRITZ!Smarthome (#162676) 2026-02-13 19:41:03 +01:00
Michael
36ff7506a0 Fix handling when FRITZ!Box reboots in FRITZ!Box Tools (#162679) 2026-02-13 19:40:51 +01:00
Allen Porter
a0af35f2dc Improve MCP SSE fallback error handling (#162655) 2026-02-13 19:39:34 +01:00
Josef Zweck
c15da19b84 Log remaining token duration in onedrive (#162933) 2026-02-13 19:38:44 +01:00
Damien Sorel
23e88a24f0 Add remove item intent for todo component (#152922) 2026-02-13 19:38:22 +01:00
Robert Resch
815c708d19 Block redirect to localhost (#162941) 2026-02-13 19:31:35 +01:00
Paulus Schoutsen
f9f2f39a3c OpenAI: Increase max iterations for AI Task (#162599) 2026-02-13 13:16:26 -05:00
Erik Montnemery
490514c274 Add fixture to give tests their own unique copy of testing_config (#162938) 2026-02-13 18:07:18 +01:00
Kamil Breguła
7da339b59c Add quality scale for GIOS (#155603)
Co-authored-by: mik-laj <12058428+mik-laj@users.noreply.github.com>
2026-02-13 18:01:44 +01:00
Josef Zweck
1bb31892c2 Add integration for onedrive for business (#155709)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-13 07:01:52 -08:00
epenet
267caf2365 Use APPLICATION_CREDENTIALS_DOMAIN constant in tests (#162932) 2026-02-13 15:47:38 +01:00
Petro31
4e71a38e31 Ensure numeric template sensors only use numbers in _attr_native_state (#162878)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2026-02-13 14:14:28 +01:00
Petro31
d3d916566a Make template lock code error consistent between state based and trigger based template entities (#162923) 2026-02-13 14:13:58 +01:00
epenet
fd3258a6d3 Use constants for update_entity calls in tests (#162920) 2026-02-13 13:54:40 +01:00
Sammy [Andrei Marinache]
d1aadb5842 Add Miele TQ1000WP tumble dryer programs and program phases (#162871)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
Co-authored-by: Åke Strandberg <ake@strandberg.eu>
2026-02-13 13:53:12 +01:00
epenet
d984411911 Raise on missing supported color modes (#162717) 2026-02-13 13:39:48 +01:00
Robin Lintermann
8ed0a4cf29 Specifiy number of parallel updates in Smarla (#162914) 2026-02-13 13:24:24 +01:00
Simone Chemelli
9a407b8668 Optimize coordinator data type for UptimeRobot (#162912) 2026-02-13 13:23:59 +01:00
Robin Lintermann
72aa9d8a6a Improve smarla typing in tests (#162163) 2026-02-13 13:19:27 +01:00
Kevin Stillhammer
dc1c52622e Fix google_travel_time get_travel_times config_entry_id description (#162910)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-13 13:18:55 +01:00
peteS-UK
44d5ecc926 Replace repeated patches in config_flow_test with fixtures for Squeezebox (#153032) 2026-02-13 12:00:24 +01:00
Simone Chemelli
54b0393ebe Cleanup code for UptimeRobot (#162905) 2026-02-13 11:53:04 +01:00
epenet
54141ffd3f Drop yardian custom translation overrides in tests (#162904) 2026-02-13 10:57:17 +01:00
David Bonnes
92b823068c Move evohome service registration to services.py (#162902) 2026-02-13 10:25:03 +01:00
Norbert Rittel
d4a6377ab3 Fix capitalization of "Immich" and "MIME type" (#162900) 2026-02-13 10:00:39 +01:00
epenet
80d07c42ac Move evohome hasskey to const module (#162899) 2026-02-13 08:25:43 +00:00
puddly
077eeafa69 Bump ZHA to 0.0.90 (#162894) 2026-02-13 08:40:26 +01:00
dependabot[bot]
b6ff8c94b1 Bump docker/build-push-action from 6.19.1 to 6.19.2 (#162896)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-02-13 08:39:53 +01:00
Michael
6a1581f2bf Immich reached platinum 🏆 (#162891) 2026-02-13 07:45:56 +01:00
johanzander
2dc0d32a29 Implement automatic migration for Growatt Server DEFAULT_PLANT_ID entries (#159972) 2026-02-13 01:56:50 +01:00
Niracler
036696f4cd Add energy sensor platform to sunricher_dali (#161415)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-13 01:55:16 +01:00
Michael
89f5b33a5e Cache get api calls in FRITZ!Box tools (#160246)
Co-authored-by: Simone Chemelli <simone.chemelli@gmail.com>
2026-02-13 01:54:33 +01:00
Matthias Alphart
fc52885c21 Support KNX time server configuration from UI (#161854) 2026-02-13 01:52:38 +01:00
Ville Skyttä
ffa8fc583d Recorder total_increasing warning clarifications (#157453) 2026-02-13 01:47:51 +01:00
Samuel Xiao
f18fa07019 Switchbot Cloud: Add new supported device Ai Art Frame (#160754)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-13 01:47:03 +01:00
Alex Merkel
ce704dd5f7 Add play/pause ability & media info to LG soundbars integration (#161184)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-13 01:39:05 +01:00
Patrick Vorgers
d930755f92 IDrive e2 backup provider (#144910)
Co-authored-by: Josef Zweck <josef@zweck.dev>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-12 15:49:03 -08:00
epenet
196c6d9839 Do not unregister adguard services (#158308)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-13 00:40:30 +01:00
David Rapan
cce5358901 Re-implement Cloudflare using coordinator (#156817)
Signed-off-by: David Rapan <david@rapan.cz>
2026-02-13 00:33:48 +01:00
Heindrich Paul
df7c3d787d Only show trains for configured time if configured in nederlandse_spoorwegen (#159261) 2026-02-13 00:29:20 +01:00
Manu
a6287731f7 Increase test coverage in Xbox integration (#162876) 2026-02-12 15:14:07 -08:00
karwosts
1667b3f16b Add annual statistics aggregation (#160857) 2026-02-13 00:11:07 +01:00
Noah Husby
2aa9d22350 Add room correction setting to Cambridge Audio (#162743)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
2026-02-12 23:10:13 +00:00
Matthias Alphart
3bcb303ef1 Support KNX number entity configuration from UI (#161269) 2026-02-13 00:06:53 +01:00
Manu
e6de37cc69 Use service helper to retrieve config entry in Duck DNS integration (#162879) 2026-02-12 23:00:23 +00:00
Jon
d10f5cc9ea Expose power and energy sensors for vera metered switches (#161028) 2026-02-12 23:56:35 +01:00
Erwin Douna
4921f05189 Disable mobile devices in tado (#160881)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-12 23:56:01 +01:00
Brett Adams
877ad391f0 Add config flow to Splunk (#160478)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Erwin Douna <e.douna@gmail.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-12 23:27:49 +01:00
n-6
8a5594b9e4 Added Ambient Weather station sensors for AQIN monitor. (#161082)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-12 23:27:25 +01:00
Josef Zweck
a0623d1f97 Add IQS to openai_conversation (#161051)
Co-authored-by: Robert Resch <robert@resch.dev>
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
Co-authored-by: Matthias Alphart <farmio@alphart.net>
Co-authored-by: Erwin Douna <e.douna@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Erik Montnemery <erik@montnemery.com>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-12 23:26:27 +01:00
Michael
c8f8ef887a Add reconfigure flow to immich (#162892) 2026-02-12 23:25:51 +01:00
Eduardo Tsen
40ec6d3793 Add switch controls for dishwashers in SmartThings (#160266)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-12 22:57:54 +01:00
Matthias Alphart
0a79d84f9a KNX Expose: Add support for sending value periodically (#160883) 2026-02-12 22:51:40 +01:00
Przemko92
7a7e60ce75 Add number to Compit (#162165)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-12 22:49:34 +01:00
Kurt Chrisford
6bfaf6b188 Add action exception handling to Actron Air (#160579)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-12 22:46:53 +01:00
Kinachi249
34a445545c Cync - allow updating multiple attributes in one command (#159574)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-12 22:46:30 +01:00
epenet
3c854a7679 Improve type hints in utility_meter (#160993) 2026-02-12 22:46:13 +01:00
Florent Fourcot
b7b6c1a72e Add more Melcloud sensors (#160770)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-12 22:39:21 +01:00
Michael
fdf02cf657 Add missing exception translations in immich (#162889) 2026-02-12 22:32:22 +01:00
Kevin Stillhammer
acf739df81 add services to google_travel_time (#160740)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-12 22:31:36 +01:00
Tom Matheussen
4801dcaded Add parent device for Satel Integra (#160933)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-12 22:29:59 +01:00
Daniel Hjelseth Høyer
11af0a2d04 Add reauthentication flow to Homevolt (#162868)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-12 22:27:30 +01:00
Michel Nederlof
40b30b94a2 Adjust discovery interval in govee-light-local (#160914)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-12 21:56:23 +01:00
Daniel Hjelseth Høyer
902d3f45a2 Add diagnostics to Homevolt (#162873)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-12 21:56:12 +01:00
ryanjones-gentex
bf887fbc71 Add reauth flow to HomeLink integration (#158454)
Co-authored-by: Nicholas Aelick <niaexa@syntronic.com>
2026-02-12 21:51:26 +01:00
Michael
e5ede7deea Categorize all immich sensor entities as diagnostic (#162887) 2026-02-12 21:36:45 +01:00
Erwin Douna
8b674a44a1 Melcloud move ConfigEntry declaration (#160890)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-12 21:32:54 +01:00
Joost Lekkerkerker
e145963d48 Remove unused snapshots for Homevolt (#162885) 2026-02-12 21:32:41 +01:00
Simone Chemelli
1bca0ba5f8 Update UptimeRobot to API v3 (#153508) 2026-02-12 21:28:11 +01:00
Anders Ödlund
38531033a1 Catch AccessoryDisconnectedError in homekit pairing (#162466)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 20:51:09 +01:00
Xitee
9f1b6a12a5 Filter out transient zero values from qBittorrent alltime stats (#162821)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-12 18:45:42 +01:00
Daniel Hjelseth Høyer
876589f0cd Fix keys for Homevolt (#162874)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-12 18:42:50 +01:00
Przemko92
bd09ac9030 Add water heater support for Compit (#162021)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-12 18:39:02 +01:00
wollew
6d143c1ce2 add quality scale to manifest of velux integration (#162869) 2026-02-12 18:38:49 +01:00
Artur Pragacz
f4ceb22d73 Add analytics platform to mobile_app (#162736) 2026-02-12 17:09:40 +01:00
Manu
5839191c37 Move entity service registration to async_setup in ntfy integration (#162833) 2026-02-12 16:42:15 +01:00
Manu
29feccb190 Improve tests in Bring! integration (#162853)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2026-02-12 16:41:08 +01:00
epenet
a017417849 Use service helper to extract habitica config entry (#162795)
Co-authored-by: Manu <4445816+tr4nt0r@users.noreply.github.com>
2026-02-12 16:39:03 +01:00
Aron List
72a7d708b0 Expose ActuatorEnabled attr of matter DoorLock (#162598) 2026-02-12 16:03:30 +01:00
epenet
47be13e6bf Improve error validation in service tests (#162851) 2026-02-12 06:34:31 -08:00
ElCruncharino
7d583be8e1 Add timeout to B2 metadata downloads to prevent backup hang (#162562) 2026-02-12 06:26:50 -08:00
Manu
ccb3b35694 Use https for media player cover images in Xbox integration (#162859) 2026-02-12 05:59:28 -08:00
Steve Easley
48893d4daa Add JVC Projector switch platform (#161899)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-02-12 14:21:34 +01:00
Yoshi Walsh
0388e5dd7f Bump pydaikin to 2.17.2 (#162846) 2026-02-12 14:18:46 +01:00
Marc Hörsken
7a68903318 Bump pywmspro to 0.3.3 (#162832) 2026-02-12 14:18:19 +01:00
Anders Ödlund
64766100fe Add get_lock_usercode service to zwave_js integration (#162057)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-02-12 13:44:33 +01:00
Abílio Costa
0576dd91b7 Validate core_files.yaml base_platforms completeness (#162826) 2026-02-12 11:59:19 +00:00
Jon Seager
f4440e992f Bump pytouchlinesl to 0.6.0 (#162856) 2026-02-12 12:42:36 +01:00
Daniel Hjelseth Høyer
ea83b5a892 Add Homevolt battery integration (#160416)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-02-12 12:39:58 +01:00
Vicx
d148952c99 Bump slixmpp to 1.13.2 (#162837) 2026-02-12 11:48:48 +01:00
epenet
ed9a810908 Fix unavailable status in Tuya (#162709) 2026-02-12 11:46:40 +01:00
Peter Kolbus
6960cd6853 Bump pyweatherflowudp to 1.5.0 (#162841)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2026-02-12 11:43:01 +01:00
644 changed files with 31596 additions and 5533 deletions

View File

@@ -22,6 +22,7 @@ base_platforms: &base_platforms
- homeassistant/components/calendar/**
- homeassistant/components/camera/**
- homeassistant/components/climate/**
- homeassistant/components/conversation/**
- homeassistant/components/cover/**
- homeassistant/components/date/**
- homeassistant/components/datetime/**
@@ -53,6 +54,7 @@ base_platforms: &base_platforms
- homeassistant/components/update/**
- homeassistant/components/vacuum/**
- homeassistant/components/valve/**
- homeassistant/components/wake_word/**
- homeassistant/components/water_heater/**
- homeassistant/components/weather/**
@@ -70,7 +72,6 @@ components: &components
- homeassistant/components/cloud/**
- homeassistant/components/config/**
- homeassistant/components/configurator/**
- homeassistant/components/conversation/**
- homeassistant/components/demo/**
- homeassistant/components/device_automation/**
- homeassistant/components/dhcp/**

View File

@@ -31,6 +31,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -97,6 +99,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -165,7 +169,7 @@ jobs:
sed -i "s|home-assistant-intents==.*|home-assistant-intents==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt
sed -i "s|home-assistant-intents==.*||" requirements_all.txt
sed -i "s|home-assistant-intents==.*||" requirements_all.txt requirements.txt
fi
- name: Download translations
@@ -190,8 +194,7 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- &install_cosign
name: Install Cosign
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"
@@ -225,7 +228,7 @@ jobs:
- name: Build base image
id: build
uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6.19.1
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: .
file: ./Dockerfile
@@ -275,6 +278,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set build additional args
run: |
@@ -313,6 +318,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -355,7 +362,10 @@ jobs:
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- *install_cosign
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"
- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
@@ -476,6 +486,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -521,6 +533,8 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
@@ -530,7 +544,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Docker image
uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6.19.1
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile
@@ -543,7 +557,7 @@ jobs:
- name: Push Docker image
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
id: push
uses: docker/build-push-action@601a80b39c9405e50806ae38af30926f9d957c47 # v6.19.1
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
with:
context: . # So action will not pull the repository again
file: ./script/hassfest/docker/Dockerfile

File diff suppressed because it is too large Load Diff

View File

@@ -22,6 +22,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Initialize CodeQL
uses: github/codeql-action/init@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2

View File

@@ -20,6 +20,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0

View File

@@ -29,9 +29,10 @@ jobs:
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
steps:
- &checkout
name: Checkout the repository
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
@@ -74,7 +75,7 @@ jobs:
) > .env_file
- name: Upload env_file
uses: &actions-upload-artifact actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: env_file
path: ./.env_file
@@ -82,7 +83,7 @@ jobs:
overwrite: true
- name: Upload requirements_diff
uses: *actions-upload-artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: requirements_diff
path: ./requirements_diff.txt
@@ -94,7 +95,7 @@ jobs:
python -m script.gen_requirements_all ci
- name: Upload requirements_all_wheels
uses: *actions-upload-artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
@@ -106,7 +107,7 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix: &matrix-build
matrix:
abi: ["cp313", "cp314"]
arch: ["amd64", "aarch64"]
include:
@@ -115,17 +116,18 @@ jobs:
- arch: aarch64
os: ubuntu-24.04-arm
steps:
- *checkout
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- &download-env-file
name: Download env_file
uses: &actions-download-artifact actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
- name: Download env_file
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: env_file
- &download-requirements-diff
name: Download requirements_diff
uses: *actions-download-artifact
- name: Download requirements_diff
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: requirements_diff
@@ -136,7 +138,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: &home-assistant-wheels home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
uses: home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -156,16 +158,32 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix: *matrix-build
matrix:
abi: ["cp313", "cp314"]
arch: ["amd64", "aarch64"]
include:
- arch: amd64
os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
steps:
- *checkout
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- *download-env-file
- name: Download env_file
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: env_file
- *download-requirements-diff
- name: Download requirements_diff
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: requirements_diff
- name: Download requirements_all_wheels
uses: *actions-download-artifact
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: requirements_all_wheels
@@ -178,7 +196,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt
- name: Build wheels
uses: *home-assistant-wheels
uses: home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.15.0
rev: v0.15.1
hooks:
- id: ruff-check
args:

View File

@@ -84,6 +84,7 @@ homeassistant.components.androidtv_remote.*
homeassistant.components.anel_pwrctrl.*
homeassistant.components.anova.*
homeassistant.components.anthemav.*
homeassistant.components.anthropic.*
homeassistant.components.apache_kafka.*
homeassistant.components.apcupsd.*
homeassistant.components.api.*
@@ -242,6 +243,7 @@ homeassistant.components.guardian.*
homeassistant.components.habitica.*
homeassistant.components.hardkernel.*
homeassistant.components.hardware.*
homeassistant.components.hdfury.*
homeassistant.components.heos.*
homeassistant.components.here_travel_time.*
homeassistant.components.history.*
@@ -385,6 +387,7 @@ homeassistant.components.ohme.*
homeassistant.components.onboarding.*
homeassistant.components.oncue.*
homeassistant.components.onedrive.*
homeassistant.components.onedrive_for_business.*
homeassistant.components.onewire.*
homeassistant.components.onkyo.*
homeassistant.components.open_meteo.*

7
CODEOWNERS generated
View File

@@ -719,6 +719,8 @@ build.json @home-assistant/supervisor
/tests/components/homematic/ @pvizeli
/homeassistant/components/homematicip_cloud/ @hahn-th @lackas
/tests/components/homematicip_cloud/ @hahn-th @lackas
/homeassistant/components/homevolt/ @danielhiversen
/tests/components/homevolt/ @danielhiversen
/homeassistant/components/homewizard/ @DCSBL
/tests/components/homewizard/ @DCSBL
/homeassistant/components/honeywell/ @rdfurman @mkmer
@@ -760,6 +762,8 @@ build.json @home-assistant/supervisor
/tests/components/icloud/ @Quentame @nzapponi
/homeassistant/components/idasen_desk/ @abmantis
/tests/components/idasen_desk/ @abmantis
/homeassistant/components/idrive_e2/ @patrickvorgers
/tests/components/idrive_e2/ @patrickvorgers
/homeassistant/components/igloohome/ @keithle888
/tests/components/igloohome/ @keithle888
/homeassistant/components/ign_sismologia/ @exxamalte
@@ -1176,6 +1180,8 @@ build.json @home-assistant/supervisor
/tests/components/ondilo_ico/ @JeromeHXP
/homeassistant/components/onedrive/ @zweckj
/tests/components/onedrive/ @zweckj
/homeassistant/components/onedrive_for_business/ @zweckj
/tests/components/onedrive_for_business/ @zweckj
/homeassistant/components/onewire/ @garbled1 @epenet
/tests/components/onewire/ @garbled1 @epenet
/homeassistant/components/onkyo/ @arturpragacz @eclair4151
@@ -1569,6 +1575,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/tests/components/speedtestdotnet/ @rohankapoorcom @engrbm87
/homeassistant/components/splunk/ @Bre77
/tests/components/splunk/ @Bre77
/homeassistant/components/spotify/ @frenck @joostlek
/tests/components/spotify/ @frenck @joostlek
/homeassistant/components/sql/ @gjohansson-ST @dougiteixeira

View File

@@ -13,6 +13,7 @@
"microsoft",
"msteams",
"onedrive",
"onedrive_for_business",
"xbox"
]
}

View File

@@ -18,7 +18,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity, ActronAirZoneEntity
from .entity import ActronAirAcEntity, ActronAirZoneEntity, handle_actron_api_errors
PARALLEL_UPDATES = 0
@@ -136,16 +136,19 @@ class ActronSystemClimate(ActronAirAcEntity, ActronAirClimateEntity):
"""Return the target temperature."""
return self._status.user_aircon_settings.temperature_setpoint_cool_c
@handle_actron_api_errors
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set a new fan mode."""
api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode)
await self._status.user_aircon_settings.set_fan_mode(api_fan_mode)
@handle_actron_api_errors
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set the HVAC mode."""
ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode)
await self._status.ac_system.set_system_mode(ac_mode)
@handle_actron_api_errors
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the temperature."""
temp = kwargs.get(ATTR_TEMPERATURE)
@@ -209,11 +212,13 @@ class ActronZoneClimate(ActronAirZoneEntity, ActronAirClimateEntity):
"""Return the target temperature."""
return self._zone.temperature_setpoint_cool_c
@handle_actron_api_errors
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set the HVAC mode."""
is_enabled = hvac_mode != HVACMode.OFF
await self._zone.enable(is_enabled)
@handle_actron_api_errors
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set the temperature."""
await self._zone.set_temperature(temperature=kwargs.get(ATTR_TEMPERATURE))

View File

@@ -1,7 +1,12 @@
"""Base entity classes for Actron Air integration."""
from actron_neo_api import ActronAirZone
from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate
from actron_neo_api import ActronAirAPIError, ActronAirZone
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
@@ -9,6 +14,26 @@ from .const import DOMAIN
from .coordinator import ActronAirSystemCoordinator
def handle_actron_api_errors[_EntityT: ActronAirEntity, **_P](
func: Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, Any]],
) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]:
"""Decorate Actron Air API calls to handle ActronAirAPIError exceptions."""
@wraps(func)
async def wrapper(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None:
"""Wrap API calls with exception handling."""
try:
await func(self, *args, **kwargs)
except ActronAirAPIError as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",
translation_placeholders={"error": str(err)},
) from err
return wrapper
class ActronAirEntity(CoordinatorEntity[ActronAirSystemCoordinator]):
"""Base class for Actron Air entities."""

View File

@@ -26,7 +26,7 @@ rules:
unique-config-entry: done
# Silver
action-exceptions: todo
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters:
status: exempt

View File

@@ -49,6 +49,9 @@
}
},
"exceptions": {
"api_error": {
"message": "Failed to communicate with Actron Air device: {error}"
},
"auth_error": {
"message": "Authentication failed, please reauthenticate"
},

View File

@@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator
from .entity import ActronAirAcEntity
from .entity import ActronAirAcEntity, handle_actron_api_errors
PARALLEL_UPDATES = 0
@@ -105,10 +105,12 @@ class ActronAirSwitch(ActronAirAcEntity, SwitchEntity):
"""Return true if the switch is on."""
return self.entity_description.is_on_fn(self.coordinator)
@handle_actron_api_errors
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the switch on."""
await self.entity_description.set_fn(self.coordinator, True)
@handle_actron_api_errors
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the switch off."""
await self.entity_description.set_fn(self.coordinator, False)

View File

@@ -20,9 +20,10 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryNotReady, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType
from .const import (
CONF_FORCE,
@@ -45,6 +46,7 @@ SERVICE_REFRESH_SCHEMA = vol.Schema(
{vol.Optional(CONF_FORCE, default=False): cv.boolean}
)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
PLATFORMS = [Platform.SENSOR, Platform.SWITCH, Platform.UPDATE]
type AdGuardConfigEntry = ConfigEntry[AdGuardData]
@@ -57,6 +59,69 @@ class AdGuardData:
version: str
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the component."""
def _get_adguard_instances(hass: HomeAssistant) -> list[AdGuardHome]:
"""Get the AdGuardHome instances."""
entries: list[AdGuardConfigEntry] = hass.config_entries.async_loaded_entries(
DOMAIN
)
if not entries:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="config_entry_not_loaded"
)
return [entry.runtime_data.client for entry in entries]
async def add_url(call: ServiceCall) -> None:
"""Service call to add a new filter subscription to AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.add_url(
allowlist=False, name=call.data[CONF_NAME], url=call.data[CONF_URL]
)
async def remove_url(call: ServiceCall) -> None:
"""Service call to remove a filter subscription from AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.remove_url(allowlist=False, url=call.data[CONF_URL])
async def enable_url(call: ServiceCall) -> None:
"""Service call to enable a filter subscription in AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.enable_url(allowlist=False, url=call.data[CONF_URL])
async def disable_url(call: ServiceCall) -> None:
"""Service call to disable a filter subscription in AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.disable_url(
allowlist=False, url=call.data[CONF_URL]
)
async def refresh(call: ServiceCall) -> None:
"""Service call to refresh the filter subscriptions in AdGuard Home."""
for adguard in _get_adguard_instances(call.hass):
await adguard.filtering.refresh(
allowlist=False, force=call.data[CONF_FORCE]
)
hass.services.async_register(
DOMAIN, SERVICE_ADD_URL, add_url, schema=SERVICE_ADD_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REMOVE_URL, remove_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_ENABLE_URL, enable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_DISABLE_URL, disable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REFRESH, refresh, schema=SERVICE_REFRESH_SCHEMA
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
"""Set up AdGuard Home from a config entry."""
session = async_get_clientsession(hass, entry.data[CONF_VERIFY_SSL])
@@ -79,56 +144,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> b
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
async def add_url(call: ServiceCall) -> None:
"""Service call to add a new filter subscription to AdGuard Home."""
await adguard.filtering.add_url(
allowlist=False, name=call.data[CONF_NAME], url=call.data[CONF_URL]
)
async def remove_url(call: ServiceCall) -> None:
"""Service call to remove a filter subscription from AdGuard Home."""
await adguard.filtering.remove_url(allowlist=False, url=call.data[CONF_URL])
async def enable_url(call: ServiceCall) -> None:
"""Service call to enable a filter subscription in AdGuard Home."""
await adguard.filtering.enable_url(allowlist=False, url=call.data[CONF_URL])
async def disable_url(call: ServiceCall) -> None:
"""Service call to disable a filter subscription in AdGuard Home."""
await adguard.filtering.disable_url(allowlist=False, url=call.data[CONF_URL])
async def refresh(call: ServiceCall) -> None:
"""Service call to refresh the filter subscriptions in AdGuard Home."""
await adguard.filtering.refresh(allowlist=False, force=call.data[CONF_FORCE])
hass.services.async_register(
DOMAIN, SERVICE_ADD_URL, add_url, schema=SERVICE_ADD_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REMOVE_URL, remove_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_ENABLE_URL, enable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_DISABLE_URL, disable_url, schema=SERVICE_URL_SCHEMA
)
hass.services.async_register(
DOMAIN, SERVICE_REFRESH, refresh, schema=SERVICE_REFRESH_SCHEMA
)
return True
async def async_unload_entry(hass: HomeAssistant, entry: AdGuardConfigEntry) -> bool:
"""Unload AdGuard Home config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if not hass.config_entries.async_loaded_entries(DOMAIN):
# This is the last loaded instance of AdGuard, deregister any services
hass.services.async_remove(DOMAIN, SERVICE_ADD_URL)
hass.services.async_remove(DOMAIN, SERVICE_REMOVE_URL)
hass.services.async_remove(DOMAIN, SERVICE_ENABLE_URL)
hass.services.async_remove(DOMAIN, SERVICE_DISABLE_URL)
hass.services.async_remove(DOMAIN, SERVICE_REFRESH)
return unload_ok
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -76,6 +76,11 @@
}
}
},
"exceptions": {
"config_entry_not_loaded": {
"message": "Config entry not loaded."
}
},
"services": {
"add_url": {
"description": "Adds a new filter subscription to AdGuard Home.",

View File

@@ -1,4 +1,11 @@
{
"entity": {
"sensor": {
"voc_index": {
"default": "mdi:molecule"
}
}
},
"services": {
"send_info_skill": {
"service": "mdi:information"

View File

@@ -20,7 +20,13 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import LIGHT_LUX, UnitOfTemperature
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_MILLION,
LIGHT_LUX,
PERCENTAGE,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
@@ -77,6 +83,41 @@ SENSORS: Final = (
native_unit_of_measurement=LIGHT_LUX,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="PM10",
device_class=SensorDeviceClass.PM10,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="PM25",
device_class=SensorDeviceClass.PM25,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="CO",
device_class=SensorDeviceClass.CO,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="VOC",
# No device class as this is an index not a concentration
state_class=SensorStateClass.MEASUREMENT,
translation_key="voc_index",
),
AmazonSensorEntityDescription(
key="Air Quality",
device_class=SensorDeviceClass.AQI,
state_class=SensorStateClass.MEASUREMENT,
),
)
NOTIFICATIONS: Final = (
AmazonNotificationEntityDescription(

View File

@@ -104,7 +104,7 @@ async def _async_execute_action(call: ServiceCall, attribute: str) -> None:
translation_placeholders={"info_skill": value},
)
await coordinator.api.call_alexa_info_skill(
coordinator.data[device.serial_number], value
coordinator.data[device.serial_number], info_skill
)

View File

@@ -75,6 +75,9 @@
},
"timer": {
"name": "Next timer"
},
"voc_index": {
"name": "Volatile organic compounds index"
}
},
"switch": {

View File

@@ -33,13 +33,19 @@ from .const import ATTR_LAST_DATA, TYPE_SOLARRADIATION, TYPE_SOLARRADIATION_LX
from .entity import AmbientWeatherEntity
TYPE_24HOURRAININ = "24hourrainin"
TYPE_AQI_PM10_24H_AQIN = "aqi_pm10_24h_aqin"
TYPE_AQI_PM10_AQIN = "aqi_pm10_aqin"
TYPE_AQI_PM25 = "aqi_pm25"
TYPE_AQI_PM25_24H = "aqi_pm25_24h"
TYPE_AQI_PM25_24H_AQIN = "aqi_pm25_24h_aqin"
TYPE_AQI_PM25_AQIN = "aqi_pm25_aqin"
TYPE_AQI_PM25_IN = "aqi_pm25_in"
TYPE_AQI_PM25_IN_24H = "aqi_pm25_in_24h"
TYPE_BAROMABSIN = "baromabsin"
TYPE_BAROMRELIN = "baromrelin"
TYPE_CO2 = "co2"
TYPE_CO2_IN_24H_AQIN = "co2_in_24h_aqin"
TYPE_CO2_IN_AQIN = "co2_in_aqin"
TYPE_DAILYRAININ = "dailyrainin"
TYPE_DEWPOINT = "dewPoint"
TYPE_EVENTRAININ = "eventrainin"
@@ -57,17 +63,23 @@ TYPE_HUMIDITY7 = "humidity7"
TYPE_HUMIDITY8 = "humidity8"
TYPE_HUMIDITY9 = "humidity9"
TYPE_HUMIDITYIN = "humidityin"
TYPE_LASTLIGHTNING = "lightning_time"
TYPE_LASTLIGHTNING_DISTANCE = "lightning_distance"
TYPE_LASTRAIN = "lastRain"
TYPE_LIGHTNING_PER_DAY = "lightning_day"
TYPE_LIGHTNING_PER_HOUR = "lightning_hour"
TYPE_LASTLIGHTNING_DISTANCE = "lightning_distance"
TYPE_LASTLIGHTNING = "lightning_time"
TYPE_MAXDAILYGUST = "maxdailygust"
TYPE_MONTHLYRAININ = "monthlyrainin"
TYPE_PM_IN_HUMIDITY_AQIN = "pm_in_humidity_aqin"
TYPE_PM_IN_TEMP_AQIN = "pm_in_temp_aqin"
TYPE_PM10_IN_24H_AQIN = "pm10_in_24h_aqin"
TYPE_PM10_IN_AQIN = "pm10_in_aqin"
TYPE_PM25 = "pm25"
TYPE_PM25_24H = "pm25_24h"
TYPE_PM25_IN = "pm25_in"
TYPE_PM25_IN_24H = "pm25_in_24h"
TYPE_PM25_IN_24H_AQIN = "pm25_in_24h_aqin"
TYPE_PM25_IN_AQIN = "pm25_in_aqin"
TYPE_SOILHUM1 = "soilhum1"
TYPE_SOILHUM10 = "soilhum10"
TYPE_SOILHUM2 = "soilhum2"
@@ -78,8 +90,8 @@ TYPE_SOILHUM6 = "soilhum6"
TYPE_SOILHUM7 = "soilhum7"
TYPE_SOILHUM8 = "soilhum8"
TYPE_SOILHUM9 = "soilhum9"
TYPE_SOILTEMP1F = "soiltemp1f"
TYPE_SOILTEMP10F = "soiltemp10f"
TYPE_SOILTEMP1F = "soiltemp1f"
TYPE_SOILTEMP2F = "soiltemp2f"
TYPE_SOILTEMP3F = "soiltemp3f"
TYPE_SOILTEMP4F = "soiltemp4f"
@@ -143,6 +155,86 @@ SENSOR_DESCRIPTIONS = (
translation_key="pm25_indoor_aqi_24h_average",
device_class=SensorDeviceClass.AQI,
),
SensorEntityDescription(
key=TYPE_PM25_IN_AQIN,
translation_key="pm25_indoor_aqin",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_PM25_IN_24H_AQIN,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
translation_key="pm25_indoor_24h_aqin",
device_class=SensorDeviceClass.PM25,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_PM10_IN_AQIN,
translation_key="pm10_indoor_aqin",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM10,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_PM10_IN_24H_AQIN,
translation_key="pm10_indoor_24h_aqin",
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
device_class=SensorDeviceClass.PM10,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_CO2_IN_AQIN,
translation_key="co2_indoor_aqin",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_CO2_IN_24H_AQIN,
translation_key="co2_indoor_24h_aqin",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
device_class=SensorDeviceClass.CO2,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_PM_IN_TEMP_AQIN,
translation_key="pm_indoor_temp_aqin",
native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_PM_IN_HUMIDITY_AQIN,
translation_key="pm_indoor_humidity_aqin",
native_unit_of_measurement=PERCENTAGE,
device_class=SensorDeviceClass.HUMIDITY,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_AQI_PM25_AQIN,
translation_key="pm25_aqi_aqin",
device_class=SensorDeviceClass.AQI,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_AQI_PM25_24H_AQIN,
translation_key="pm25_aqi_24h_aqin",
device_class=SensorDeviceClass.AQI,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_AQI_PM10_AQIN,
translation_key="pm10_aqi_aqin",
device_class=SensorDeviceClass.AQI,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_AQI_PM10_24H_AQIN,
translation_key="pm10_aqi_24h_aqin",
device_class=SensorDeviceClass.AQI,
state_class=SensorStateClass.MEASUREMENT,
),
SensorEntityDescription(
key=TYPE_BAROMABSIN,
translation_key="absolute_pressure",

View File

@@ -156,6 +156,12 @@
"absolute_pressure": {
"name": "Absolute pressure"
},
"co2_indoor_24h_aqin": {
"name": "CO2 Indoor 24h Average AQIN"
},
"co2_indoor_aqin": {
"name": "CO2 Indoor AQIN"
},
"daily_rain": {
"name": "Daily rain"
},
@@ -228,18 +234,39 @@
"monthly_rain": {
"name": "Monthly rain"
},
"pm10_aqi_24h_aqin": {
"name": "PM10 Indoor AQI 24h Average AQIN"
},
"pm10_aqi_aqin": {
"name": "PM10 Indoor AQI AQIN"
},
"pm10_indoor_24h_aqin": {
"name": "PM10 Indoor 24h Average AQIN"
},
"pm10_indoor_aqin": {
"name": "PM10 Indoor AQIN"
},
"pm25_24h_average": {
"name": "PM2.5 24 hour average"
},
"pm25_aqi": {
"name": "PM2.5 AQI"
},
"pm25_aqi_24h_aqin": {
"name": "PM2.5 Indoor AQI 24h Average AQIN"
},
"pm25_aqi_24h_average": {
"name": "PM2.5 AQI 24 hour average"
},
"pm25_aqi_aqin": {
"name": "PM2.5 Indoor AQI AQIN"
},
"pm25_indoor": {
"name": "PM2.5 indoor"
},
"pm25_indoor_24h_aqin": {
"name": "PM2.5 Indoor 24h AQIN"
},
"pm25_indoor_24h_average": {
"name": "PM2.5 indoor 24 hour average"
},
@@ -249,6 +276,15 @@
"pm25_indoor_aqi_24h_average": {
"name": "PM2.5 indoor AQI"
},
"pm25_indoor_aqin": {
"name": "PM2.5 Indoor AQIN"
},
"pm_indoor_humidity_aqin": {
"name": "Indoor Humidity AQIN"
},
"pm_indoor_temp_aqin": {
"name": "Indoor Temperature AQIN"
},
"relative_pressure": {
"name": "Relative pressure"
},

View File

@@ -7,7 +7,7 @@ import anthropic
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
@@ -47,8 +47,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
try:
await client.models.list(timeout=10.0)
except anthropic.AuthenticationError as err:
LOGGER.error("Invalid API key: %s", err)
return False
raise ConfigEntryAuthFailed(err) from err
except anthropic.AnthropicError as err:
raise ConfigEntryNotReady(err) from err
@@ -77,7 +76,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
"""Unload Anthropic."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -105,7 +104,7 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
if not any(entry.version == 1 for entry in entries):
return
api_keys_entries: dict[str, tuple[ConfigEntry, bool]] = {}
api_keys_entries: dict[str, tuple[AnthropicConfigEntry, bool]] = {}
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)

View File

@@ -4,9 +4,9 @@ from __future__ import annotations
from json import JSONDecodeError
import logging
from typing import TYPE_CHECKING
from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -14,12 +14,15 @@ from homeassistant.util.json import json_loads
from .entity import AnthropicBaseLLMEntity
if TYPE_CHECKING:
from . import AnthropicConfigEntry
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AnthropicConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up AI Task entities."""
@@ -50,7 +53,9 @@ class AnthropicTaskEntity(
chat_log: conversation.ChatLog,
) -> ai_task.GenDataTaskResult:
"""Handle a generate data task."""
await self._async_handle_chat_log(chat_log, task.name, task.structure)
await self._async_handle_chat_log(
chat_log, task.name, task.structure, max_iterations=1000
)
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
raise HomeAssistantError(

View File

@@ -2,10 +2,11 @@
from __future__ import annotations
from collections.abc import Mapping
import json
import logging
import re
from typing import Any, cast
from typing import TYPE_CHECKING, Any, cast
import anthropic
import voluptuous as vol
@@ -13,7 +14,7 @@ from voluptuous_openapi import convert
from homeassistant.components.zone import ENTITY_ID_HOME
from homeassistant.config_entries import (
ConfigEntry,
SOURCE_REAUTH,
ConfigEntryState,
ConfigFlow,
ConfigFlowResult,
@@ -65,6 +66,9 @@ from .const import (
WEB_SEARCH_UNSUPPORTED_MODELS,
)
if TYPE_CHECKING:
from . import AnthropicConfigEntry
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
@@ -162,6 +166,10 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
if self.source == SOURCE_REAUTH:
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data_updates=user_input
)
return self.async_create_entry(
title="Claude",
data=user_input,
@@ -182,13 +190,34 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
errors=errors or None,
description_placeholders={
"instructions_url": "https://www.home-assistant.io/integrations/anthropic/#generating-an-api-key",
},
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Dialog that informs the user that reauth is required."""
if not user_input:
return self.async_show_form(
step_id="reauth_confirm", data_schema=STEP_USER_DATA_SCHEMA
)
return await self.async_step_user(user_input)
@classmethod
@callback
def async_get_supported_subentry_types(
cls, config_entry: ConfigEntry
cls, config_entry: AnthropicConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {

View File

@@ -599,6 +599,7 @@ class AnthropicBaseLLMEntity(Entity):
chat_log: conversation.ChatLog,
structure_name: str | None = None,
structure: vol.Schema | None = None,
max_iterations: int = MAX_TOOL_ITERATIONS,
) -> None:
"""Generate an answer for the chat log."""
options = self.subentry.data
@@ -770,7 +771,7 @@ class AnthropicBaseLLMEntity(Entity):
client = self.entry.runtime_data
# To prevent infinite loops, we limit the number of iterations
for _iteration in range(MAX_TOOL_ITERATIONS):
for _iteration in range(max_iterations):
try:
stream = await client.messages.create(**model_args)

View File

@@ -3,13 +3,13 @@
from __future__ import annotations
from collections.abc import Iterator
from typing import cast
from typing import TYPE_CHECKING, cast
import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.components.repairs import RepairsFlow
from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigSubentry
from homeassistant.config_entries import ConfigEntryState, ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
@@ -23,6 +23,9 @@ from .const import (
DOMAIN,
)
if TYPE_CHECKING:
from . import AnthropicConfigEntry
class ModelDeprecatedRepairFlow(RepairsFlow):
"""Handler for an issue fixing flow."""
@@ -110,7 +113,7 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
async def _async_next_target(
self,
) -> tuple[ConfigEntry, ConfigSubentry, str] | None:
) -> tuple[AnthropicConfigEntry, ConfigSubentry, str] | None:
"""Return the next deprecated subentry target."""
if self._subentry_iter is None:
self._subentry_iter = self._iter_deprecated_subentries()

View File

@@ -1,7 +1,8 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
},
"error": {
"authentication_error": "[%key:common::config_flow::error::invalid_auth%]",
@@ -10,10 +11,23 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"reauth_confirm": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"data_description": {
"api_key": "[%key:component::anthropic::config::step::user::data_description::api_key%]"
},
"description": "Reauthentication required. Please enter your updated API key."
},
"user": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
}
},
"data_description": {
"api_key": "Your Anthropic API key."
},
"description": "Set up Anthropic integration by providing your Anthropic API key. Instructions to obtain an API key can be found in [the documentation]({instructions_url})."
}
}
},
@@ -35,6 +49,11 @@
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
},
"data_description": {
"chat_model": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::chat_model%]",
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::max_tokens%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::temperature%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
},
"init": {
@@ -42,6 +61,10 @@
"name": "[%key:common::config_flow::data::name%]",
"recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
},
"data_description": {
"name": "[%key:component::anthropic::config_subentries::conversation::step::init::data_description::name%]",
"recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data_description::recommended%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
},
"model": {
@@ -80,6 +103,11 @@
"max_tokens": "Maximum tokens to return in response",
"temperature": "Temperature"
},
"data_description": {
"chat_model": "The model to serve the responses.",
"max_tokens": "Limit the number of response tokens.",
"temperature": "Control the randomness of the response, trading off between creativity and coherence."
},
"title": "Advanced settings"
},
"init": {
@@ -90,7 +118,10 @@
"recommended": "Recommended model settings"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
"llm_hass_api": "Allow the LLM to control Home Assistant.",
"name": "The name of this configuration",
"prompt": "Instruct how the LLM should respond. This can be a template.",
"recommended": "Use default configuration"
},
"title": "Basic settings"
},
@@ -122,6 +153,9 @@
"data": {
"chat_model": "[%key:common::generic::model%]"
},
"data_description": {
"chat_model": "Select the new model to use."
},
"description": "You are updating {subentry_name} ({subentry_type}) in {entry_name}. The current model {model} is deprecated. Select a supported model to continue.",
"title": "Update model"
}

View File

@@ -5,7 +5,7 @@ import functools
import json
import logging
from time import time
from typing import Any
from typing import Any, cast
from botocore.exceptions import BotoCoreError
@@ -189,48 +189,68 @@ class S3BackupAgent(BackupAgent):
)
upload_id = multipart_upload["UploadId"]
try:
parts = []
parts: list[dict[str, Any]] = []
part_number = 1
buffer_size = 0 # bytes
buffer: list[bytes] = []
buffer = bytearray() # bytes buffer to store the data
offset = 0 # start index of unread data inside buffer
stream = await open_stream()
async for chunk in stream:
buffer_size += len(chunk)
buffer.append(chunk)
buffer.extend(chunk)
# If buffer size meets minimum part size, upload it as a part
if buffer_size >= MULTIPART_MIN_PART_SIZE_BYTES:
_LOGGER.debug(
"Uploading part number %d, size %d", part_number, buffer_size
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
buffer_size = 0
buffer = []
# Upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
# all non-trailing parts have the same size (defensive implementation)
view = memoryview(buffer)
try:
while len(buffer) - offset >= MULTIPART_MIN_PART_SIZE_BYTES:
start = offset
end = offset + MULTIPART_MIN_PART_SIZE_BYTES
part_data = view[start:end]
offset = end
_LOGGER.debug(
"Uploading part number %d, size %d",
part_number,
len(part_data),
)
part = await cast(Any, self._client).upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=part_data.tobytes(),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
finally:
view.release()
# Compact the buffer if the consumed offset has grown large enough. This
# avoids unnecessary memory copies when compacting after every part upload.
if offset and offset >= MULTIPART_MIN_PART_SIZE_BYTES:
buffer = bytearray(buffer[offset:])
offset = 0
# Upload the final buffer as the last part (no minimum size requirement)
if buffer:
# Offset should be 0 after the last compaction, but we use it as the start
# index to be defensive in case the buffer was not compacted.
if offset < len(buffer):
remaining_data = memoryview(buffer)[offset:]
_LOGGER.debug(
"Uploading final part number %d, size %d", part_number, buffer_size
"Uploading final part number %d, size %d",
part_number,
len(remaining_data),
)
part = await self._client.upload_part(
part = await cast(Any, self._client).upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
Body=remaining_data.tobytes(),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
await self._client.complete_multipart_upload(
await cast(Any, self._client).complete_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
UploadId=upload_id,

View File

@@ -16,12 +16,18 @@ CONNECTION_TIMEOUT = 120 # 2 minutes
# Default TIMEOUT_FOR_UPLOAD is 128 seconds, which is too short for large backups
TIMEOUT_FOR_UPLOAD = 43200 # 12 hours
# Reduced retry count for download operations
# Default is 20 retries with exponential backoff, which can hang for 30+ minutes
# when there are persistent connection errors (e.g., SSL failures)
TRY_COUNT_DOWNLOAD = 3
class B2Http(BaseB2Http): # type: ignore[misc]
"""B2Http with extended timeouts for backup operations."""
CONNECTION_TIMEOUT = CONNECTION_TIMEOUT
TIMEOUT_FOR_UPLOAD = TIMEOUT_FOR_UPLOAD
TRY_COUNT_DOWNLOAD = TRY_COUNT_DOWNLOAD
class B2Session(BaseB2Session): # type: ignore[misc]

View File

@@ -40,6 +40,10 @@ CACHE_TTL = 300
# This prevents uploads from hanging indefinitely
UPLOAD_TIMEOUT = 43200 # 12 hours (matches B2 HTTP timeout)
# Timeout for metadata download operations (in seconds)
# This prevents the backup system from hanging when B2 connections fail
METADATA_DOWNLOAD_TIMEOUT = 60
def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
"""Return the suggested filenames for the backup and metadata files."""
@@ -413,12 +417,21 @@ class BackblazeBackupAgent(BackupAgent):
backups = {}
for file_name, file_version in all_files_in_prefix.items():
if file_name.endswith(METADATA_FILE_SUFFIX):
backup = await self._hass.async_add_executor_job(
self._process_metadata_file_sync,
file_name,
file_version,
all_files_in_prefix,
)
try:
backup = await asyncio.wait_for(
self._hass.async_add_executor_job(
self._process_metadata_file_sync,
file_name,
file_version,
all_files_in_prefix,
),
timeout=METADATA_DOWNLOAD_TIMEOUT,
)
except TimeoutError:
_LOGGER.warning(
"Timeout downloading metadata file %s", file_name
)
continue
if backup:
backups[backup.backup_id] = backup
self._backup_list_cache = backups
@@ -442,10 +455,18 @@ class BackblazeBackupAgent(BackupAgent):
if not file or not metadata_file_version:
raise BackupNotFound(f"Backup {backup_id} not found")
metadata_content = await self._hass.async_add_executor_job(
self._download_and_parse_metadata_sync,
metadata_file_version,
)
try:
metadata_content = await asyncio.wait_for(
self._hass.async_add_executor_job(
self._download_and_parse_metadata_sync,
metadata_file_version,
),
timeout=METADATA_DOWNLOAD_TIMEOUT,
)
except TimeoutError:
raise BackupAgentError(
f"Timeout downloading metadata for backup {backup_id}"
) from None
_LOGGER.debug(
"Successfully retrieved metadata for backup ID %s from file %s",
@@ -468,16 +489,27 @@ class BackblazeBackupAgent(BackupAgent):
# Process metadata files sequentially to avoid exhausting executor pool
for file_name, file_version in all_files_in_prefix.items():
if file_name.endswith(METADATA_FILE_SUFFIX):
(
result_backup_file,
result_metadata_file_version,
) = await self._hass.async_add_executor_job(
self._process_metadata_file_for_id_sync,
file_name,
file_version,
backup_id,
all_files_in_prefix,
)
try:
(
result_backup_file,
result_metadata_file_version,
) = await asyncio.wait_for(
self._hass.async_add_executor_job(
self._process_metadata_file_for_id_sync,
file_name,
file_version,
backup_id,
all_files_in_prefix,
),
timeout=METADATA_DOWNLOAD_TIMEOUT,
)
except TimeoutError:
_LOGGER.warning(
"Timeout downloading metadata file %s while searching for backup %s",
file_name,
backup_id,
)
continue
if result_backup_file and result_metadata_file_version:
return result_backup_file, result_metadata_file_version

View File

@@ -29,6 +29,9 @@
"state": {
"off": "mdi:volume-low"
}
},
"room_correction": {
"default": "mdi:arrow-oscillating"
}
}
}

View File

@@ -62,6 +62,9 @@
},
"pre_amp": {
"name": "Pre-Amp"
},
"room_correction": {
"name": "Room correction"
}
}
},

View File

@@ -1,8 +1,8 @@
"""Support for Cambridge Audio switch entities."""
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import Any
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any
from aiostreammagic import StreamMagicClient
@@ -21,10 +21,18 @@ PARALLEL_UPDATES = 0
class CambridgeAudioSwitchEntityDescription(SwitchEntityDescription):
"""Describes Cambridge Audio switch entity."""
load_fn: Callable[[StreamMagicClient], bool] = field(default=lambda _: True)
value_fn: Callable[[StreamMagicClient], bool]
set_value_fn: Callable[[StreamMagicClient, bool], Awaitable[None]]
def room_correction_enabled(client: StreamMagicClient) -> bool:
"""Check if room correction is enabled."""
if TYPE_CHECKING:
assert client.audio.tilt_eq is not None
return client.audio.tilt_eq.enabled
CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] = (
CambridgeAudioSwitchEntityDescription(
key="pre_amp",
@@ -40,6 +48,14 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] = (
value_fn=lambda client: client.update.early_update,
set_value_fn=lambda client, value: client.set_early_update(value),
),
CambridgeAudioSwitchEntityDescription(
key="room_correction",
translation_key="room_correction",
entity_category=EntityCategory.CONFIG,
load_fn=lambda client: client.audio.tilt_eq is not None,
value_fn=room_correction_enabled,
set_value_fn=lambda client, value: client.set_room_correction_mode(value),
),
)
@@ -49,9 +65,11 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Cambridge Audio switch entities based on a config entry."""
client: StreamMagicClient = entry.runtime_data
async_add_entities(
CambridgeAudioSwitch(entry.runtime_data, description)
for description in CONTROL_ENTITIES
if description.load_fn(client)
)

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.13.0", "openai==2.15.0"],
"requirements": ["hass-nabucasa==1.13.0", "openai==2.21.0"],
"single_config_entry": true
}

View File

@@ -2,86 +2,23 @@
from __future__ import annotations
import asyncio
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
import socket
import pycfdns
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_TOKEN, CONF_ZONE
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
HomeAssistantError,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.util.location import async_detect_location_info
from homeassistant.util.network import is_ipv4_address
from .const import CONF_RECORDS, DEFAULT_UPDATE_INTERVAL, DOMAIN, SERVICE_UPDATE_RECORDS
_LOGGER = logging.getLogger(__name__)
type CloudflareConfigEntry = ConfigEntry[CloudflareRuntimeData]
@dataclass
class CloudflareRuntimeData:
"""Runtime data for Cloudflare config entry."""
client: pycfdns.Client
dns_zone: pycfdns.ZoneModel
from .const import DOMAIN, SERVICE_UPDATE_RECORDS
from .coordinator import CloudflareConfigEntry, CloudflareCoordinator
async def async_setup_entry(hass: HomeAssistant, entry: CloudflareConfigEntry) -> bool:
"""Set up Cloudflare from a config entry."""
session = async_get_clientsession(hass)
client = pycfdns.Client(
api_token=entry.data[CONF_API_TOKEN],
client_session=session,
)
entry.runtime_data = CloudflareCoordinator(hass, entry)
await entry.runtime_data.async_config_entry_first_refresh()
try:
dns_zones = await client.list_zones()
dns_zone = next(
zone for zone in dns_zones if zone["name"] == entry.data[CONF_ZONE]
)
except pycfdns.AuthenticationException as error:
raise ConfigEntryAuthFailed from error
except pycfdns.ComunicationException as error:
raise ConfigEntryNotReady from error
# Since we are not using coordinator for data reads, we need to add dummy listener
entry.async_on_unload(entry.runtime_data.async_add_listener(lambda: None))
entry.runtime_data = CloudflareRuntimeData(client, dns_zone)
async def update_records(now: datetime) -> None:
"""Set up recurring update."""
try:
await _async_update_cloudflare(hass, entry)
except (
pycfdns.AuthenticationException,
pycfdns.ComunicationException,
) as error:
_LOGGER.error("Error updating zone %s: %s", entry.data[CONF_ZONE], error)
async def update_records_service(call: ServiceCall) -> None:
async def update_records_service(_: ServiceCall) -> None:
"""Set up service for manual trigger."""
try:
await _async_update_cloudflare(hass, entry)
except (
pycfdns.AuthenticationException,
pycfdns.ComunicationException,
) as error:
_LOGGER.error("Error updating zone %s: %s", entry.data[CONF_ZONE], error)
update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)
entry.async_on_unload(
async_track_time_interval(hass, update_records, update_interval)
)
await entry.runtime_data.async_request_refresh()
hass.services.async_register(DOMAIN, SERVICE_UPDATE_RECORDS, update_records_service)
@@ -92,49 +29,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: CloudflareConfigEntry)
"""Unload Cloudflare config entry."""
return True
async def _async_update_cloudflare(
hass: HomeAssistant,
entry: CloudflareConfigEntry,
) -> None:
client = entry.runtime_data.client
dns_zone = entry.runtime_data.dns_zone
target_records: list[str] = entry.data[CONF_RECORDS]
_LOGGER.debug("Starting update for zone %s", dns_zone["name"])
records = await client.list_dns_records(zone_id=dns_zone["id"], type="A")
_LOGGER.debug("Records: %s", records)
session = async_get_clientsession(hass, family=socket.AF_INET)
location_info = await async_detect_location_info(session)
if not location_info or not is_ipv4_address(location_info.ip):
raise HomeAssistantError("Could not get external IPv4 address")
filtered_records = [
record
for record in records
if record["name"] in target_records and record["content"] != location_info.ip
]
if len(filtered_records) == 0:
_LOGGER.debug("All target records are up to date")
return
await asyncio.gather(
*[
client.update_dns_record(
zone_id=dns_zone["id"],
record_id=record["id"],
record_content=location_info.ip,
record_name=record["name"],
record_type=record["type"],
record_proxied=record["proxied"],
)
for record in filtered_records
]
)
_LOGGER.debug("Update for zone %s is complete", dns_zone["name"])

View File

@@ -0,0 +1,116 @@
"""Contains the Coordinator for updating the IP addresses of your Cloudflare DNS records."""
from __future__ import annotations
import asyncio
from datetime import timedelta
from logging import getLogger
import socket
import pycfdns
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_TOKEN, CONF_ZONE
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.location import async_detect_location_info
from homeassistant.util.network import is_ipv4_address
from .const import CONF_RECORDS, DEFAULT_UPDATE_INTERVAL
_LOGGER = getLogger(__name__)
type CloudflareConfigEntry = ConfigEntry[CloudflareCoordinator]
class CloudflareCoordinator(DataUpdateCoordinator[None]):
"""Coordinates records updates."""
config_entry: CloudflareConfigEntry
client: pycfdns.Client
zone: pycfdns.ZoneModel
def __init__(
self, hass: HomeAssistant, config_entry: CloudflareConfigEntry
) -> None:
"""Initialize an coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=config_entry.title,
update_interval=timedelta(minutes=DEFAULT_UPDATE_INTERVAL),
)
async def _async_setup(self) -> None:
"""Set up the coordinator."""
self.client = pycfdns.Client(
api_token=self.config_entry.data[CONF_API_TOKEN],
client_session=async_get_clientsession(self.hass),
)
try:
self.zone = next(
zone
for zone in await self.client.list_zones()
if zone["name"] == self.config_entry.data[CONF_ZONE]
)
except pycfdns.AuthenticationException as e:
raise ConfigEntryAuthFailed from e
except pycfdns.ComunicationException as e:
raise UpdateFailed("Error communicating with API") from e
async def _async_update_data(self) -> None:
"""Update records."""
_LOGGER.debug("Starting update for zone %s", self.zone["name"])
try:
records = await self.client.list_dns_records(
zone_id=self.zone["id"], type="A"
)
_LOGGER.debug("Records: %s", records)
target_records: list[str] = self.config_entry.data[CONF_RECORDS]
location_info = await async_detect_location_info(
async_get_clientsession(self.hass, family=socket.AF_INET)
)
if not location_info or not is_ipv4_address(location_info.ip):
raise UpdateFailed("Could not get external IPv4 address")
filtered_records = [
record
for record in records
if record["name"] in target_records
and record["content"] != location_info.ip
]
if len(filtered_records) == 0:
_LOGGER.debug("All target records are up to date")
return
await asyncio.gather(
*[
self.client.update_dns_record(
zone_id=self.zone["id"],
record_id=record["id"],
record_content=location_info.ip,
record_name=record["name"],
record_type=record["type"],
record_proxied=record["proxied"],
)
for record in filtered_records
]
)
_LOGGER.debug("Update for zone %s is complete", self.zone["name"])
except (
pycfdns.AuthenticationException,
pycfdns.ComunicationException,
) as e:
raise UpdateFailed(
f"Error updating zone {self.config_entry.data[CONF_ZONE]}"
) from e

View File

@@ -11,7 +11,9 @@ from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
PLATFORMS = [
Platform.CLIMATE,
Platform.NUMBER,
Platform.SELECT,
Platform.WATER_HEATER,
]

View File

@@ -1,5 +1,43 @@
{
"entity": {
"number": {
"boiler_target_temperature": {
"default": "mdi:water-boiler"
},
"boiler_target_temperature_const": {
"default": "mdi:water-boiler"
},
"heating_target_temperature_const": {
"default": "mdi:radiator"
},
"mixer_target_temperature": {
"default": "mdi:valve"
},
"mixer_target_temperature_zone": {
"default": "mdi:valve"
},
"target_temperature_comfort": {
"default": "mdi:thermometer"
},
"target_temperature_const": {
"default": "mdi:thermometer-lines"
},
"target_temperature_eco": {
"default": "mdi:leaf"
},
"target_temperature_eco_cooling": {
"default": "mdi:snowflake-thermometer"
},
"target_temperature_eco_winter": {
"default": "mdi:thermometer"
},
"target_temperature_holiday": {
"default": "mdi:beach"
},
"target_temperature_out_of_home": {
"default": "mdi:thermometer-off"
}
},
"select": {
"aero_by_pass": {
"default": "mdi:valve",

View File

@@ -0,0 +1,339 @@
"""Number platform for Compit integration."""
from dataclasses import dataclass
from compit_inext_api.consts import CompitParameter
from homeassistant.components.number import (
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
NumberMode,
)
from homeassistant.const import EntityCategory, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER_NAME
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class CompitDeviceDescription:
"""Class to describe a Compit device."""
name: str
"""Name of the device."""
parameters: list[NumberEntityDescription]
"""Parameters of the device."""
DESCRIPTIONS: dict[CompitParameter, NumberEntityDescription] = {
CompitParameter.TARGET_TEMPERATURE_COMFORT: NumberEntityDescription(
key=CompitParameter.TARGET_TEMPERATURE_COMFORT.value,
translation_key="target_temperature_comfort",
native_min_value=0,
native_max_value=40,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.TARGET_TEMPERATURE_ECO_WINTER: NumberEntityDescription(
key=CompitParameter.TARGET_TEMPERATURE_ECO_WINTER.value,
translation_key="target_temperature_eco_winter",
native_min_value=0,
native_max_value=40,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.TARGET_TEMPERATURE_ECO_COOLING: NumberEntityDescription(
key=CompitParameter.TARGET_TEMPERATURE_ECO_COOLING.value,
translation_key="target_temperature_eco_cooling",
native_min_value=0,
native_max_value=40,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.TARGET_TEMPERATURE_OUT_OF_HOME: NumberEntityDescription(
key=CompitParameter.TARGET_TEMPERATURE_OUT_OF_HOME.value,
translation_key="target_temperature_out_of_home",
native_min_value=0,
native_max_value=40,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.TARGET_TEMPERATURE_ECO: NumberEntityDescription(
key=CompitParameter.TARGET_TEMPERATURE_ECO.value,
translation_key="target_temperature_eco",
native_min_value=0,
native_max_value=40,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.TARGET_TEMPERATURE_HOLIDAY: NumberEntityDescription(
key=CompitParameter.TARGET_TEMPERATURE_HOLIDAY.value,
translation_key="target_temperature_holiday",
native_min_value=0,
native_max_value=40,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.TARGET_TEMPERATURE_CONST: NumberEntityDescription(
key=CompitParameter.TARGET_TEMPERATURE_CONST.value,
translation_key="target_temperature_const",
native_min_value=0,
native_max_value=95,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.HEATING_TARGET_TEMPERATURE_CONST: NumberEntityDescription(
key=CompitParameter.HEATING_TARGET_TEMPERATURE_CONST.value,
translation_key="heating_target_temperature_const",
native_min_value=0,
native_max_value=95,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.MIXER_TARGET_TEMPERATURE: NumberEntityDescription(
key=CompitParameter.MIXER_TARGET_TEMPERATURE.value,
translation_key="mixer_target_temperature",
native_min_value=0,
native_max_value=90,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.MIXER1_TARGET_TEMPERATURE: NumberEntityDescription(
key=CompitParameter.MIXER1_TARGET_TEMPERATURE.value,
translation_key="mixer_target_temperature_zone",
native_min_value=0,
native_max_value=95,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
translation_placeholders={"zone": "1"},
),
CompitParameter.MIXER2_TARGET_TEMPERATURE: NumberEntityDescription(
key=CompitParameter.MIXER2_TARGET_TEMPERATURE.value,
translation_key="mixer_target_temperature_zone",
native_min_value=0,
native_max_value=95,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
translation_placeholders={"zone": "2"},
),
CompitParameter.BOILER_TARGET_TEMPERATURE: NumberEntityDescription(
key=CompitParameter.BOILER_TARGET_TEMPERATURE.value,
translation_key="boiler_target_temperature",
native_min_value=0,
native_max_value=95,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
CompitParameter.BOILER_TARGET_TEMPERATURE_CONST: NumberEntityDescription(
key=CompitParameter.BOILER_TARGET_TEMPERATURE_CONST.value,
translation_key="boiler_target_temperature_const",
native_min_value=0,
native_max_value=90,
native_step=0.1,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=NumberDeviceClass.TEMPERATURE,
mode=NumberMode.SLIDER,
entity_category=EntityCategory.CONFIG,
),
}
DEVICE_DEFINITIONS: dict[int, CompitDeviceDescription] = {
7: CompitDeviceDescription(
name="Nano One",
parameters=[
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_COMFORT],
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO],
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_HOLIDAY],
],
),
12: CompitDeviceDescription(
name="Nano Color",
parameters=[
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_COMFORT],
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO_WINTER],
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO_COOLING],
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_OUT_OF_HOME],
],
),
223: CompitDeviceDescription(
name="Nano Color 2",
parameters=[
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_COMFORT],
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO_WINTER],
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_ECO_COOLING],
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_OUT_OF_HOME],
],
),
3: CompitDeviceDescription(
name="R810",
parameters=[
DESCRIPTIONS[CompitParameter.TARGET_TEMPERATURE_CONST],
],
),
34: CompitDeviceDescription(
name="r470",
parameters=[
DESCRIPTIONS[CompitParameter.HEATING_TARGET_TEMPERATURE_CONST],
],
),
221: CompitDeviceDescription(
name="R350.M",
parameters=[
DESCRIPTIONS[CompitParameter.MIXER_TARGET_TEMPERATURE],
],
),
91: CompitDeviceDescription(
name="R770RS / R771RS",
parameters=[
DESCRIPTIONS[CompitParameter.MIXER1_TARGET_TEMPERATURE],
DESCRIPTIONS[CompitParameter.MIXER2_TARGET_TEMPERATURE],
],
),
212: CompitDeviceDescription(
name="BioMax742",
parameters=[
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE],
],
),
210: CompitDeviceDescription(
name="EL750",
parameters=[
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE],
],
),
36: CompitDeviceDescription(
name="BioMax742",
parameters=[
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE_CONST],
],
),
75: CompitDeviceDescription(
name="BioMax772",
parameters=[
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE_CONST],
],
),
201: CompitDeviceDescription(
name="BioMax775",
parameters=[
DESCRIPTIONS[CompitParameter.BOILER_TARGET_TEMPERATURE_CONST],
],
),
}
async def async_setup_entry(
hass: HomeAssistant,
entry: CompitConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Compit number entities from a config entry."""
coordinator = entry.runtime_data
async_add_entities(
CompitNumber(
coordinator,
device_id,
device_definition.name,
entity_description,
)
for device_id, device in coordinator.connector.all_devices.items()
if (device_definition := DEVICE_DEFINITIONS.get(device.definition.code))
for entity_description in device_definition.parameters
)
class CompitNumber(CoordinatorEntity[CompitDataUpdateCoordinator], NumberEntity):
"""Representation of a Compit number entity."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: CompitDataUpdateCoordinator,
device_id: int,
device_name: str,
entity_description: NumberEntityDescription,
) -> None:
"""Initialize the number entity."""
super().__init__(coordinator)
self.device_id = device_id
self.entity_description = entity_description
self._attr_unique_id = f"{device_id}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, str(device_id))},
name=device_name,
manufacturer=MANUFACTURER_NAME,
model=device_name,
)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
super().available
and self.coordinator.connector.get_device(self.device_id) is not None
)
@property
def native_value(self) -> float | None:
"""Return the current value."""
value = self.coordinator.connector.get_current_value(
self.device_id, CompitParameter(self.entity_description.key)
)
if value is None or isinstance(value, str):
return None
return value
async def async_set_native_value(self, value: float) -> None:
"""Set new value."""
await self.coordinator.connector.set_device_parameter(
self.device_id, CompitParameter(self.entity_description.key), value
)
self.async_write_ha_state()

View File

@@ -33,6 +33,44 @@
}
},
"entity": {
"number": {
"boiler_target_temperature": {
"name": "Boiler target temperature"
},
"boiler_target_temperature_const": {
"name": "Constant boiler target temperature"
},
"heating_target_temperature_const": {
"name": "Constant heating target temperature"
},
"mixer_target_temperature": {
"name": "Mixer target temperature"
},
"mixer_target_temperature_zone": {
"name": "Mixer {zone} target temperature"
},
"target_temperature_comfort": {
"name": "Target comfort temperature"
},
"target_temperature_const": {
"name": "Constant target temperature"
},
"target_temperature_eco": {
"name": "Target eco temperature"
},
"target_temperature_eco_cooling": {
"name": "Target eco cooling temperature"
},
"target_temperature_eco_winter": {
"name": "Target eco winter temperature"
},
"target_temperature_holiday": {
"name": "Target holiday temperature"
},
"target_temperature_out_of_home": {
"name": "Target out of home temperature"
}
},
"select": {
"aero_by_pass": {
"name": "Bypass",

View File

@@ -0,0 +1,315 @@
"""Water heater platform for Compit integration."""
from dataclasses import dataclass
from typing import Any
from compit_inext_api.consts import CompitParameter
from propcache.api import cached_property
from homeassistant.components.water_heater import (
STATE_ECO,
STATE_OFF,
STATE_ON,
STATE_PERFORMANCE,
WaterHeaterEntity,
WaterHeaterEntityDescription,
WaterHeaterEntityFeature,
)
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_WHOLE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER_NAME
from .coordinator import CompitConfigEntry, CompitDataUpdateCoordinator
PARALLEL_UPDATES = 0
STATE_SCHEDULE = "schedule"
COMPIT_STATE_TO_HA = {
STATE_OFF: STATE_OFF,
STATE_ON: STATE_PERFORMANCE,
STATE_SCHEDULE: STATE_ECO,
}
HA_STATE_TO_COMPIT = {value: key for key, value in COMPIT_STATE_TO_HA.items()}
@dataclass(frozen=True, kw_only=True)
class CompitWaterHeaterEntityDescription(WaterHeaterEntityDescription):
"""Class to describe a Compit water heater device."""
min_temp: float
max_temp: float
supported_features: WaterHeaterEntityFeature
supports_current_temperature: bool = True
DEVICE_DEFINITIONS: dict[int, CompitWaterHeaterEntityDescription] = {
34: CompitWaterHeaterEntityDescription(
key="r470",
min_temp=0.0,
max_temp=75.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
91: CompitWaterHeaterEntityDescription(
key="R770RS / R771RS",
min_temp=30.0,
max_temp=80.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
92: CompitWaterHeaterEntityDescription(
key="r490",
min_temp=30.0,
max_temp=80.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
215: CompitWaterHeaterEntityDescription(
key="R480",
min_temp=30.0,
max_temp=80.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
222: CompitWaterHeaterEntityDescription(
key="R377B",
min_temp=30.0,
max_temp=75.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
224: CompitWaterHeaterEntityDescription(
key="R 900",
min_temp=0.0,
max_temp=70.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
36: CompitWaterHeaterEntityDescription(
key="BioMax742",
min_temp=0.0,
max_temp=75.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
75: CompitWaterHeaterEntityDescription(
key="BioMax772",
min_temp=0.0,
max_temp=75.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
201: CompitWaterHeaterEntityDescription(
key="BioMax775",
min_temp=0.0,
max_temp=75.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
210: CompitWaterHeaterEntityDescription(
key="EL750",
min_temp=30.0,
max_temp=80.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE,
),
44: CompitWaterHeaterEntityDescription(
key="SolarComp 951",
min_temp=0.0,
max_temp=85.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE,
supports_current_temperature=False,
),
45: CompitWaterHeaterEntityDescription(
key="SolarComp971",
min_temp=0.0,
max_temp=75.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE,
supports_current_temperature=False,
),
99: CompitWaterHeaterEntityDescription(
key="SolarComp971C",
min_temp=0.0,
max_temp=75.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE,
supports_current_temperature=False,
),
53: CompitWaterHeaterEntityDescription(
key="R350.CWU",
min_temp=0.0,
max_temp=80.0,
supported_features=WaterHeaterEntityFeature.TARGET_TEMPERATURE,
),
}
async def async_setup_entry(
hass: HomeAssistant,
entry: CompitConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Compit water heater entities from a config entry."""
coordinator = entry.runtime_data
async_add_entities(
CompitWaterHeater(coordinator, device_id, entity_description)
for device_id, device in coordinator.connector.all_devices.items()
if (entity_description := DEVICE_DEFINITIONS.get(device.definition.code))
)
class CompitWaterHeater(
CoordinatorEntity[CompitDataUpdateCoordinator], WaterHeaterEntity
):
"""Representation of a Compit Water Heater."""
_attr_target_temperature_step = PRECISION_WHOLE
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_has_entity_name = True
_attr_name = None
entity_description: CompitWaterHeaterEntityDescription
def __init__(
self,
coordinator: CompitDataUpdateCoordinator,
device_id: int,
entity_description: CompitWaterHeaterEntityDescription,
) -> None:
"""Initialize the water heater."""
super().__init__(coordinator)
self.device_id = device_id
self.entity_description = entity_description
self._attr_unique_id = f"{device_id}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, str(device_id))},
name=entity_description.key,
manufacturer=MANUFACTURER_NAME,
model=entity_description.key,
)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
super().available
and self.coordinator.connector.get_device(self.device_id) is not None
)
@cached_property
def min_temp(self) -> float:
"""Return the minimum temperature."""
return self.entity_description.min_temp
@cached_property
def max_temp(self) -> float:
"""Return the maximum temperature."""
return self.entity_description.max_temp
@cached_property
def supported_features(self) -> WaterHeaterEntityFeature:
"""Return the supported features."""
return self.entity_description.supported_features
@cached_property
def operation_list(self) -> list[str] | None:
"""Return the list of available operation modes."""
if (
self.entity_description.supported_features
& WaterHeaterEntityFeature.OPERATION_MODE
):
return [STATE_OFF, STATE_PERFORMANCE, STATE_ECO]
return None
@property
def target_temperature(self) -> float | None:
"""Return the set target temperature."""
value = self.coordinator.connector.get_current_value(
self.device_id, CompitParameter.DHW_TARGET_TEMPERATURE
)
if isinstance(value, float):
return value
return None
@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
if self.entity_description.supports_current_temperature is False:
return None
value = self.coordinator.connector.get_current_value(
self.device_id, CompitParameter.DHW_CURRENT_TEMPERATURE
)
if isinstance(value, float):
return value
return None
async def async_set_temperature(self, **kwargs: Any) -> None:
    """Set a new target DHW temperature on the device.

    Calls without ATTR_TEMPERATURE are silently ignored. The value is
    written through the connector and the updated state is pushed to
    Home Assistant.
    """
    temperature = kwargs.get(ATTR_TEMPERATURE)
    if temperature is None:
        return
    # NOTE(review): the previous `self._attr_target_temperature = temperature`
    # write was dead code — the `target_temperature` property overrides the
    # attribute-backed default and reads the value from the coordinator, so
    # the attribute was never consulted. It has been removed.
    await self.coordinator.connector.set_device_parameter(
        self.device_id,
        CompitParameter.DHW_TARGET_TEMPERATURE,
        float(temperature),
    )
    self.async_write_ha_state()
async def async_turn_on(self, **kwargs: Any) -> None:
    """Turn the water heater on (mapped to the performance device option)."""
    connector = self.coordinator.connector
    await connector.select_device_option(
        self.device_id,
        CompitParameter.DHW_ON_OFF,
        HA_STATE_TO_COMPIT[STATE_PERFORMANCE],
    )
    self.async_write_ha_state()
async def async_turn_off(self, **kwargs: Any) -> None:
    """Turn the water heater off via the DHW on/off device option."""
    connector = self.coordinator.connector
    await connector.select_device_option(
        self.device_id,
        CompitParameter.DHW_ON_OFF,
        HA_STATE_TO_COMPIT[STATE_OFF],
    )
    self.async_write_ha_state()
async def async_set_operation_mode(self, operation_mode: str) -> None:
    """Set a new operation mode by mapping the HA state to the device option."""
    connector = self.coordinator.connector
    await connector.select_device_option(
        self.device_id,
        CompitParameter.DHW_ON_OFF,
        HA_STATE_TO_COMPIT[operation_mode],
    )
    self.async_write_ha_state()
@property
def current_operation(self) -> str | None:
    """Return the current operation mode translated to an HA state."""
    option = self.coordinator.connector.get_current_option(
        self.device_id, CompitParameter.DHW_ON_OFF
    )
    # Unknown device state maps to None; otherwise translate via the table.
    return None if option is None else COMPIT_STATE_TO_HA.get(option)

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.1.28"]
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.2.13"]
}

View File

@@ -131,23 +131,29 @@ class CyncLightEntity(CyncBaseEntity, LightEntity):
async def async_turn_on(self, **kwargs: Any) -> None:
"""Process an action on the light."""
if not kwargs:
await self._device.turn_on()
converted_brightness: int | None = None
converted_color_temp: int | None = None
rgb: tuple[int, int, int] | None = None
elif kwargs.get(ATTR_COLOR_TEMP_KELVIN) is not None:
if kwargs.get(ATTR_COLOR_TEMP_KELVIN) is not None:
color_temp = kwargs.get(ATTR_COLOR_TEMP_KELVIN)
converted_color_temp = self._normalize_color_temp(color_temp)
await self._device.set_color_temp(converted_color_temp)
elif kwargs.get(ATTR_RGB_COLOR) is not None:
rgb = kwargs.get(ATTR_RGB_COLOR)
elif self.color_mode == ColorMode.RGB:
rgb = self._device.rgb
elif self.color_mode == ColorMode.COLOR_TEMP:
converted_color_temp = self._device.color_temp
await self._device.set_rgb(rgb)
elif kwargs.get(ATTR_BRIGHTNESS) is not None:
if kwargs.get(ATTR_BRIGHTNESS) is not None:
brightness = kwargs.get(ATTR_BRIGHTNESS)
converted_brightness = self._normalize_brightness(brightness)
elif self.color_mode != ColorMode.ONOFF:
converted_brightness = self._device.brightness
await self._device.set_brightness(converted_brightness)
await self._device.set_combo(
True, converted_brightness, converted_color_temp, rgb
)
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn off the light."""

View File

@@ -7,6 +7,6 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["pydaikin"],
"requirements": ["pydaikin==2.17.1"],
"requirements": ["pydaikin==2.17.2"],
"zeroconf": ["_dkapi._tcp.local."]
}

View File

@@ -8,7 +8,7 @@ import voluptuous as vol
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, service
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import ConfigEntrySelector
@@ -47,13 +47,9 @@ def get_config_entry(
translation_domain=DOMAIN,
translation_key="entry_not_selected",
)
return entries[0]
if not (entry := hass.config_entries.async_get_entry(entry_id)):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="entry_not_found",
)
return entry
entry_id = entries[0].entry_id
return service.async_get_config_entry(hass, DOMAIN, entry_id)
async def update_domain_service(call: ServiceCall) -> None:

View File

@@ -9,49 +9,34 @@ Note that the API used by this integration's client does not support cooling.
from __future__ import annotations
from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import Final
import evohomeasync as ec1
import evohomeasync2 as ec2
from evohomeasync2.const import SZ_CAN_BE_TEMPORARY, SZ_SYSTEM_MODE, SZ_TIMING_MODE
from evohomeasync2.schemas.const import (
S2_DURATION as SZ_DURATION,
S2_PERIOD as SZ_PERIOD,
SystemMode as EvoSystemMode,
)
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
Platform,
)
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.discovery import async_load_platform
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.service import verify_domain_control
from homeassistant.helpers.typing import ConfigType
from homeassistant.util.hass_dict import HassKey
from .const import (
ATTR_DURATION,
ATTR_DURATION_UNTIL,
ATTR_PERIOD,
ATTR_SETPOINT,
CONF_LOCATION_IDX,
DOMAIN,
EVOHOME_DATA,
SCAN_INTERVAL_DEFAULT,
SCAN_INTERVAL_MINIMUM,
EvoService,
)
from .coordinator import EvoDataUpdateCoordinator
from .services import setup_service_functions
from .storage import TokenManager
_LOGGER = logging.getLogger(__name__)
@@ -72,26 +57,6 @@ CONFIG_SCHEMA: Final = vol.Schema(
extra=vol.ALLOW_EXTRA,
)
# system mode schemas are built dynamically when the services are registered
# because supported modes can vary for edge-case systems
RESET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
{vol.Required(ATTR_ENTITY_ID): cv.entity_id}
)
SET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_SETPOINT): vol.All(
vol.Coerce(float), vol.Range(min=4.0, max=35.0)
),
vol.Optional(ATTR_DURATION_UNTIL): vol.All(
cv.time_period, vol.Range(min=timedelta(days=0), max=timedelta(days=1))
),
}
)
EVOHOME_KEY: HassKey[EvoData] = HassKey(DOMAIN)
@dataclass
class EvoData:
@@ -130,7 +95,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
assert coordinator.tcs is not None # mypy
hass.data[EVOHOME_KEY] = EvoData(
hass.data[EVOHOME_DATA] = EvoData(
coordinator=coordinator,
loc_idx=coordinator.loc_idx,
tcs=coordinator.tcs,
@@ -147,132 +112,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
setup_service_functions(hass, coordinator)
return True
@callback
def setup_service_functions(
hass: HomeAssistant, coordinator: EvoDataUpdateCoordinator
) -> None:
"""Set up the service handlers for the system/zone operating modes.
Not all Honeywell TCC-compatible systems support all operating modes. In addition,
each mode will require any of four distinct service schemas. This has to be
enumerated before registering the appropriate handlers.
It appears that all TCC-compatible systems support the same three zones modes.
"""
@verify_domain_control(DOMAIN)
async def force_refresh(call: ServiceCall) -> None:
"""Obtain the latest state data via the vendor's RESTful API."""
await coordinator.async_refresh()
@verify_domain_control(DOMAIN)
async def set_system_mode(call: ServiceCall) -> None:
"""Set the system mode."""
assert coordinator.tcs is not None # mypy
payload = {
"unique_id": coordinator.tcs.id,
"service": call.service,
"data": call.data,
}
async_dispatcher_send(hass, DOMAIN, payload)
@verify_domain_control(DOMAIN)
async def set_zone_override(call: ServiceCall) -> None:
"""Set the zone override (setpoint)."""
entity_id = call.data[ATTR_ENTITY_ID]
registry = er.async_get(hass)
registry_entry = registry.async_get(entity_id)
if registry_entry is None or registry_entry.platform != DOMAIN:
raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity")
if registry_entry.domain != "climate":
raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone")
payload = {
"unique_id": registry_entry.unique_id,
"service": call.service,
"data": call.data,
}
async_dispatcher_send(hass, DOMAIN, payload)
assert coordinator.tcs is not None # mypy
hass.services.async_register(DOMAIN, EvoService.REFRESH_SYSTEM, force_refresh)
# Enumerate which operating modes are supported by this system
modes = list(coordinator.tcs.allowed_system_modes)
# Not all systems support "AutoWithReset": register this handler only if required
if any(
m[SZ_SYSTEM_MODE]
for m in modes
if m[SZ_SYSTEM_MODE] == EvoSystemMode.AUTO_WITH_RESET
):
hass.services.async_register(DOMAIN, EvoService.RESET_SYSTEM, set_system_mode)
system_mode_schemas = []
modes = [m for m in modes if m[SZ_SYSTEM_MODE] != EvoSystemMode.AUTO_WITH_RESET]
# Permanent-only modes will use this schema
perm_modes = [m[SZ_SYSTEM_MODE] for m in modes if not m[SZ_CAN_BE_TEMPORARY]]
if perm_modes: # any of: "Auto", "HeatingOff": permanent only
schema = vol.Schema({vol.Required(ATTR_MODE): vol.In(perm_modes)})
system_mode_schemas.append(schema)
modes = [m for m in modes if m[SZ_CAN_BE_TEMPORARY]]
# These modes are set for a number of hours (or indefinitely): use this schema
temp_modes = [m[SZ_SYSTEM_MODE] for m in modes if m[SZ_TIMING_MODE] == SZ_DURATION]
if temp_modes: # any of: "AutoWithEco", permanent or for 0-24 hours
schema = vol.Schema(
{
vol.Required(ATTR_MODE): vol.In(temp_modes),
vol.Optional(ATTR_DURATION): vol.All(
cv.time_period,
vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)),
),
}
)
system_mode_schemas.append(schema)
# These modes are set for a number of days (or indefinitely): use this schema
temp_modes = [m[SZ_SYSTEM_MODE] for m in modes if m[SZ_TIMING_MODE] == SZ_PERIOD]
if temp_modes: # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days
schema = vol.Schema(
{
vol.Required(ATTR_MODE): vol.In(temp_modes),
vol.Optional(ATTR_PERIOD): vol.All(
cv.time_period,
vol.Range(min=timedelta(days=1), max=timedelta(days=99)),
),
}
)
system_mode_schemas.append(schema)
if system_mode_schemas:
hass.services.async_register(
DOMAIN,
EvoService.SET_SYSTEM_MODE,
set_system_mode,
schema=vol.Schema(vol.Any(*system_mode_schemas)),
)
# The zone modes are consistent across all systems and use the same schema
hass.services.async_register(
DOMAIN,
EvoService.RESET_ZONE_OVERRIDE,
set_zone_override,
schema=RESET_ZONE_OVERRIDE_SCHEMA,
)
hass.services.async_register(
DOMAIN,
EvoService.SET_ZONE_OVERRIDE,
set_zone_override,
schema=SET_ZONE_OVERRIDE_SCHEMA,
)

View File

@@ -41,12 +41,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util
from . import EVOHOME_KEY
from .const import (
ATTR_DURATION,
ATTR_DURATION_UNTIL,
ATTR_PERIOD,
ATTR_SETPOINT,
EVOHOME_DATA,
EvoService,
)
from .coordinator import EvoDataUpdateCoordinator
@@ -85,9 +85,9 @@ async def async_setup_platform(
if discovery_info is None:
return
coordinator = hass.data[EVOHOME_KEY].coordinator
loc_idx = hass.data[EVOHOME_KEY].loc_idx
tcs = hass.data[EVOHOME_KEY].tcs
coordinator = hass.data[EVOHOME_DATA].coordinator
loc_idx = hass.data[EVOHOME_DATA].loc_idx
tcs = hass.data[EVOHOME_DATA].tcs
_LOGGER.debug(
"Found the Location/Controller (%s), id=%s, name=%s (location_idx=%s)",

View File

@@ -4,9 +4,15 @@ from __future__ import annotations
from datetime import timedelta
from enum import StrEnum, unique
from typing import Final
from typing import TYPE_CHECKING, Final
from homeassistant.util.hass_dict import HassKey
if TYPE_CHECKING:
from . import EvoData
DOMAIN: Final = "evohome"
EVOHOME_DATA: HassKey[EvoData] = HassKey(DOMAIN)
STORAGE_VER: Final = 1
STORAGE_KEY: Final = DOMAIN

View File

@@ -0,0 +1,178 @@
"""Service handlers for the Evohome integration."""
from __future__ import annotations
from datetime import timedelta
from typing import Final
from evohomeasync2.const import SZ_CAN_BE_TEMPORARY, SZ_SYSTEM_MODE, SZ_TIMING_MODE
from evohomeasync2.schemas.const import (
S2_DURATION as SZ_DURATION,
S2_PERIOD as SZ_PERIOD,
SystemMode as EvoSystemMode,
)
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID, ATTR_MODE
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.service import verify_domain_control
from .const import (
ATTR_DURATION,
ATTR_DURATION_UNTIL,
ATTR_PERIOD,
ATTR_SETPOINT,
DOMAIN,
EvoService,
)
from .coordinator import EvoDataUpdateCoordinator
# system mode schemas are built dynamically when the services are registered
# because supported modes can vary for edge-case systems
# Resetting a zone override only needs the target entity.
RESET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
{vol.Required(ATTR_ENTITY_ID): cv.entity_id}
)
# Setting a zone override needs the entity, a setpoint within the 4.0-35.0
# range enforced below, and an optional duration capped at one day.
SET_ZONE_OVERRIDE_SCHEMA: Final = vol.Schema(
{
vol.Required(ATTR_ENTITY_ID): cv.entity_id,
vol.Required(ATTR_SETPOINT): vol.All(
vol.Coerce(float), vol.Range(min=4.0, max=35.0)
),
vol.Optional(ATTR_DURATION_UNTIL): vol.All(
cv.time_period,
vol.Range(min=timedelta(days=0), max=timedelta(days=1)),
),
}
)
@callback
def setup_service_functions(
hass: HomeAssistant, coordinator: EvoDataUpdateCoordinator
) -> None:
"""Set up the service handlers for the system/zone operating modes.

Not all Honeywell TCC-compatible systems support all operating modes. In addition,
each mode will require any of four distinct service schemas. This has to be
enumerated before registering the appropriate handlers.
It appears that all TCC-compatible systems support the same three zones modes.
"""
# Handlers are nested closures so they capture `hass` and `coordinator`.
@verify_domain_control(DOMAIN)
async def force_refresh(call: ServiceCall) -> None:
"""Obtain the latest state data via the vendor's RESTful API."""
await coordinator.async_refresh()
@verify_domain_control(DOMAIN)
async def set_system_mode(call: ServiceCall) -> None:
"""Set the system mode."""
assert coordinator.tcs is not None  # mypy
# Entities listen for this dispatcher signal and match on "unique_id".
payload = {
"unique_id": coordinator.tcs.id,
"service": call.service,
"data": call.data,
}
async_dispatcher_send(hass, DOMAIN, payload)
@verify_domain_control(DOMAIN)
async def set_zone_override(call: ServiceCall) -> None:
"""Set the zone override (setpoint).

Raises ValueError when the target entity is not an evohome climate
entity registered with this integration.
"""
entity_id = call.data[ATTR_ENTITY_ID]
registry = er.async_get(hass)
registry_entry = registry.async_get(entity_id)
if registry_entry is None or registry_entry.platform != DOMAIN:
raise ValueError(f"'{entity_id}' is not a known {DOMAIN} entity")
if registry_entry.domain != "climate":
raise ValueError(f"'{entity_id}' is not an {DOMAIN} controller/zone")
payload = {
"unique_id": registry_entry.unique_id,
"service": call.service,
"data": call.data,
}
async_dispatcher_send(hass, DOMAIN, payload)
assert coordinator.tcs is not None  # mypy
hass.services.async_register(DOMAIN, EvoService.REFRESH_SYSTEM, force_refresh)
# Enumerate which operating modes are supported by this system
modes = list(coordinator.tcs.allowed_system_modes)
# Not all systems support "AutoWithReset": register this handler only if required
# NOTE(review): the generator below yields the (truthy) mode value for each
# matching entry; a plain membership test would express the same intent
# more directly — confirm before simplifying.
if any(
m[SZ_SYSTEM_MODE]
for m in modes
if m[SZ_SYSTEM_MODE] == EvoSystemMode.AUTO_WITH_RESET
):
hass.services.async_register(DOMAIN, EvoService.RESET_SYSTEM, set_system_mode)
system_mode_schemas = []
modes = [m for m in modes if m[SZ_SYSTEM_MODE] != EvoSystemMode.AUTO_WITH_RESET]
# Permanent-only modes will use this schema
perm_modes = [m[SZ_SYSTEM_MODE] for m in modes if not m[SZ_CAN_BE_TEMPORARY]]
if perm_modes:  # any of: "Auto", "HeatingOff": permanent only
schema = vol.Schema({vol.Required(ATTR_MODE): vol.In(perm_modes)})
system_mode_schemas.append(schema)
modes = [m for m in modes if m[SZ_CAN_BE_TEMPORARY]]
# These modes are set for a number of hours (or indefinitely): use this schema
temp_modes = [m[SZ_SYSTEM_MODE] for m in modes if m[SZ_TIMING_MODE] == SZ_DURATION]
if temp_modes:  # any of: "AutoWithEco", permanent or for 0-24 hours
schema = vol.Schema(
{
vol.Required(ATTR_MODE): vol.In(temp_modes),
vol.Optional(ATTR_DURATION): vol.All(
cv.time_period,
vol.Range(min=timedelta(hours=0), max=timedelta(hours=24)),
),
}
)
system_mode_schemas.append(schema)
# These modes are set for a number of days (or indefinitely): use this schema
temp_modes = [m[SZ_SYSTEM_MODE] for m in modes if m[SZ_TIMING_MODE] == SZ_PERIOD]
if temp_modes:  # any of: "Away", "Custom", "DayOff", permanent or for 1-99 days
schema = vol.Schema(
{
vol.Required(ATTR_MODE): vol.In(temp_modes),
vol.Optional(ATTR_PERIOD): vol.All(
cv.time_period,
vol.Range(min=timedelta(days=1), max=timedelta(days=99)),
),
}
)
system_mode_schemas.append(schema)
# Only register SET_SYSTEM_MODE when at least one mode schema applies.
if system_mode_schemas:
hass.services.async_register(
DOMAIN,
EvoService.SET_SYSTEM_MODE,
set_system_mode,
schema=vol.Schema(vol.Any(*system_mode_schemas)),
)
# The zone modes are consistent across all systems and use the same schema
hass.services.async_register(
DOMAIN,
EvoService.RESET_ZONE_OVERRIDE,
set_zone_override,
schema=RESET_ZONE_OVERRIDE_SCHEMA,
)
hass.services.async_register(
DOMAIN,
EvoService.SET_ZONE_OVERRIDE,
set_zone_override,
schema=SET_ZONE_OVERRIDE_SCHEMA,
)

View File

@@ -25,7 +25,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import dt as dt_util
from . import EVOHOME_KEY
from .const import EVOHOME_DATA
from .coordinator import EvoDataUpdateCoordinator
from .entity import EvoChild
@@ -47,8 +47,8 @@ async def async_setup_platform(
if discovery_info is None:
return
coordinator = hass.data[EVOHOME_KEY].coordinator
tcs = hass.data[EVOHOME_KEY].tcs
coordinator = hass.data[EVOHOME_DATA].coordinator
tcs = hass.data[EVOHOME_DATA].tcs
assert tcs.hotwater is not None # mypy check

View File

@@ -26,6 +26,7 @@ class MeshRoles(StrEnum):
DOMAIN = "fritz"
SCAN_INTERVAL = 30
PLATFORMS = [
Platform.BINARY_SENSOR,

View File

@@ -36,6 +36,7 @@ from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import slugify
from homeassistant.util.hass_dict import HassKey
from .const import (
@@ -47,6 +48,7 @@ from .const import (
DEFAULT_USERNAME,
DOMAIN,
FRITZ_EXCEPTIONS,
SCAN_INTERVAL,
MeshRoles,
)
from .helpers import _ha_is_stopping
@@ -90,10 +92,56 @@ class UpdateCoordinatorDataType(TypedDict):
entity_states: dict[str, StateType | bool]
class FritzConnectionCached(FritzConnection): # type: ignore[misc]
"""FritzConnection with cached call action.

Only actions whose name starts with "get" (case-insensitive) are cached;
all other actions always pass through to FritzConnection. The cache is
cleared explicitly via clear_cache().
"""
# Maps a slugified "service:action:arguments:kwargs" key to the raw result.
_call_cache: dict[str, dict[str, Any]]
def clear_cache(self) -> None:
"""Clear cached calls."""
self._call_cache = {}
_LOGGER.debug("Cleared FritzConnection call action cache")
def call_action(
self,
service_name: str,
action_name: str,
*,
arguments: dict | None = None,
**kwargs: Any,
) -> dict[str, Any]:
"""Call action with cached services. Only get actions are cached."""
# Mutating actions are never cached.
if not action_name.lower().startswith("get"):
return super().call_action( # type: ignore[no-any-return]
service_name, action_name, arguments=arguments, **kwargs
)
# Lazy init: the cache attribute is created on first cached call.
if not hasattr(self, "_call_cache"):
self._call_cache = {}
# Sorted kwargs make the key deterministic regardless of call order.
kwargs_key = ",".join(f"{k}={v!r}" for k, v in sorted(kwargs.items()))
cache_key = slugify(f"{service_name}:{action_name}:{arguments}:{kwargs_key}")
if (result := self._call_cache.get(cache_key)) is not None:
_LOGGER.debug("Using cached result for %s %s", service_name, action_name)
return result
# NOTE(review): the cached dict is returned to callers by reference — a
# caller mutating the result would poison the cache; confirm callers
# treat results as read-only.
result = super().call_action(
service_name, action_name, arguments=arguments, **kwargs
)
self._call_cache[cache_key] = result
return result # type: ignore[no-any-return]
class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
"""FritzBoxTools class."""
config_entry: FritzConfigEntry
connection: FritzConnectionCached
fritz_guest_wifi: FritzGuestWLAN
fritz_hosts: FritzHosts
fritz_status: FritzStatus
fritz_call: FritzCall
def __init__(
self,
@@ -112,17 +160,12 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
config_entry=config_entry,
logger=_LOGGER,
name=f"{DOMAIN}-{host}-coordinator",
update_interval=timedelta(seconds=30),
update_interval=timedelta(seconds=SCAN_INTERVAL),
)
self._devices: dict[str, FritzDevice] = {}
self._options: Mapping[str, Any] | None = None
self._unique_id: str | None = None
self.connection: FritzConnection = None
self.fritz_guest_wifi: FritzGuestWLAN = None
self.fritz_hosts: FritzHosts = None
self.fritz_status: FritzStatus = None
self.fritz_call: FritzCall = None
self.host = host
self.mesh_role = MeshRoles.NONE
self.mesh_wifi_uplink = False
@@ -159,11 +202,12 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
name=self.config_entry.title,
sw_version=self.current_firmware,
)
self.connection.clear_cache()
def setup(self) -> None:
"""Set up FritzboxTools class."""
self.connection = FritzConnection(
self.connection = FritzConnectionCached(
address=self.host,
port=self.port,
user=self.username,
@@ -263,6 +307,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
"call_deflections": {},
"entity_states": {},
}
self.connection.clear_cache()
try:
await self.async_update_device_info()
@@ -278,6 +323,12 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
"call_deflections"
] = await self.async_update_call_deflections()
except FRITZ_EXCEPTIONS as ex:
_LOGGER.debug(
"Reload %s due to error '%s' to ensure proper re-login",
self.config_entry.title,
ex,
)
self.hass.config_entries.async_schedule_reload(self.config_entry.entry_id)
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_failed",

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from requests.exceptions import ConnectionError as RequestConnectionError, HTTPError
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, UnitOfTemperature
from homeassistant.core import Event, HomeAssistant
@@ -57,7 +59,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: FritzboxConfigEntry) ->
async def async_unload_entry(hass: HomeAssistant, entry: FritzboxConfigEntry) -> bool:
"""Unloading the AVM FRITZ!SmartHome platforms."""
await hass.async_add_executor_job(entry.runtime_data.fritz.logout)
try:
await hass.async_add_executor_job(entry.runtime_data.fritz.logout)
except (RequestConnectionError, HTTPError) as ex:
LOGGER.debug("logout failed with '%s', anyway continue with unload", ex)
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -121,26 +121,11 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
def _update_fritz_devices(self) -> FritzboxCoordinatorData:
"""Update all fritzbox device data."""
try:
self.fritz.update_devices(ignore_removed=False)
if self.has_templates:
self.fritz.update_templates(ignore_removed=False)
if self.has_triggers:
self.fritz.update_triggers(ignore_removed=False)
except RequestConnectionError as ex:
raise UpdateFailed from ex
except HTTPError:
# If the device rebooted, login again
try:
self.fritz.login()
except LoginError as ex:
raise ConfigEntryAuthFailed from ex
self.fritz.update_devices(ignore_removed=False)
if self.has_templates:
self.fritz.update_templates(ignore_removed=False)
if self.has_triggers:
self.fritz.update_triggers(ignore_removed=False)
self.fritz.update_devices(ignore_removed=False)
if self.has_templates:
self.fritz.update_templates(ignore_removed=False)
if self.has_triggers:
self.fritz.update_triggers(ignore_removed=False)
devices = self.fritz.get_devices()
device_data = {}
@@ -193,7 +178,18 @@ class FritzboxDataUpdateCoordinator(DataUpdateCoordinator[FritzboxCoordinatorDat
async def _async_update_data(self) -> FritzboxCoordinatorData:
"""Fetch all device data."""
new_data = await self.hass.async_add_executor_job(self._update_fritz_devices)
try:
new_data = await self.hass.async_add_executor_job(
self._update_fritz_devices
)
except (RequestConnectionError, HTTPError) as ex:
LOGGER.debug(
"Reload %s due to error '%s' to ensure proper re-login",
self.config_entry.title,
ex,
)
self.hass.config_entries.async_schedule_reload(self.config_entry.entry_id)
raise UpdateFailed from ex
for device in new_data.devices.values():
# create device registry entry for new main devices

View File

@@ -2,10 +2,12 @@
from __future__ import annotations
from aiohttp import ClientResponseError
from homelink.mqtt_provider import MQTTProvider
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
from . import oauth2
@@ -18,6 +20,10 @@ PLATFORMS: list[Platform] = [Platform.EVENT]
async def async_setup_entry(hass: HomeAssistant, entry: HomeLinkConfigEntry) -> bool:
"""Set up homelink from a config entry."""
auth_implementation = oauth2.SRPAuthImplementation(hass, DOMAIN)
try:
await auth_implementation.async_refresh_token(entry.data["token"])
except ClientResponseError as err:
raise ConfigEntryAuthFailed(err) from err
config_entry_oauth2_flow.async_register_implementation(
hass, DOMAIN, auth_implementation

View File

@@ -1,5 +1,6 @@
"""Config flow for homelink."""
from collections.abc import Mapping
import logging
from typing import Any
@@ -8,8 +9,8 @@ from homelink.auth.srp_auth import SRPAuth
import jwt
import voluptuous as vol
from homeassistant.config_entries import ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_UNIQUE_ID
from homeassistant.helpers.config_entry_oauth2_flow import AbstractOAuth2FlowHandler
from .const import DOMAIN
@@ -56,9 +57,13 @@ class SRPFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):
tokens["AuthenticationResult"]["AccessToken"],
options={"verify_signature": False},
)
await self.async_set_unique_id(access_token["sub"])
self._abort_if_unique_id_configured()
self.external_data = {"tokens": tokens}
sub = access_token["sub"]
await self.async_set_unique_id(sub)
self.external_data = {
"tokens": tokens,
CONF_UNIQUE_ID: sub,
CONF_EMAIL: user_input[CONF_EMAIL].strip().lower(),
}
return await self.async_step_creation()
return self.async_show_form(
@@ -68,3 +73,36 @@ class SRPFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN):
),
errors=errors,
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error.

The stored entry data is not inspected here; the user re-enters
credentials in the confirm step.
"""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Dialog that informs the user that reauth is required.

On first entry, shows an email/password form; on submission, re-runs the
normal user step with the supplied credentials.
"""
if user_input is None:
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{vol.Required(CONF_EMAIL): str, vol.Required(CONF_PASSWORD): str}
),
)
return await self.async_step_user(user_input)
async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult:
"""Create an oauth config entry or update existing entry for reauth."""
# The unique id (the token "sub") was stashed in external_data by the
# user step, so reauth can verify the same account is used.
await self.async_set_unique_id(self.external_data[CONF_UNIQUE_ID])
entry_title = self.context.get("title_placeholders", {"name": "HomeLink"})[
"name"
]
if self.source == SOURCE_REAUTH:
# Reauth must not switch accounts: abort on a unique-id mismatch,
# otherwise update the existing entry in place and reload it.
self._abort_if_unique_id_mismatch()
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data_updates=data, title=entry_title
)
self._abort_if_unique_id_configured()
return self.async_create_entry(data=data, title=entry_title)

View File

@@ -1,7 +1,5 @@
"""Constants for the homelink integration."""
DOMAIN = "gentex_homelink"
OAUTH2_TOKEN = "https://auth.homelinkcloud.com/oauth2/token"
POLLING_INTERVAL = 5
EVENT_PRESSED = "Pressed"
OAUTH2_TOKEN_URL = "https://auth.homelinkcloud.com/oauth2/token"

View File

@@ -13,7 +13,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import OAUTH2_TOKEN
from .const import OAUTH2_TOKEN_URL
_LOGGER = logging.getLogger(__name__)
@@ -59,8 +59,8 @@ class SRPAuthImplementation(config_entry_oauth2_flow.AbstractOAuth2Implementatio
data["client_id"] = self.client_id
_LOGGER.debug("Sending token request to %s", OAUTH2_TOKEN)
resp = await session.post(OAUTH2_TOKEN, data=data)
_LOGGER.debug("Sending token request to %s", OAUTH2_TOKEN_URL)
resp = await session.post(OAUTH2_TOKEN_URL, data=data)
if resp.status >= 400:
try:
error_response = await resp.json()

View File

@@ -36,7 +36,7 @@ rules:
integration-owner: done
log-when-unavailable: todo
parallel-updates: done
reauthentication-flow: todo
reauthentication-flow: done
test-coverage: todo
# Gold

View File

@@ -11,6 +11,8 @@
"oauth_implementation_unavailable": "[%key:common::config_flow::abort::oauth2_implementation_unavailable%]",
"oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]",
"oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unique_id_mismatch": "Please log in using the same account, or create a new entry.",
"user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]"
},
"create_entry": {
@@ -18,12 +20,24 @@
},
"error": {
"srp_auth_failed": "Error authenticating HomeLink account",
"unknown": "An unknown error occurred. Please try again later"
"unknown": "An unknown error occurred. Please try again later."
},
"step": {
"pick_implementation": {
"title": "[%key:common::config_flow::title::oauth2_pick_implementation%]"
},
"reauth_confirm": {
"data": {
"email": "[%key:common::config_flow::data::email%]",
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"email": "[%key:component::gentex_homelink::config::step::user::data_description::email%]",
"password": "[%key:component::gentex_homelink::config::step::user::data_description::password%]"
},
"description": "The HomeLink integration needs to re-authenticate your account",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
"data": {
"email": "[%key:common::config_flow::data::email%]",

View File

@@ -0,0 +1,101 @@
rules:
# Other comments:
# - we could consider removing the air quality entity removal
# Bronze
action-setup:
status: exempt
comment: No custom actions are defined.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage:
status: todo
comment:
We should have the happy flow as the first test, which can be merged with test_show_form.
The config flow tests are missing adding a duplicate entry test.
config-flow:
status: todo
comment: Limit the scope of the try block in the user step
dependency-transparency: done
docs-actions:
status: exempt
comment: No custom actions are defined.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data:
status: todo
comment: No direct need to wrap the coordinator in a dataclass to store in the config entry
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: No options flow
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow:
status: exempt
comment: This integration does not require authentication.
test-coverage:
status: todo
comment:
The `test_async_setup_entry` should test the state of the mock config entry, instead of an entity state
The `test_availability` doesn't really do what it says it does, and this is now already tested via the snapshot tests.
# Gold
devices: done
diagnostics: done
discovery-update-info:
status: exempt
comment: This integration is a cloud service and thus does not support discovery.
discovery:
status: exempt
comment: This integration is a cloud service and thus does not support discovery.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices:
status: exempt
comment: This is an service, which doesn't integrate with any devices.
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
dynamic-devices:
status: exempt
comment: This integration does not have devices.
entity-category: done
entity-device-class:
status: todo
comment: We can use the CO device class for the carbon monoxide sensor
entity-disabled-by-default: done
entity-translations:
status: todo
comment: We can remove the options state_attributes.
exception-translations: done
icon-translations: done
reconfiguration-flow:
status: exempt
comment: Only parameter that could be changed station_id would force a new config entry.
repair-issues: done
stale-devices:
status: exempt
comment: This integration does not have devices.
# Platinum
async-dependency: done
inject-websession: done
strict-typing: done

View File

@@ -5,14 +5,25 @@ import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from .const import CONF_TIME
from .const import CONF_TIME, DOMAIN
from .services import async_setup_services
PLATFORMS = [Platform.SENSOR]
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Google Travel Time component."""
async_setup_services(hass)
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Google Maps Travel Time from a config entry."""

View File

@@ -24,9 +24,7 @@ from homeassistant.helpers.selector import (
from homeassistant.util.unit_system import US_CUSTOMARY_SYSTEM
from .const import (
ALL_LANGUAGES,
ARRIVAL_TIME,
AVOID_OPTIONS,
CONF_ARRIVAL_TIME,
CONF_AVOID,
CONF_DEPARTURE_TIME,
@@ -41,12 +39,7 @@ from .const import (
DEFAULT_NAME,
DEPARTURE_TIME,
DOMAIN,
TIME_TYPES,
TRAFFIC_MODELS,
TRANSIT_PREFS,
TRANSPORT_TYPES,
TRAVEL_MODES,
UNITS,
UNITS_IMPERIAL,
UNITS_METRIC,
)
@@ -56,6 +49,15 @@ from .helpers import (
UnknownException,
validate_config_entry,
)
from .schemas import (
AVOID_SELECTOR,
LANGUAGE_SELECTOR,
TIME_TYPE_SELECTOR,
TRAFFIC_MODEL_SELECTOR,
TRANSIT_MODE_SELECTOR,
TRANSIT_ROUTING_PREFERENCE_SELECTOR,
UNITS_SELECTOR,
)
RECONFIGURE_SCHEMA = vol.Schema(
{
@@ -73,6 +75,13 @@ CONFIG_SCHEMA = RECONFIGURE_SCHEMA.extend(
OPTIONS_SCHEMA = vol.Schema(
{
vol.Optional(CONF_LANGUAGE): LANGUAGE_SELECTOR,
vol.Optional(CONF_AVOID): AVOID_SELECTOR,
vol.Optional(CONF_TRAFFIC_MODEL): TRAFFIC_MODEL_SELECTOR,
vol.Optional(CONF_TRANSIT_MODE): TRANSIT_MODE_SELECTOR,
vol.Optional(
CONF_TRANSIT_ROUTING_PREFERENCE
): TRANSIT_ROUTING_PREFERENCE_SELECTOR,
vol.Required(CONF_MODE): SelectSelector(
SelectSelectorConfig(
options=TRAVEL_MODES,
@@ -81,62 +90,9 @@ OPTIONS_SCHEMA = vol.Schema(
translation_key=CONF_MODE,
)
),
vol.Optional(CONF_LANGUAGE): SelectSelector(
SelectSelectorConfig(
options=sorted(ALL_LANGUAGES),
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_LANGUAGE,
)
),
vol.Optional(CONF_AVOID): SelectSelector(
SelectSelectorConfig(
options=AVOID_OPTIONS,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_AVOID,
)
),
vol.Required(CONF_UNITS): SelectSelector(
SelectSelectorConfig(
options=UNITS,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_UNITS,
)
),
vol.Required(CONF_TIME_TYPE): SelectSelector(
SelectSelectorConfig(
options=TIME_TYPES,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_TIME_TYPE,
)
),
vol.Required(CONF_UNITS): UNITS_SELECTOR,
vol.Required(CONF_TIME_TYPE): TIME_TYPE_SELECTOR,
vol.Optional(CONF_TIME): TimeSelector(),
vol.Optional(CONF_TRAFFIC_MODEL): SelectSelector(
SelectSelectorConfig(
options=TRAFFIC_MODELS,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_TRAFFIC_MODEL,
)
),
vol.Optional(CONF_TRANSIT_MODE): SelectSelector(
SelectSelectorConfig(
options=TRANSPORT_TYPES,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_TRANSIT_MODE,
)
),
vol.Optional(CONF_TRANSIT_ROUTING_PREFERENCE): SelectSelector(
SelectSelectorConfig(
options=TRANSIT_PREFS,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_TRANSIT_ROUTING_PREFERENCE,
)
),
}
)

View File

@@ -98,6 +98,7 @@ TRANSPORT_TYPES_TO_GOOGLE_SDK_ENUM = {
"rail": TransitPreferences.TransitTravelMode.RAIL,
}
TRAVEL_MODES = ["driving", "walking", "bicycling", "transit"]
TRAVEL_MODES_WITHOUT_TRANSIT = ["driving", "walking", "bicycling"]
TRAVEL_MODES_TO_GOOGLE_SDK_ENUM = {
"driving": RouteTravelMode.DRIVE,
"walking": RouteTravelMode.WALK,

View File

@@ -1,5 +1,6 @@
"""Helpers for Google Time Travel integration."""
import datetime
import logging
from google.api_core.client_options import ClientOptions
@@ -12,11 +13,16 @@ from google.api_core.exceptions import (
)
from google.maps.routing_v2 import (
ComputeRoutesRequest,
ComputeRoutesResponse,
Location,
RouteModifiers,
RoutesAsyncClient,
RouteTravelMode,
RoutingPreference,
TransitPreferences,
Waypoint,
)
from google.protobuf import timestamp_pb2
from google.type import latlng_pb2
import voluptuous as vol
@@ -29,12 +35,40 @@ from homeassistant.helpers.issue_registry import (
async_delete_issue,
)
from homeassistant.helpers.location import find_coordinates
from homeassistant.util import dt as dt_util
from .const import DOMAIN
from .const import (
DOMAIN,
TRAFFIC_MODELS_TO_GOOGLE_SDK_ENUM,
TRANSIT_PREFS_TO_GOOGLE_SDK_ENUM,
TRANSPORT_TYPES_TO_GOOGLE_SDK_ENUM,
UNITS_TO_GOOGLE_SDK_ENUM,
)
_LOGGER = logging.getLogger(__name__)
def convert_time(time_str: str) -> timestamp_pb2.Timestamp:
"""Convert a string like '08:00' to a google pb2 Timestamp.
If the time is in the past, it will be shifted to the next day.
"""
parsed_time = dt_util.parse_time(time_str)
if parsed_time is None:
raise ValueError(f"Invalid time format: {time_str}")
start_of_day = dt_util.start_of_local_day()
combined = datetime.datetime.combine(
start_of_day,
parsed_time,
start_of_day.tzinfo,
)
if combined < dt_util.now():
combined = combined + datetime.timedelta(days=1)
timestamp = timestamp_pb2.Timestamp()
timestamp.FromDatetime(dt=combined)
return timestamp
def convert_to_waypoint(hass: HomeAssistant, location: str) -> Waypoint | None:
"""Convert a location to a Waypoint.
@@ -123,3 +157,78 @@ def create_routes_api_disabled_issue(hass: HomeAssistant, entry: ConfigEntry) ->
def delete_routes_api_disabled_issue(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Delete the issue for the Routes API being disabled."""
async_delete_issue(hass, DOMAIN, f"routes_api_disabled_{entry.entry_id}")
async def async_compute_routes(
client: RoutesAsyncClient,
origin: str,
destination: str,
hass: HomeAssistant,
travel_mode: int,
units: str,
language: str | None = None,
avoid: str | None = None,
traffic_model: str | None = None,
transit_mode: str | None = None,
transit_routing_preference: str | None = None,
departure_time: str | None = None,
arrival_time: str | None = None,
field_mask: str = "routes.duration,routes.distanceMeters,routes.localized_values",
) -> ComputeRoutesResponse | None:
"""Compute routes using Google Routes API."""
origin_waypoint = convert_to_waypoint(hass, origin)
destination_waypoint = convert_to_waypoint(hass, destination)
if origin_waypoint is None or destination_waypoint is None:
return None
route_modifiers = None
routing_preference = None
if travel_mode == RouteTravelMode.DRIVE:
routing_preference = RoutingPreference.TRAFFIC_AWARE_OPTIMAL
route_modifiers = RouteModifiers(
avoid_tolls=avoid == "tolls",
avoid_ferries=avoid == "ferries",
avoid_highways=avoid == "highways",
avoid_indoor=avoid == "indoor",
)
transit_preferences = None
if travel_mode == RouteTravelMode.TRANSIT:
transit_routing_pref = None
transit_travel_mode = (
TransitPreferences.TransitTravelMode.TRANSIT_TRAVEL_MODE_UNSPECIFIED
)
if transit_routing_preference is not None:
transit_routing_pref = TRANSIT_PREFS_TO_GOOGLE_SDK_ENUM[
transit_routing_preference
]
if transit_mode is not None:
transit_travel_mode = TRANSPORT_TYPES_TO_GOOGLE_SDK_ENUM[transit_mode]
transit_preferences = TransitPreferences(
routing_preference=transit_routing_pref,
allowed_travel_modes=[transit_travel_mode],
)
departure_timestamp = convert_time(departure_time) if departure_time else None
arrival_timestamp = convert_time(arrival_time) if arrival_time else None
request = ComputeRoutesRequest(
origin=origin_waypoint,
destination=destination_waypoint,
travel_mode=travel_mode,
routing_preference=routing_preference,
departure_time=departure_timestamp,
arrival_time=arrival_timestamp,
route_modifiers=route_modifiers,
language_code=language,
units=UNITS_TO_GOOGLE_SDK_ENUM[units],
traffic_model=TRAFFIC_MODELS_TO_GOOGLE_SDK_ENUM[traffic_model]
if traffic_model
else None,
transit_preferences=transit_preferences,
)
return await client.compute_routes(
request, metadata=[("x-goog-fieldmask", field_mask)]
)

View File

@@ -0,0 +1,10 @@
{
"services": {
"get_transit_times": {
"service": "mdi:bus"
},
"get_travel_times": {
"service": "mdi:routes"
}
}
}

View File

@@ -0,0 +1,137 @@
"""Schemas for the Google Travel Time integration."""
import voluptuous as vol
from homeassistant.const import ATTR_CONFIG_ENTRY_ID, CONF_LANGUAGE, CONF_MODE
from homeassistant.helpers.selector import (
ConfigEntrySelector,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
TextSelector,
TimeSelector,
)
from .const import (
ALL_LANGUAGES,
AVOID_OPTIONS,
CONF_ARRIVAL_TIME,
CONF_AVOID,
CONF_DEPARTURE_TIME,
CONF_DESTINATION,
CONF_ORIGIN,
CONF_TIME_TYPE,
CONF_TRAFFIC_MODEL,
CONF_TRANSIT_MODE,
CONF_TRANSIT_ROUTING_PREFERENCE,
CONF_UNITS,
DOMAIN,
TIME_TYPES,
TRAFFIC_MODELS,
TRANSIT_PREFS,
TRANSPORT_TYPES,
TRAVEL_MODES_WITHOUT_TRANSIT,
UNITS,
UNITS_METRIC,
)
LANGUAGE_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=sorted(ALL_LANGUAGES),
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_LANGUAGE,
)
)
AVOID_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=AVOID_OPTIONS,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_AVOID,
)
)
TRAFFIC_MODEL_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=TRAFFIC_MODELS,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_TRAFFIC_MODEL,
)
)
TRANSIT_MODE_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=TRANSPORT_TYPES,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_TRANSIT_MODE,
)
)
TRANSIT_ROUTING_PREFERENCE_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=TRANSIT_PREFS,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_TRANSIT_ROUTING_PREFERENCE,
)
)
UNITS_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=UNITS,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_UNITS,
)
)
TIME_TYPE_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=TIME_TYPES,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_TIME_TYPE,
)
)
_SERVICE_BASE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY_ID): ConfigEntrySelector(
{"integration": DOMAIN}
),
vol.Required(CONF_ORIGIN): TextSelector(),
vol.Required(CONF_DESTINATION): TextSelector(),
vol.Optional(CONF_UNITS, default=UNITS_METRIC): UNITS_SELECTOR,
vol.Optional(CONF_LANGUAGE): LANGUAGE_SELECTOR,
}
)
SERVICE_GET_TRAVEL_TIMES_SCHEMA = _SERVICE_BASE_SCHEMA.extend(
{
vol.Optional(CONF_MODE, default="driving"): SelectSelector(
SelectSelectorConfig(
options=TRAVEL_MODES_WITHOUT_TRANSIT,
sort=True,
mode=SelectSelectorMode.DROPDOWN,
translation_key=CONF_MODE,
)
),
vol.Optional(CONF_AVOID): AVOID_SELECTOR,
vol.Optional(CONF_TRAFFIC_MODEL): TRAFFIC_MODEL_SELECTOR,
vol.Optional(CONF_DEPARTURE_TIME): TimeSelector(),
}
)
SERVICE_GET_TRANSIT_TIMES_SCHEMA = _SERVICE_BASE_SCHEMA.extend(
{
vol.Optional(CONF_TRANSIT_MODE): TRANSIT_MODE_SELECTOR,
vol.Optional(
CONF_TRANSIT_ROUTING_PREFERENCE
): TRANSIT_ROUTING_PREFERENCE_SELECTOR,
vol.Exclusive(CONF_DEPARTURE_TIME, "time"): TimeSelector(),
vol.Exclusive(CONF_ARRIVAL_TIME, "time"): TimeSelector(),
}
)

View File

@@ -4,20 +4,11 @@ from __future__ import annotations
import datetime
import logging
from typing import TYPE_CHECKING, Any
from typing import Any
from google.api_core.client_options import ClientOptions
from google.api_core.exceptions import GoogleAPIError, PermissionDenied
from google.maps.routing_v2 import (
ComputeRoutesRequest,
Route,
RouteModifiers,
RoutesAsyncClient,
RouteTravelMode,
RoutingPreference,
TransitPreferences,
)
from google.protobuf import timestamp_pb2
from google.maps.routing_v2 import Route, RoutesAsyncClient
from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -38,7 +29,6 @@ from homeassistant.core import CoreState, HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.location import find_coordinates
from homeassistant.util import dt as dt_util
from .const import (
ATTRIBUTION,
@@ -53,14 +43,10 @@ from .const import (
CONF_UNITS,
DEFAULT_NAME,
DOMAIN,
TRAFFIC_MODELS_TO_GOOGLE_SDK_ENUM,
TRANSIT_PREFS_TO_GOOGLE_SDK_ENUM,
TRANSPORT_TYPES_TO_GOOGLE_SDK_ENUM,
TRAVEL_MODES_TO_GOOGLE_SDK_ENUM,
UNITS_TO_GOOGLE_SDK_ENUM,
)
from .helpers import (
convert_to_waypoint,
async_compute_routes,
create_routes_api_disabled_issue,
delete_routes_api_disabled_issue,
)
@@ -70,28 +56,6 @@ _LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = datetime.timedelta(minutes=10)
FIELD_MASK = "routes.duration,routes.localized_values"
def convert_time(time_str: str) -> timestamp_pb2.Timestamp | None:
"""Convert a string like '08:00' to a google pb2 Timestamp.
If the time is in the past, it will be shifted to the next day.
"""
parsed_time = dt_util.parse_time(time_str)
if TYPE_CHECKING:
assert parsed_time is not None
start_of_day = dt_util.start_of_local_day()
combined = datetime.datetime.combine(
start_of_day,
parsed_time,
start_of_day.tzinfo,
)
if combined < dt_util.now():
combined = combined + datetime.timedelta(days=1)
timestamp = timestamp_pb2.Timestamp()
timestamp.FromDatetime(dt=combined)
return timestamp
SENSOR_DESCRIPTIONS = [
SensorEntityDescription(
key="duration",
@@ -203,67 +167,6 @@ class GoogleTravelTimeSensor(SensorEntity):
self._config_entry.options[CONF_MODE]
]
if (
departure_time := self._config_entry.options.get(CONF_DEPARTURE_TIME)
) is not None:
departure_time = convert_time(departure_time)
if (
arrival_time := self._config_entry.options.get(CONF_ARRIVAL_TIME)
) is not None:
arrival_time = convert_time(arrival_time)
if travel_mode != RouteTravelMode.TRANSIT:
arrival_time = None
traffic_model = None
routing_preference = None
route_modifiers = None
if travel_mode == RouteTravelMode.DRIVE:
if (
options_traffic_model := self._config_entry.options.get(
CONF_TRAFFIC_MODEL
)
) is not None:
traffic_model = TRAFFIC_MODELS_TO_GOOGLE_SDK_ENUM[options_traffic_model]
routing_preference = RoutingPreference.TRAFFIC_AWARE_OPTIMAL
route_modifiers = RouteModifiers(
avoid_tolls=self._config_entry.options.get(CONF_AVOID) == "tolls",
avoid_ferries=self._config_entry.options.get(CONF_AVOID) == "ferries",
avoid_highways=self._config_entry.options.get(CONF_AVOID) == "highways",
avoid_indoor=self._config_entry.options.get(CONF_AVOID) == "indoor",
)
transit_preferences = None
if travel_mode == RouteTravelMode.TRANSIT:
transit_routing_preference = None
transit_travel_mode = (
TransitPreferences.TransitTravelMode.TRANSIT_TRAVEL_MODE_UNSPECIFIED
)
if (
option_transit_preferences := self._config_entry.options.get(
CONF_TRANSIT_ROUTING_PREFERENCE
)
) is not None:
transit_routing_preference = TRANSIT_PREFS_TO_GOOGLE_SDK_ENUM[
option_transit_preferences
]
if (
option_transit_mode := self._config_entry.options.get(CONF_TRANSIT_MODE)
) is not None:
transit_travel_mode = TRANSPORT_TYPES_TO_GOOGLE_SDK_ENUM[
option_transit_mode
]
transit_preferences = TransitPreferences(
routing_preference=transit_routing_preference,
allowed_travel_modes=[transit_travel_mode],
)
language = None
if (
options_language := self._config_entry.options.get(CONF_LANGUAGE)
) is not None:
language = options_language
self._resolved_origin = find_coordinates(self.hass, self._origin)
self._resolved_destination = find_coordinates(self.hass, self._destination)
_LOGGER.debug(
@@ -272,22 +175,24 @@ class GoogleTravelTimeSensor(SensorEntity):
self._resolved_destination,
)
if self._resolved_destination is not None and self._resolved_origin is not None:
request = ComputeRoutesRequest(
origin=convert_to_waypoint(self.hass, self._resolved_origin),
destination=convert_to_waypoint(self.hass, self._resolved_destination),
travel_mode=travel_mode,
routing_preference=routing_preference,
departure_time=departure_time,
arrival_time=arrival_time,
route_modifiers=route_modifiers,
language_code=language,
units=UNITS_TO_GOOGLE_SDK_ENUM[self._config_entry.options[CONF_UNITS]],
traffic_model=traffic_model,
transit_preferences=transit_preferences,
)
try:
response = await self._client.compute_routes(
request, metadata=[("x-goog-fieldmask", FIELD_MASK)]
response = await async_compute_routes(
client=self._client,
origin=self._resolved_origin,
destination=self._resolved_destination,
hass=self.hass,
travel_mode=travel_mode,
units=self._config_entry.options[CONF_UNITS],
language=self._config_entry.options.get(CONF_LANGUAGE),
avoid=self._config_entry.options.get(CONF_AVOID),
traffic_model=self._config_entry.options.get(CONF_TRAFFIC_MODEL),
transit_mode=self._config_entry.options.get(CONF_TRANSIT_MODE),
transit_routing_preference=self._config_entry.options.get(
CONF_TRANSIT_ROUTING_PREFERENCE
),
departure_time=self._config_entry.options.get(CONF_DEPARTURE_TIME),
arrival_time=self._config_entry.options.get(CONF_ARRIVAL_TIME),
field_mask=FIELD_MASK,
)
_LOGGER.debug("Received response: %s", response)
if response is not None and len(response.routes) > 0:

View File

@@ -0,0 +1,167 @@
"""Services for the Google Travel Time integration."""
from typing import cast
from google.api_core.client_options import ClientOptions
from google.api_core.exceptions import GoogleAPIError, PermissionDenied
from google.maps.routing_v2 import RoutesAsyncClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_CONFIG_ENTRY_ID,
CONF_API_KEY,
CONF_LANGUAGE,
CONF_MODE,
)
from homeassistant.core import (
HomeAssistant,
ServiceCall,
ServiceResponse,
SupportsResponse,
callback,
)
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.service import async_get_config_entry
from .const import (
CONF_ARRIVAL_TIME,
CONF_AVOID,
CONF_DEPARTURE_TIME,
CONF_DESTINATION,
CONF_ORIGIN,
CONF_TRAFFIC_MODEL,
CONF_TRANSIT_MODE,
CONF_TRANSIT_ROUTING_PREFERENCE,
CONF_UNITS,
DOMAIN,
TRAVEL_MODES_TO_GOOGLE_SDK_ENUM,
)
from .helpers import (
async_compute_routes,
create_routes_api_disabled_issue,
delete_routes_api_disabled_issue,
)
from .schemas import SERVICE_GET_TRANSIT_TIMES_SCHEMA, SERVICE_GET_TRAVEL_TIMES_SCHEMA
SERVICE_GET_TRAVEL_TIMES = "get_travel_times"
SERVICE_GET_TRANSIT_TIMES = "get_transit_times"
def _build_routes_response(response) -> list[dict]:
"""Build the routes response from the API response."""
if response is None or not response.routes:
return []
return [
{
"duration": route.duration.seconds,
"duration_text": route.localized_values.duration.text,
"static_duration_text": route.localized_values.static_duration.text,
"distance_meters": route.distance_meters,
"distance_text": route.localized_values.distance.text,
}
for route in response.routes
]
def _raise_service_error(
hass: HomeAssistant, entry: ConfigEntry, exc: Exception
) -> None:
"""Raise a HomeAssistantError based on the exception."""
if isinstance(exc, PermissionDenied):
create_routes_api_disabled_issue(hass, entry)
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="permission_denied",
) from exc
if isinstance(exc, GoogleAPIError):
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="api_error",
translation_placeholders={"error": str(exc)},
) from exc
raise exc
@callback
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up services for the Google Travel Time integration."""
async def async_get_travel_times_service(service: ServiceCall) -> ServiceResponse:
"""Handle the service call to get travel times (non-transit modes)."""
entry = async_get_config_entry(
service.hass, DOMAIN, service.data[ATTR_CONFIG_ENTRY_ID]
)
api_key = entry.data[CONF_API_KEY]
travel_mode = TRAVEL_MODES_TO_GOOGLE_SDK_ENUM[service.data[CONF_MODE]]
client_options = ClientOptions(api_key=api_key)
client = RoutesAsyncClient(client_options=client_options)
try:
response = await async_compute_routes(
client=client,
origin=service.data[CONF_ORIGIN],
destination=service.data[CONF_DESTINATION],
hass=hass,
travel_mode=travel_mode,
units=service.data[CONF_UNITS],
language=service.data.get(CONF_LANGUAGE),
avoid=service.data.get(CONF_AVOID),
traffic_model=service.data.get(CONF_TRAFFIC_MODEL),
departure_time=service.data.get(CONF_DEPARTURE_TIME),
)
except Exception as ex: # noqa: BLE001
_raise_service_error(hass, entry, ex)
delete_routes_api_disabled_issue(hass, entry)
return cast(ServiceResponse, {"routes": _build_routes_response(response)})
async def async_get_transit_times_service(service: ServiceCall) -> ServiceResponse:
"""Handle the service call to get transit times."""
entry = async_get_config_entry(
service.hass, DOMAIN, service.data[ATTR_CONFIG_ENTRY_ID]
)
api_key = entry.data[CONF_API_KEY]
client_options = ClientOptions(api_key=api_key)
client = RoutesAsyncClient(client_options=client_options)
try:
response = await async_compute_routes(
client=client,
origin=service.data[CONF_ORIGIN],
destination=service.data[CONF_DESTINATION],
hass=hass,
travel_mode=TRAVEL_MODES_TO_GOOGLE_SDK_ENUM["transit"],
units=service.data[CONF_UNITS],
language=service.data.get(CONF_LANGUAGE),
transit_mode=service.data.get(CONF_TRANSIT_MODE),
transit_routing_preference=service.data.get(
CONF_TRANSIT_ROUTING_PREFERENCE
),
departure_time=service.data.get(CONF_DEPARTURE_TIME),
arrival_time=service.data.get(CONF_ARRIVAL_TIME),
)
except Exception as ex: # noqa: BLE001
_raise_service_error(hass, entry, ex)
delete_routes_api_disabled_issue(hass, entry)
return cast(ServiceResponse, {"routes": _build_routes_response(response)})
hass.services.async_register(
DOMAIN,
SERVICE_GET_TRAVEL_TIMES,
async_get_travel_times_service,
SERVICE_GET_TRAVEL_TIMES_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_GET_TRANSIT_TIMES,
async_get_transit_times_service,
SERVICE_GET_TRANSIT_TIMES_SCHEMA,
supports_response=SupportsResponse.ONLY,
)

View File

@@ -0,0 +1,118 @@
get_travel_times:
fields:
config_entry_id:
required: true
selector:
config_entry:
integration: google_travel_time
origin:
required: true
example: "1600 Amphitheatre Parkway, Mountain View, CA"
selector:
text:
destination:
required: true
example: "1 Infinite Loop, Cupertino, CA"
selector:
text:
mode:
default: "driving"
selector:
select:
translation_key: mode
options:
- driving
- walking
- bicycling
units:
default: "metric"
selector:
select:
translation_key: units
options:
- metric
- imperial
language:
required: false
selector:
language:
avoid:
required: false
selector:
select:
translation_key: avoid
options:
- tolls
- highways
- ferries
- indoor
traffic_model:
required: false
selector:
select:
translation_key: traffic_model
options:
- best_guess
- pessimistic
- optimistic
departure_time:
required: false
selector:
time:
get_transit_times:
fields:
config_entry_id:
required: true
selector:
config_entry:
integration: google_travel_time
origin:
required: true
example: "1600 Amphitheatre Parkway, Mountain View, CA"
selector:
text:
destination:
required: true
example: "1 Infinite Loop, Cupertino, CA"
selector:
text:
units:
default: "metric"
selector:
select:
translation_key: units
options:
- metric
- imperial
language:
required: false
selector:
language:
transit_mode:
required: false
selector:
select:
translation_key: transit_mode
options:
- bus
- subway
- train
- tram
- rail
transit_routing_preference:
required: false
selector:
select:
translation_key: transit_routing_preference
options:
- less_walking
- fewer_transfers
departure_time:
required: false
selector:
time:
arrival_time:
required: false
selector:
time:

View File

@@ -30,6 +30,14 @@
}
}
},
"exceptions": {
"api_error": {
"message": "Google API error: {error}"
},
"permission_denied": {
"message": "[%key:component::google_travel_time::config::error::permission_denied%]"
}
},
"issues": {
"routes_api_disabled": {
"description": "Your Google Travel Time integration `{entry_title}` uses an API key which does not have the Routes API enabled.\n\n Please follow the instructions to [enable the API for your project]({enable_api_url}) and make sure your [API key restrictions]({api_key_restrictions_url}) allow access to the Routes API.\n\n After enabling the API this issue will be resolved automatically.",
@@ -107,5 +115,91 @@
}
}
},
"services": {
"get_transit_times": {
"description": "Retrieves route alternatives and travel times between two locations using public transit.",
"fields": {
"arrival_time": {
"description": "The desired arrival time.",
"name": "Arrival time"
},
"config_entry_id": {
"description": "[%key:component::google_travel_time::services::get_travel_times::fields::config_entry_id::description%]",
"name": "[%key:component::google_travel_time::services::get_travel_times::fields::config_entry_id::name%]"
},
"departure_time": {
"description": "[%key:component::google_travel_time::services::get_travel_times::fields::departure_time::description%]",
"name": "[%key:component::google_travel_time::services::get_travel_times::fields::departure_time::name%]"
},
"destination": {
"description": "[%key:component::google_travel_time::services::get_travel_times::fields::destination::description%]",
"name": "[%key:component::google_travel_time::config::step::user::data::destination%]"
},
"language": {
"description": "[%key:component::google_travel_time::services::get_travel_times::fields::language::description%]",
"name": "[%key:common::config_flow::data::language%]"
},
"origin": {
"description": "[%key:component::google_travel_time::services::get_travel_times::fields::origin::description%]",
"name": "[%key:component::google_travel_time::config::step::user::data::origin%]"
},
"transit_mode": {
"description": "The preferred transit mode.",
"name": "[%key:component::google_travel_time::options::step::init::data::transit_mode%]"
},
"transit_routing_preference": {
"description": "The transit routing preference.",
"name": "[%key:component::google_travel_time::options::step::init::data::transit_routing_preference%]"
},
"units": {
"description": "[%key:component::google_travel_time::services::get_travel_times::fields::units::description%]",
"name": "[%key:component::google_travel_time::options::step::init::data::units%]"
}
},
"name": "Get transit times"
},
"get_travel_times": {
"description": "Retrieves route alternatives and travel times between two locations.",
"fields": {
"avoid": {
"description": "Features to avoid when calculating the route.",
"name": "[%key:component::google_travel_time::options::step::init::data::avoid%]"
},
"config_entry_id": {
"description": "The config entry to use for this action.",
"name": "Config entry"
},
"departure_time": {
"description": "The desired departure time.",
"name": "Departure time"
},
"destination": {
"description": "The destination of the route.",
"name": "[%key:component::google_travel_time::config::step::user::data::destination%]"
},
"language": {
"description": "The language to use for the response.",
"name": "[%key:common::config_flow::data::language%]"
},
"mode": {
"description": "The mode of transportation.",
"name": "[%key:component::google_travel_time::options::step::init::data::mode%]"
},
"origin": {
"description": "The origin of the route.",
"name": "[%key:component::google_travel_time::config::step::user::data::origin%]"
},
"traffic_model": {
"description": "The traffic model to use when calculating driving routes.",
"name": "[%key:component::google_travel_time::options::step::init::data::traffic_model%]"
},
"units": {
"description": "Which unit system to use.",
"name": "[%key:component::google_travel_time::options::step::init::data::units%]"
}
},
"name": "Get travel times"
}
},
"title": "Google Maps Travel Time"
}

View File

@@ -11,6 +11,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import (
CONF_DISCOVERY_INTERVAL_DEFAULT,
CONF_LISTENING_PORT_DEFAULT,
CONF_MULTICAST_ADDRESS_DEFAULT,
CONF_TARGET_PORT_DEFAULT,
@@ -49,7 +50,7 @@ class GoveeLocalApiCoordinator(DataUpdateCoordinator[list[GoveeDevice]]):
broadcast_port=CONF_TARGET_PORT_DEFAULT,
listening_port=CONF_LISTENING_PORT_DEFAULT,
discovery_enabled=True,
discovery_interval=1,
discovery_interval=CONF_DISCOVERY_INTERVAL_DEFAULT,
update_enabled=False,
)
for source_ip in source_ips

View File

@@ -16,6 +16,7 @@ from homeassistant.helpers.typing import ConfigType
from .const import (
AUTH_API_TOKEN,
AUTH_PASSWORD,
CACHED_API_KEY,
CONF_AUTH_TYPE,
CONF_PLANT_ID,
DEFAULT_PLANT_ID,
@@ -41,15 +42,163 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
def get_device_list_classic(
api: growattServer.GrowattApi, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
"""Retrieve the device list for the selected plant."""
plant_id = config[CONF_PLANT_ID]
async def async_migrate_entry(
hass: HomeAssistant, config_entry: GrowattConfigEntry
) -> bool:
"""Migrate old config entries.
# Log in to api and fetch first plant if no plant id is defined.
Migration from version 1.0 to 1.1:
- Resolves DEFAULT_PLANT_ID (legacy value "0") to actual plant_id
- Only applies to Classic API (username/password authentication)
- Caches the logged-in API instance to avoid growatt server API rate limiting
Rate Limiting Workaround:
The Growatt Classic API rate-limits individual endpoints (login, plant_list,
device_list) with 5-minute windows. Without caching, the sequence would be:
Migration: login() → plant_list()
Setup: login() → device_list()
This results in 2 login() calls within seconds, triggering rate limits.
By caching the API instance (which contains the authenticated session), we
achieve:
Migration: login() → plant_list() → [cache API instance]
Setup: [reuse cached API] → device_list()
This reduces to just 1 login() call during the migration+setup cycle and prevent account lockout.
"""
_LOGGER.debug(
"Migrating config entry from version %s.%s",
config_entry.version,
config_entry.minor_version,
)
# Migrate from version 1.0 to 1.1
if config_entry.version == 1 and config_entry.minor_version < 1:
config = config_entry.data
# First, ensure auth_type field exists (legacy config entry migration)
# This handles config entries created before auth_type was introduced
if CONF_AUTH_TYPE not in config:
new_data = dict(config_entry.data)
# Detect auth type based on which fields are present
if CONF_TOKEN in config:
new_data[CONF_AUTH_TYPE] = AUTH_API_TOKEN
hass.config_entries.async_update_entry(config_entry, data=new_data)
config = config_entry.data
_LOGGER.debug("Added auth_type field to V1 API config entry")
elif CONF_USERNAME in config:
new_data[CONF_AUTH_TYPE] = AUTH_PASSWORD
hass.config_entries.async_update_entry(config_entry, data=new_data)
config = config_entry.data
_LOGGER.debug("Added auth_type field to Classic API config entry")
else:
# Config entry has no auth fields - this is invalid but migration
# should still succeed. Setup will fail later with a clearer error.
_LOGGER.warning(
"Config entry has no authentication fields. "
"Setup will fail until the integration is reconfigured"
)
# Handle DEFAULT_PLANT_ID resolution
if config.get(CONF_PLANT_ID) == DEFAULT_PLANT_ID:
# V1 API should never have DEFAULT_PLANT_ID (plant selection happens in config flow)
# If it does, this indicates a corrupted config entry
if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
_LOGGER.error(
"V1 API config entry has DEFAULT_PLANT_ID, which indicates a "
"corrupted configuration. Please reconfigure the integration"
)
return False
# Classic API with DEFAULT_PLANT_ID - resolve to actual plant_id
if config.get(CONF_AUTH_TYPE) == AUTH_PASSWORD:
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
url = config.get(CONF_URL, DEFAULT_URL)
if not username or not password:
# Credentials missing - cannot migrate
_LOGGER.error(
"Cannot migrate DEFAULT_PLANT_ID due to missing credentials"
)
return False
try:
# Create API instance and login
api, login_response = await _create_api_and_login(
hass, username, password, url
)
# Resolve DEFAULT_PLANT_ID to actual plant_id
plant_info = await hass.async_add_executor_job(
api.plant_list, login_response["user"]["id"]
)
except (ConfigEntryError, RequestException, JSONDecodeError) as ex:
# API failure during migration - return False to retry later
_LOGGER.error(
"Failed to resolve plant_id during migration: %s. "
"Migration will retry on next restart",
ex,
)
return False
if not plant_info or "data" not in plant_info or not plant_info["data"]:
_LOGGER.error(
"No plants found for this account. "
"Migration will retry on next restart"
)
return False
first_plant_id = plant_info["data"][0]["plantId"]
# Update config entry with resolved plant_id
new_data = dict(config_entry.data)
new_data[CONF_PLANT_ID] = first_plant_id
hass.config_entries.async_update_entry(
config_entry, data=new_data, minor_version=1
)
# Cache the logged-in API instance for reuse in async_setup_entry()
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][f"{CACHED_API_KEY}{config_entry.entry_id}"] = api
_LOGGER.info(
"Migrated config entry to use specific plant_id '%s'",
first_plant_id,
)
else:
# No DEFAULT_PLANT_ID to resolve, just bump version
hass.config_entries.async_update_entry(config_entry, minor_version=1)
_LOGGER.debug("Migration completed to version %s.%s", config_entry.version, 1)
return True
async def _create_api_and_login(
hass: HomeAssistant, username: str, password: str, url: str
) -> tuple[growattServer.GrowattApi, dict]:
"""Create API instance and perform login.
Returns both the API instance (with authenticated session) and the login
response (containing user_id needed for subsequent API calls).
"""
api = growattServer.GrowattApi(add_random_user_id=True, agent_identifier=username)
api.server_url = url
login_response = await hass.async_add_executor_job(
_login_classic_api, api, username, password
)
return api, login_response
def _login_classic_api(
api: growattServer.GrowattApi, username: str, password: str
) -> dict:
"""Log in to Classic API and return user info."""
try:
login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
login_response = api.login(username, password)
except (RequestException, JSONDecodeError) as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during login: {ex}"
@@ -62,31 +211,7 @@ def get_device_list_classic(
raise ConfigEntryAuthFailed("Username, Password or URL may be incorrect!")
raise ConfigEntryError(f"Growatt login failed: {msg}")
user_id = login_response["user"]["id"]
# Legacy support: DEFAULT_PLANT_ID ("0") triggers auto-selection of first plant.
# Modern config flow always sets a specific plant_id, but old config entries
# from earlier versions may still have plant_id="0".
if plant_id == DEFAULT_PLANT_ID:
try:
plant_info = api.plant_list(user_id)
except (RequestException, JSONDecodeError) as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during plant list: {ex}"
) from ex
if not plant_info or "data" not in plant_info or not plant_info["data"]:
raise ConfigEntryError("No plants found for this account.")
plant_id = plant_info["data"][0]["plantId"]
# Get a list of devices for specified plant to add sensors for.
try:
devices = api.device_list(plant_id)
except (RequestException, JSONDecodeError) as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during device list: {ex}"
) from ex
return devices, plant_id
return login_response
def get_device_list_v1(
@@ -94,9 +219,9 @@ def get_device_list_v1(
) -> tuple[list[dict[str, str]], str]:
"""Device list logic for Open API V1.
Note: Plant selection (including auto-selection if only one plant exists)
is handled in the config flow before this function is called. This function
only fetches devices for the already-selected plant_id.
Plant selection is handled in the config flow before this function is called.
This function expects a specific plant_id and fetches devices for that plant.
"""
plant_id = config[CONF_PLANT_ID]
try:
@@ -126,19 +251,6 @@ def get_device_list_v1(
return supported_devices, plant_id
def get_device_list(
api, config: Mapping[str, str], api_version: str
) -> tuple[list[dict[str, str]], str]:
"""Dispatch to correct device list logic based on API version."""
if api_version == "v1":
return get_device_list_v1(api, config)
if api_version == "classic":
return get_device_list_classic(api, config)
# Defensive: api_version is hardcoded in async_setup_entry as "v1" or "classic"
# This line is unreachable through normal execution but kept as a safeguard
raise ConfigEntryError(f"Unknown API version: {api_version}") # pragma: no cover
async def async_setup_entry(
hass: HomeAssistant, config_entry: GrowattConfigEntry
) -> bool:
@@ -154,40 +266,47 @@ async def async_setup_entry(
new_data[CONF_URL] = url
hass.config_entries.async_update_entry(config_entry, data=new_data)
# Migrate legacy config entries without auth_type field
if CONF_AUTH_TYPE not in config:
new_data = dict(config_entry.data)
# Detect auth type based on which fields are present
if CONF_TOKEN in config:
new_data[CONF_AUTH_TYPE] = AUTH_API_TOKEN
elif CONF_USERNAME in config:
new_data[CONF_AUTH_TYPE] = AUTH_PASSWORD
else:
raise ConfigEntryError(
"Unable to determine authentication type from config entry."
)
hass.config_entries.async_update_entry(config_entry, data=new_data)
config = config_entry.data
# Determine API version
# Determine API version and get devices
# Note: auth_type field is guaranteed to exist after migration
if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
api_version = "v1"
# V1 API (token-based, no login needed)
token = config[CONF_TOKEN]
api = growattServer.OpenApiV1(token=token)
elif config.get(CONF_AUTH_TYPE) == AUTH_PASSWORD:
api_version = "classic"
username = config[CONF_USERNAME]
api = growattServer.GrowattApi(
add_random_user_id=True, agent_identifier=username
devices, plant_id = await hass.async_add_executor_job(
get_device_list_v1, api, config
)
api.server_url = url
elif config.get(CONF_AUTH_TYPE) == AUTH_PASSWORD:
# Classic API (username/password with login)
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
# Check if migration cached an authenticated API instance for us to reuse.
# This avoids calling login() twice (once in migration, once here) which
# would trigger rate limiting.
cached_api = hass.data.get(DOMAIN, {}).pop(
f"{CACHED_API_KEY}{config_entry.entry_id}", None
)
if cached_api:
# Reuse the logged-in API instance from migration (rate limit optimization)
api = cached_api
_LOGGER.debug("Reusing logged-in session from migration")
else:
# No cached API (normal setup or migration didn't run)
# Create new API instance and login
api, _ = await _create_api_and_login(hass, username, password, url)
# Get plant_id and devices using the authenticated session
plant_id = config[CONF_PLANT_ID]
try:
devices = await hass.async_add_executor_job(api.device_list, plant_id)
except (RequestException, JSONDecodeError) as ex:
raise ConfigEntryError(
f"Error communicating with Growatt API during device list: {ex}"
) from ex
else:
raise ConfigEntryError("Unknown authentication type in config entry.")
devices, plant_id = await hass.async_add_executor_job(
get_device_list, api, config, api_version
)
# Create a coordinator for the total sensors
total_coordinator = GrowattCoordinator(
hass, config_entry, plant_id, "total", plant_id

View File

@@ -40,6 +40,7 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
"""Config flow class."""
VERSION = 1
MINOR_VERSION = 1
api: growattServer.GrowattApi

View File

@@ -53,3 +53,8 @@ ABORT_NO_PLANTS = "no_plants"
BATT_MODE_LOAD_FIRST = 0
BATT_MODE_BATTERY_FIRST = 1
BATT_MODE_GRID_FIRST = 2
# Internal key prefix for caching authenticated API instance
# Used to pass logged-in session from async_migrate_entry to async_setup_entry
# to avoid double login() calls that trigger API rate limiting
CACHED_API_KEY = "_cached_api_"

View File

@@ -28,7 +28,6 @@ from habiticalib import (
import voluptuous as vol
from homeassistant.components.todo import ATTR_RENAME
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import ATTR_DATE, ATTR_NAME
from homeassistant.core import (
HomeAssistant,
@@ -38,7 +37,7 @@ from homeassistant.core import (
callback,
)
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, service
from homeassistant.helpers.selector import ConfigEntrySelector
from homeassistant.util import dt as dt_util
@@ -243,24 +242,11 @@ SERVICE_TASK_TYPE_MAP = {
}
def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
"""Return config entry or raise if not found or not loaded."""
if not (entry := hass.config_entries.async_get_entry(entry_id)):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="entry_not_found",
)
if entry.state is not ConfigEntryState.LOADED:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="entry_not_loaded",
)
return entry
async def _cast_skill(call: ServiceCall) -> ServiceResponse:
"""Skill action."""
entry = get_config_entry(call.hass, call.data[ATTR_CONFIG_ENTRY])
entry: HabiticaConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY]
)
coordinator = entry.runtime_data
skill = SKILL_MAP[call.data[ATTR_SKILL]]
@@ -324,7 +310,9 @@ async def _cast_skill(call: ServiceCall) -> ServiceResponse:
async def _manage_quests(call: ServiceCall) -> ServiceResponse:
"""Accept, reject, start, leave or cancel quests."""
entry = get_config_entry(call.hass, call.data[ATTR_CONFIG_ENTRY])
entry: HabiticaConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY]
)
coordinator = entry.runtime_data
FUNC_MAP = {
@@ -372,7 +360,9 @@ async def _manage_quests(call: ServiceCall) -> ServiceResponse:
async def _score_task(call: ServiceCall) -> ServiceResponse:
"""Score a task action."""
entry = get_config_entry(call.hass, call.data[ATTR_CONFIG_ENTRY])
entry: HabiticaConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY]
)
coordinator = entry.runtime_data
direction = (
@@ -436,7 +426,9 @@ async def _score_task(call: ServiceCall) -> ServiceResponse:
async def _transformation(call: ServiceCall) -> ServiceResponse:
"""User a transformation item on a player character."""
entry = get_config_entry(call.hass, call.data[ATTR_CONFIG_ENTRY])
entry: HabiticaConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY]
)
coordinator = entry.runtime_data
item = ITEMID_MAP[call.data[ATTR_ITEM]]
@@ -519,7 +511,9 @@ async def _transformation(call: ServiceCall) -> ServiceResponse:
async def _get_tasks(call: ServiceCall) -> ServiceResponse:
"""Get tasks action."""
entry = get_config_entry(call.hass, call.data[ATTR_CONFIG_ENTRY])
entry: HabiticaConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY]
)
coordinator = entry.runtime_data
response: list[TaskData] = coordinator.data.tasks
@@ -568,7 +562,9 @@ async def _get_tasks(call: ServiceCall) -> ServiceResponse:
async def _create_or_update_task(call: ServiceCall) -> ServiceResponse: # noqa: C901
"""Create or update task action."""
entry = get_config_entry(call.hass, call.data[ATTR_CONFIG_ENTRY])
entry: HabiticaConfigEntry = service.async_get_config_entry(
call.hass, DOMAIN, call.data[ATTR_CONFIG_ENTRY]
)
coordinator = entry.runtime_data
await coordinator.async_refresh()
is_update = call.service in (
@@ -852,7 +848,7 @@ async def _create_or_update_task(call: ServiceCall) -> ServiceResponse: # noqa:
def async_setup_services(hass: HomeAssistant) -> None:
"""Set up services for Habitica integration."""
for service in (
for service_name in (
SERVICE_ABORT_QUEST,
SERVICE_ACCEPT_QUEST,
SERVICE_CANCEL_QUEST,
@@ -862,13 +858,13 @@ def async_setup_services(hass: HomeAssistant) -> None:
):
hass.services.async_register(
DOMAIN,
service,
service_name,
_manage_quests,
schema=SERVICE_MANAGE_QUEST_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
for service in (
for service_name in (
SERVICE_UPDATE_DAILY,
SERVICE_UPDATE_HABIT,
SERVICE_UPDATE_REWARD,
@@ -876,12 +872,12 @@ def async_setup_services(hass: HomeAssistant) -> None:
):
hass.services.async_register(
DOMAIN,
service,
service_name,
_create_or_update_task,
schema=SERVICE_UPDATE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
for service in (
for service_name in (
SERVICE_CREATE_DAILY,
SERVICE_CREATE_HABIT,
SERVICE_CREATE_REWARD,
@@ -889,7 +885,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
):
hass.services.async_register(
DOMAIN,
service,
service_name,
_create_or_update_task,
schema=SERVICE_CREATE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,

View File

@@ -550,12 +550,6 @@
"delete_todos_failed": {
"message": "Unable to delete item from Habitica to-do list, please try again"
},
"entry_not_found": {
"message": "The selected character is not configured in Home Assistant."
},
"entry_not_loaded": {
"message": "The selected character is currently not loaded or disabled in Home Assistant."
},
"frequency_not_monthly": {
"message": "Unable to update task, monthly repeat settings apply only to monthly recurring dailies."
},

View File

@@ -2,14 +2,13 @@
from typing import Any
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .coordinator import HDFuryCoordinator
from .coordinator import HDFuryConfigEntry, HDFuryCoordinator
async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
hass: HomeAssistant, entry: HDFuryConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: HDFuryCoordinator = entry.runtime_data

View File

@@ -64,6 +64,9 @@
"autosw": {
"default": "mdi:import"
},
"cec": {
"default": "mdi:remote-tv"
},
"cec0en": {
"default": "mdi:remote-tv"
},
@@ -102,6 +105,12 @@
},
"relay": {
"default": "mdi:electric-switch"
},
"tx0plus5": {
"default": "mdi:lightning-bolt"
},
"tx1plus5": {
"default": "mdi:lightning-bolt"
}
}
}

View File

@@ -6,8 +6,8 @@
"documentation": "https://www.home-assistant.io/integrations/hdfury",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "gold",
"requirements": ["hdfury==1.4.2"],
"quality_scale": "platinum",
"requirements": ["hdfury==1.5.0"],
"zeroconf": [
{ "name": "diva-*", "type": "_http._tcp.local." },
{ "name": "vertex2-*", "type": "_http._tcp.local." },

View File

@@ -73,4 +73,4 @@ rules:
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo
strict-typing: done

View File

@@ -3,13 +3,7 @@
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from hdfury import (
OPERATION_MODES,
TX0_INPUT_PORTS,
TX1_INPUT_PORTS,
HDFuryAPI,
HDFuryError,
)
from hdfury import OPERATION_MODES, TX0_INPUT_PORTS, TX1_INPUT_PORTS, HDFuryError
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.core import HomeAssistant
@@ -27,7 +21,7 @@ PARALLEL_UPDATES = 1
class HDFurySelectEntityDescription(SelectEntityDescription):
"""Description for HDFury select entities."""
set_value_fn: Callable[[HDFuryAPI, str], Awaitable[None]]
set_value_fn: Callable[[HDFuryCoordinator, str], Awaitable[None]]
SELECT_PORTS: tuple[HDFurySelectEntityDescription, ...] = (

View File

@@ -121,6 +121,9 @@
"autosw": {
"name": "Auto switch inputs"
},
"cec": {
"name": "CEC"
},
"cec0en": {
"name": "CEC RX0"
},
@@ -159,6 +162,12 @@
},
"relay": {
"name": "Relay"
},
"tx0plus5": {
"name": "TX0 force +5v"
},
"tx1plus5": {
"name": "TX1 force +5v"
}
}
},

View File

@@ -33,6 +33,12 @@ SWITCHES: tuple[HDFurySwitchEntityDescription, ...] = (
entity_category=EntityCategory.CONFIG,
set_value_fn=lambda client, value: client.set_auto_switch_inputs(value),
),
HDFurySwitchEntityDescription(
key="cec",
translation_key="cec",
entity_category=EntityCategory.CONFIG,
set_value_fn=lambda client, value: client.set_cec(value),
),
HDFurySwitchEntityDescription(
key="cec0en",
translation_key="cec0en",
@@ -111,6 +117,20 @@ SWITCHES: tuple[HDFurySwitchEntityDescription, ...] = (
entity_category=EntityCategory.CONFIG,
set_value_fn=lambda client, value: client.set_relay(value),
),
HDFurySwitchEntityDescription(
key="tx0plus5",
translation_key="tx0plus5",
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
set_value_fn=lambda client, value: client.set_tx0_force_5v(value),
),
HDFurySwitchEntityDescription(
key="tx1plus5",
translation_key="tx1plus5",
entity_registry_enabled_default=False,
entity_category=EntityCategory.CONFIG,
set_value_fn=lambda client, value: client.set_tx1_force_5v(value),
),
)

View File

@@ -11,6 +11,7 @@ from homeassistant.components.binary_sensor import (
PLATFORM_SCHEMA as BINARY_SENSOR_PLATFORM_SCHEMA,
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
@@ -23,6 +24,7 @@ from homeassistant.const import (
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
EntityCategory,
)
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
@@ -42,29 +44,118 @@ CONF_IGNORED = "ignored"
DEFAULT_DELAY = 0
DEFAULT_IGNORED = False
# Device class mapping for Hikvision event types
DEVICE_CLASS_MAP: dict[str, BinarySensorDeviceClass | None] = {
"Motion": BinarySensorDeviceClass.MOTION,
"Line Crossing": BinarySensorDeviceClass.MOTION,
"Field Detection": BinarySensorDeviceClass.MOTION,
"Tamper Detection": BinarySensorDeviceClass.MOTION,
"Shelter Alarm": None,
"Disk Full": None,
"Disk Error": None,
"Net Interface Broken": BinarySensorDeviceClass.CONNECTIVITY,
"IP Conflict": BinarySensorDeviceClass.CONNECTIVITY,
"Illegal Access": None,
"Video Mismatch": None,
"Bad Video": None,
"PIR Alarm": BinarySensorDeviceClass.MOTION,
"Face Detection": BinarySensorDeviceClass.MOTION,
"Scene Change Detection": BinarySensorDeviceClass.MOTION,
"I/O": None,
"Unattended Baggage": BinarySensorDeviceClass.MOTION,
"Attended Baggage": BinarySensorDeviceClass.MOTION,
"Recording Failure": None,
"Exiting Region": BinarySensorDeviceClass.MOTION,
"Entering Region": BinarySensorDeviceClass.MOTION,
# Entity descriptions for known Hikvision event types
# The key matches the sensor_type from pyhik (the friendly name from SENSOR_MAP)
BINARY_SENSOR_DESCRIPTIONS: dict[str, BinarySensorEntityDescription] = {
"Motion": BinarySensorEntityDescription(
key="motion",
device_class=BinarySensorDeviceClass.MOTION,
),
"Line Crossing": BinarySensorEntityDescription(
key="line_crossing",
translation_key="line_crossing",
device_class=BinarySensorDeviceClass.MOTION,
),
"Field Detection": BinarySensorEntityDescription(
key="field_detection",
translation_key="field_detection",
device_class=BinarySensorDeviceClass.MOTION,
),
"Tamper Detection": BinarySensorEntityDescription(
key="tamper_detection",
device_class=BinarySensorDeviceClass.TAMPER,
),
"Shelter Alarm": BinarySensorEntityDescription(
key="shelter_alarm",
translation_key="shelter_alarm",
),
"Disk Full": BinarySensorEntityDescription(
key="disk_full",
translation_key="disk_full",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Disk Error": BinarySensorEntityDescription(
key="disk_error",
translation_key="disk_error",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Net Interface Broken": BinarySensorEntityDescription(
key="net_interface_broken",
translation_key="net_interface_broken",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
),
"IP Conflict": BinarySensorEntityDescription(
key="ip_conflict",
translation_key="ip_conflict",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Illegal Access": BinarySensorEntityDescription(
key="illegal_access",
translation_key="illegal_access",
device_class=BinarySensorDeviceClass.SAFETY,
),
"Video Mismatch": BinarySensorEntityDescription(
key="video_mismatch",
translation_key="video_mismatch",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Bad Video": BinarySensorEntityDescription(
key="bad_video",
translation_key="bad_video",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"PIR Alarm": BinarySensorEntityDescription(
key="pir_alarm",
translation_key="pir_alarm",
device_class=BinarySensorDeviceClass.MOTION,
),
"Face Detection": BinarySensorEntityDescription(
key="face_detection",
translation_key="face_detection",
device_class=BinarySensorDeviceClass.MOTION,
),
"Scene Change Detection": BinarySensorEntityDescription(
key="scene_change_detection",
translation_key="scene_change_detection",
device_class=BinarySensorDeviceClass.MOTION,
),
"I/O": BinarySensorEntityDescription(
key="io",
translation_key="io",
),
"Unattended Baggage": BinarySensorEntityDescription(
key="unattended_baggage",
translation_key="unattended_baggage",
device_class=BinarySensorDeviceClass.MOTION,
),
"Attended Baggage": BinarySensorEntityDescription(
key="attended_baggage",
translation_key="attended_baggage",
device_class=BinarySensorDeviceClass.MOTION,
),
"Recording Failure": BinarySensorEntityDescription(
key="recording_failure",
translation_key="recording_failure",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Exiting Region": BinarySensorEntityDescription(
key="exiting_region",
translation_key="exiting_region",
device_class=BinarySensorDeviceClass.MOTION,
),
"Entering Region": BinarySensorEntityDescription(
key="entering_region",
translation_key="entering_region",
device_class=BinarySensorDeviceClass.MOTION,
),
}
_LOGGER = logging.getLogger(__name__)
@@ -158,13 +249,24 @@ async def async_setup_entry(
)
return
# Log warnings for unknown sensor types and skip them
for sensor_type in sensors:
if sensor_type not in BINARY_SENSOR_DESCRIPTIONS:
_LOGGER.warning(
"Unknown Hikvision sensor type '%s', please report this at "
"https://github.com/home-assistant/core/issues",
sensor_type,
)
async_add_entities(
HikvisionBinarySensor(
entry=entry,
description=BINARY_SENSOR_DESCRIPTIONS[sensor_type],
sensor_type=sensor_type,
channel=channel_info[1],
)
for sensor_type, channel_list in sensors.items()
if sensor_type in BINARY_SENSOR_DESCRIPTIONS
for channel_info in channel_list
)
@@ -177,20 +279,18 @@ class HikvisionBinarySensor(HikvisionEntity, BinarySensorEntity):
def __init__(
self,
entry: HikvisionConfigEntry,
description: BinarySensorEntityDescription,
sensor_type: str,
channel: int,
) -> None:
"""Initialize the binary sensor."""
super().__init__(entry, channel)
self.entity_description = description
self._sensor_type = sensor_type
# Build unique ID (includes sensor_type for uniqueness per sensor)
self._attr_unique_id = f"{self._data.device_id}_{sensor_type}_{channel}"
# Set entity name and device class
self._attr_name = sensor_type
self._attr_device_class = DEVICE_CLASS_MAP.get(sensor_type)
# Callback ID for pyhik
self._callback_id = f"{self._data.device_id}.{sensor_type}.{channel}"

View File

@@ -34,6 +34,67 @@
"name": "{device_name} channel {channel_number}"
}
},
"entity": {
"binary_sensor": {
"attended_baggage": {
"name": "Attended baggage"
},
"bad_video": {
"name": "Bad video"
},
"disk_error": {
"name": "Disk error"
},
"disk_full": {
"name": "Disk full"
},
"entering_region": {
"name": "Entering region"
},
"exiting_region": {
"name": "Exiting region"
},
"face_detection": {
"name": "Face detection"
},
"field_detection": {
"name": "Field detection"
},
"illegal_access": {
"name": "Illegal access"
},
"io": {
"name": "I/O alarm"
},
"ip_conflict": {
"name": "IP conflict"
},
"line_crossing": {
"name": "Line crossing"
},
"net_interface_broken": {
"name": "Network interface broken"
},
"pir_alarm": {
"name": "PIR alarm"
},
"recording_failure": {
"name": "Recording failure"
},
"scene_change_detection": {
"name": "Scene change detection"
},
"shelter_alarm": {
"name": "Shelter alarm"
},
"unattended_baggage": {
"name": "Unattended baggage"
},
"video_mismatch": {
"name": "Video mismatch"
}
}
},
"issues": {
"deprecated_yaml_import_issue": {
"description": "Configuring {integration_title} using YAML is deprecated and the import failed. Please remove the `{domain}` entry from your `configuration.yaml` file and set up the integration manually.",

View File

@@ -460,6 +460,12 @@ class HomekitControllerFlowHandler(ConfigFlow, domain=DOMAIN):
except aiohomekit.AccessoryNotFoundError:
# Can no longer find the device on the network
return self.async_abort(reason="accessory_not_found_error")
except aiohomekit.AccessoryDisconnectedError as err:
# The accessory has disconnected from the network
return self.async_abort(
reason="accessory_disconnected_error",
description_placeholders={"error": str(err)},
)
except InsecureSetupCode:
errors["pairing_code"] = "insecure_setup_code"
except Exception as err:
@@ -490,6 +496,12 @@ class HomekitControllerFlowHandler(ConfigFlow, domain=DOMAIN):
except aiohomekit.AccessoryNotFoundError:
# Can no longer find the device on the network
return self.async_abort(reason="accessory_not_found_error")
except aiohomekit.AccessoryDisconnectedError as err:
# The accessory has disconnected from the network
return self.async_abort(
reason="accessory_disconnected_error",
description_placeholders={"error": str(err)},
)
except IndexError:
# TLV error, usually not in pairing mode
_LOGGER.exception("Pairing communication failed")

View File

@@ -1,6 +1,7 @@
{
"config": {
"abort": {
"accessory_disconnected_error": "A connectivity error occurred while attempting to pair with this device.\n\n{error}",
"accessory_not_found_error": "Cannot add pairing as device can no longer be found.",
"already_configured": "Accessory is already configured with this controller.",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",

View File

@@ -161,6 +161,11 @@ class HomematicipHAP:
_LOGGER.error("HMIP access point has lost connection with the cloud")
self._ws_connection_closed.set()
self.set_all_to_unavailable()
elif self._ws_connection_closed.is_set():
_LOGGER.info("HMIP access point has reconnected to the cloud")
self._get_state_task = self.hass.async_create_task(self._try_get_state())
self._get_state_task.add_done_callback(self.get_state_finished)
self._ws_connection_closed.clear()
@callback
def async_create_entity(self, *args, **kwargs) -> None:

View File

@@ -0,0 +1,41 @@
"""The Homevolt integration."""
from __future__ import annotations
from homevolt import Homevolt
from homeassistant.const import CONF_HOST, CONF_PASSWORD, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import HomevoltConfigEntry, HomevoltDataUpdateCoordinator
PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(hass: HomeAssistant, entry: HomevoltConfigEntry) -> bool:
"""Set up Homevolt from a config entry."""
host: str = entry.data[CONF_HOST]
password: str | None = entry.data.get(CONF_PASSWORD)
websession = async_get_clientsession(hass)
client = Homevolt(host, password, websession=websession)
coordinator = HomevoltDataUpdateCoordinator(hass, entry, client)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: HomevoltConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
await entry.runtime_data.client.close_connection()
return unload_ok

View File

@@ -0,0 +1,225 @@
"""Config flow for the Homevolt integration."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from homevolt import Homevolt, HomevoltAuthenticationError, HomevoltConnectionError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): str,
}
)
STEP_CREDENTIALS_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_PASSWORD): str,
}
)
class HomevoltConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Homevolt.

    Supported entry points:
    - user: host entered manually; password requested only if needed.
    - zeroconf: host discovered on the network; confirmed by the user.
    - reauth: triggered when the stored password stops working.
    """
    VERSION = 1
    MINOR_VERSION = 1
    def __init__(self) -> None:
        """Initialize the config flow."""
        # Host carried from the user/zeroconf step into the password step.
        self._host: str | None = None
        # True when an unauthenticated probe was rejected and a password
        # must be collected in the confirm step.
        self._need_password: bool = False
    async def check_status(self, client: Homevolt) -> dict[str, str]:
        """Check connection status and return errors if any.

        Returns an empty dict on success, otherwise a form-errors mapping
        with "base" set to invalid_auth, cannot_connect or unknown.
        """
        errors: dict[str, str] = {}
        try:
            await client.update_info()
        except HomevoltAuthenticationError:
            errors["base"] = "invalid_auth"
        except HomevoltConnectionError:
            errors["base"] = "cannot_connect"
        except Exception:
            # Catch-all so unexpected library errors surface as a form error
            # instead of crashing the flow; log with traceback for debugging.
            _LOGGER.exception("Error occurred while connecting to the Homevolt battery")
            errors["base"] = "unknown"
        return errors
    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            host = user_input[CONF_HOST]
            # Probe without a password first; many devices are open locally.
            password = None
            websession = async_get_clientsession(self.hass)
            client = Homevolt(host, password, websession=websession)
            errors = await self.check_status(client)
            if errors.get("base") == "invalid_auth":
                # Device requires authentication: ask for the password.
                self._host = host
                return await self.async_step_credentials()
            if not errors:
                device_id = client.unique_id
                await self.async_set_unique_id(device_id)
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title="Homevolt",
                    data={
                        CONF_HOST: host,
                        CONF_PASSWORD: None,
                    },
                )
        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )
    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Handle reauth on authentication failure."""
        # entry_data is not needed; the host is read from the reauth entry.
        return await self.async_step_reauth_confirm()
    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reauth confirmation with new credentials."""
        reauth_entry = self._get_reauth_entry()
        host = reauth_entry.data[CONF_HOST]
        errors: dict[str, str] = {}
        if user_input is not None:
            password = user_input[CONF_PASSWORD]
            websession = async_get_clientsession(self.hass)
            client = Homevolt(host, password, websession=websession)
            errors = await self.check_status(client)
            if not errors:
                device_id = client.unique_id
                await self.async_set_unique_id(device_id)
                # Guard against reauthenticating with a different device.
                self._abort_if_unique_id_mismatch(reason="wrong_account")
                return self.async_update_reload_and_abort(
                    reauth_entry,
                    unique_id=device_id,
                    data_updates={CONF_HOST: host, CONF_PASSWORD: password},
                )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=STEP_CREDENTIALS_DATA_SCHEMA,
            errors=errors,
            description_placeholders={"host": host},
        )
    async def async_step_credentials(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the credentials step."""
        errors: dict[str, str] = {}
        # Only reachable from async_step_user, which always sets the host.
        assert self._host is not None
        if user_input is not None:
            password = user_input[CONF_PASSWORD]
            websession = async_get_clientsession(self.hass)
            client = Homevolt(self._host, password, websession=websession)
            errors = await self.check_status(client)
            if not errors:
                device_id = client.unique_id
                await self.async_set_unique_id(device_id)
                self._abort_if_unique_id_configured()
                return self.async_create_entry(
                    title="Homevolt",
                    data={
                        CONF_HOST: self._host,
                        CONF_PASSWORD: password,
                    },
                )
        return self.async_show_form(
            step_id="credentials",
            data_schema=STEP_CREDENTIALS_DATA_SCHEMA,
            errors=errors,
            description_placeholders={"host": self._host},
        )
    async def async_step_zeroconf(
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle zeroconf discovery."""
        self._host = discovery_info.host
        # Bail out early if an entry for this host already exists.
        self._async_abort_entries_match({CONF_HOST: self._host})
        websession = async_get_clientsession(self.hass)
        client = Homevolt(self._host, None, websession=websession)
        errors = await self.check_status(client)
        if errors.get("base") == "invalid_auth":
            # Password will be requested in the confirm step.
            self._need_password = True
        elif errors:
            return self.async_abort(reason=errors["base"])
        else:
            await self.async_set_unique_id(client.unique_id)
            # Known device: update its stored host (e.g. after DHCP change)
            # and abort instead of creating a duplicate entry.
            self._abort_if_unique_id_configured(
                updates={CONF_HOST: self._host},
            )
        return await self.async_step_zeroconf_confirm()
    async def async_step_zeroconf_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm zeroconf discovery."""
        # Only reachable from async_step_zeroconf, which always sets the host.
        assert self._host is not None
        errors: dict[str, str] = {}
        if user_input is None:
            if self._need_password:
                # Ask for the password together with the confirmation.
                return self.async_show_form(
                    step_id="zeroconf_confirm",
                    data_schema=STEP_CREDENTIALS_DATA_SCHEMA,
                    errors=errors,
                    description_placeholders={"host": self._host},
                )
            self._set_confirm_only()
            return self.async_show_form(
                step_id="zeroconf_confirm",
                description_placeholders={"host": self._host},
            )
        password: str | None = None
        if self._need_password:
            password = user_input[CONF_PASSWORD]
        # Re-verify connectivity (and credentials, if supplied) on submit.
        websession = async_get_clientsession(self.hass)
        client = Homevolt(self._host, password, websession=websession)
        errors = await self.check_status(client)
        if errors:
            return self.async_show_form(
                step_id="zeroconf_confirm",
                data_schema=STEP_CREDENTIALS_DATA_SCHEMA,
                errors=errors,
                description_placeholders={"host": self._host},
            )
        await self.async_set_unique_id(client.unique_id)
        self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})
        return self.async_create_entry(
            title="Homevolt",
            data={CONF_HOST: self._host, CONF_PASSWORD: password},
        )

View File

@@ -0,0 +1,9 @@
"""Constants for the Homevolt integration."""
from __future__ import annotations
from datetime import timedelta
# Integration domain; must match manifest.json.
DOMAIN = "homevolt"
# Manufacturer shown on devices created by this integration.
MANUFACTURER = "Homevolt"
# Local polling interval used by the data update coordinator.
SCAN_INTERVAL = timedelta(seconds=15)

View File

@@ -0,0 +1,56 @@
"""Data update coordinator for Homevolt integration."""
from __future__ import annotations
import logging
from homevolt import (
Homevolt,
HomevoltAuthenticationError,
HomevoltConnectionError,
HomevoltError,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN, SCAN_INTERVAL
# Config entry whose runtime_data carries the integration's coordinator.
type HomevoltConfigEntry = ConfigEntry[HomevoltDataUpdateCoordinator]
_LOGGER = logging.getLogger(__name__)
class HomevoltDataUpdateCoordinator(DataUpdateCoordinator[Homevolt]):
    """Coordinate periodic polling of a single Homevolt device."""

    config_entry: HomevoltConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        entry: HomevoltConfigEntry,
        client: Homevolt,
    ) -> None:
        """Initialize the Homevolt coordinator."""
        # Keep a reference to the client; entities also use it directly
        # (e.g. for the device's configuration URL).
        self.client = client
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
        )

    async def _async_update_data(self) -> Homevolt:
        """Poll the device and return the refreshed client object."""
        try:
            await self.client.update_info()
        except HomevoltAuthenticationError as exc:
            # Stored credentials no longer work: start the reauth flow.
            raise ConfigEntryAuthFailed from exc
        except (HomevoltConnectionError, HomevoltError) as exc:
            raise UpdateFailed(f"Error communicating with device: {exc}") from exc
        return self.client

View File

@@ -0,0 +1,55 @@
"""Diagnostics support for Homevolt."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from .coordinator import HomevoltConfigEntry
# Config keys removed from the diagnostics output.
TO_REDACT = {CONF_HOST, CONF_PASSWORD}
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: HomevoltConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry.

    Always includes the (redacted) config and coordinator state; device,
    sensor and per-EMS sections are added only after a successful refresh.
    """
    coordinator = entry.runtime_data
    client = coordinator.data
    last_exception = coordinator.last_exception
    result: dict[str, Any] = {
        "config": async_redact_data(entry.data, TO_REDACT),
        "coordinator": {
            "last_update_success": coordinator.last_update_success,
            "last_exception": str(last_exception) if last_exception else None,
        },
    }
    if client is None:
        # No data fetched yet; only config/coordinator info is available.
        return result
    sensors = client.sensors
    result["device"] = {"unique_id": client.unique_id}
    result["sensors"] = {
        key: {"value": item.value, "type": item.type} for key, item in sensors.items()
    }
    ems: dict[str, Any] = {}
    for device_id, metadata in client.device_metadata.items():
        ems[device_id] = {
            "name": metadata.name,
            "model": metadata.model,
            # Group each device's sensor values under its identifier.
            "sensors": {
                key: item.value
                for key, item in sensors.items()
                if item.device_identifier == device_id
            },
        }
    result["ems"] = ems
    return result

View File

@@ -0,0 +1,17 @@
{
"domain": "homevolt",
"name": "Homevolt",
"codeowners": ["@danielhiversen"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/homevolt",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["homevolt==0.4.4"],
"zeroconf": [
{
"name": "homevolt*",
"type": "_http._tcp.local."
}
]
}

View File

@@ -0,0 +1,70 @@
rules:
# Bronze
action-setup:
status: exempt
comment: Integration does not register custom actions.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: Integration does not register custom actions.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: Local_polling without events
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: Integration does not register custom actions.
config-entry-unloading: done
docs-configuration-parameters:
status: exempt
comment: Integration does not have an options flow.
docs-installation-parameters: todo
entity-unavailable: done
integration-owner: done
log-when-unavailable: todo
parallel-updates: done
reauthentication-flow: done
test-coverage: todo
# Gold
devices: done
diagnostics: done
discovery-update-info: done
discovery: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -0,0 +1,351 @@
"""Support for Homevolt sensors."""
from __future__ import annotations
import logging
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import (
PERCENTAGE,
SIGNAL_STRENGTH_DECIBELS,
EntityCategory,
UnitOfElectricCurrent,
UnitOfElectricPotential,
UnitOfEnergy,
UnitOfFrequency,
UnitOfPower,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import HomevoltConfigEntry, HomevoltDataUpdateCoordinator
PARALLEL_UPDATES = 0 # Coordinator-based updates
_LOGGER = logging.getLogger(__name__)
# Descriptions for every sensor type the Homevolt client can report.
# Each description's key must match the ``type`` of a sensor reported by the
# client; entries without a translation_key are named from their device class.
SENSORS: tuple[SensorEntityDescription, ...] = (
    SensorEntityDescription(
        key="available_charging_energy",
        translation_key="available_charging_energy",
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
    ),
    SensorEntityDescription(
        key="available_charging_power",
        translation_key="available_charging_power",
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
    ),
    SensorEntityDescription(
        key="available_discharge_energy",
        translation_key="available_discharge_energy",
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
    ),
    SensorEntityDescription(
        key="available_discharge_power",
        translation_key="available_discharge_power",
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
    ),
    SensorEntityDescription(
        key="rssi",
        translation_key="rssi",
        device_class=SensorDeviceClass.SIGNAL_STRENGTH,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key="average_rssi",
        translation_key="average_rssi",
        device_class=SensorDeviceClass.SIGNAL_STRENGTH,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
        entity_category=EntityCategory.DIAGNOSTIC,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key="charge_cycles",
        # translation_key is required for the "cycles" unit translation in
        # strings.json to be applied; without it that entry is never used.
        translation_key="charge_cycles",
        state_class=SensorStateClass.TOTAL_INCREASING,
        native_unit_of_measurement="cycles",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="energy_exported",
        translation_key="energy_exported",
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL_INCREASING,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
    ),
    SensorEntityDescription(
        key="energy_imported",
        translation_key="energy_imported",
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL_INCREASING,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
    ),
    SensorEntityDescription(
        key="frequency",
        device_class=SensorDeviceClass.FREQUENCY,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfFrequency.HERTZ,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="l1_current",
        translation_key="l1_current",
        device_class=SensorDeviceClass.CURRENT,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
    ),
    SensorEntityDescription(
        key="l1_l2_voltage",
        translation_key="l1_l2_voltage",
        device_class=SensorDeviceClass.VOLTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricPotential.VOLT,
    ),
    SensorEntityDescription(
        key="l1_power",
        translation_key="l1_power",
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key="l1_voltage",
        translation_key="l1_voltage",
        device_class=SensorDeviceClass.VOLTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricPotential.VOLT,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key="l2_current",
        translation_key="l2_current",
        device_class=SensorDeviceClass.CURRENT,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
    ),
    SensorEntityDescription(
        key="l2_l3_voltage",
        translation_key="l2_l3_voltage",
        device_class=SensorDeviceClass.VOLTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricPotential.VOLT,
    ),
    SensorEntityDescription(
        key="l2_power",
        translation_key="l2_power",
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key="l2_voltage",
        translation_key="l2_voltage",
        device_class=SensorDeviceClass.VOLTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricPotential.VOLT,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key="l3_current",
        translation_key="l3_current",
        device_class=SensorDeviceClass.CURRENT,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
    ),
    SensorEntityDescription(
        key="l3_l1_voltage",
        translation_key="l3_l1_voltage",
        device_class=SensorDeviceClass.VOLTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricPotential.VOLT,
    ),
    SensorEntityDescription(
        key="l3_power",
        translation_key="l3_power",
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key="l3_voltage",
        translation_key="l3_voltage",
        device_class=SensorDeviceClass.VOLTAGE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricPotential.VOLT,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key="power",
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
        entity_registry_enabled_default=False,
    ),
    SensorEntityDescription(
        key="schedule_id",
        translation_key="schedule_id",
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="schedule_max_discharge",
        translation_key="schedule_max_discharge",
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="schedule_max_power",
        translation_key="schedule_max_power",
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="schedule_power_setpoint",
        translation_key="schedule_power_setpoint",
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="schedule_type",
        translation_key="schedule_type",
        device_class=SensorDeviceClass.ENUM,
        # Options must match the "schedule_type" state keys in strings.json.
        options=[
            "idle",
            "inverter_charge",
            "inverter_discharge",
            "grid_charge",
            "grid_discharge",
            "grid_charge_discharge",
            "frequency_reserve",
            "solar_charge",
            "solar_charge_discharge",
            "full_solar_export",
        ],
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="state_of_charge",
        # Named from the BATTERY device class; NOTE(review): strings.json has
        # an unused "battery_state_of_charge" entry — confirm which is intended.
        device_class=SensorDeviceClass.BATTERY,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=PERCENTAGE,
    ),
    SensorEntityDescription(
        key="system_temperature",
        translation_key="system_temperature",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="tmax",
        translation_key="tmax",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    SensorEntityDescription(
        key="tmin",
        translation_key="tmin",
        device_class=SensorDeviceClass.TEMPERATURE,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: HomevoltConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Homevolt sensor.

    Creates one entity per sensor reported by the device; sensors whose
    type has no matching description are logged and skipped.
    """
    coordinator = entry.runtime_data
    descriptions = {description.key: description for description in SENSORS}
    entities: list[HomevoltSensor] = []
    for sensor_key, sensor in coordinator.data.sensors.items():
        description = descriptions.get(sensor.type)
        if description is None:
            _LOGGER.warning("Unsupported sensor '%s' found during setup", sensor)
            continue
        entities.append(HomevoltSensor(description, coordinator, sensor_key))
    async_add_entities(entities)
class HomevoltSensor(CoordinatorEntity[HomevoltDataUpdateCoordinator], SensorEntity):
    """Representation of a Homevolt sensor."""

    entity_description: SensorEntityDescription
    _attr_has_entity_name = True

    def __init__(
        self,
        description: SensorEntityDescription,
        coordinator: HomevoltDataUpdateCoordinator,
        sensor_key: str,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
        self.entity_description = description
        self._sensor_key = sensor_key
        data = coordinator.data
        base_unique_id = data.unique_id
        self._attr_unique_id = f"{base_unique_id}_{sensor_key}"
        # Attach the entity to the sub-device reporting this sensor.
        device_identifier = data.sensors[sensor_key].device_identifier
        metadata = data.device_metadata.get(device_identifier)
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, f"{base_unique_id}_{device_identifier}")},
            configuration_url=coordinator.client.base_url,
            manufacturer=MANUFACTURER,
            model=None if metadata is None else metadata.model,
            name=None if metadata is None else metadata.name,
        )

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        if not super().available:
            return False
        # The set of reported sensors may shrink between refreshes.
        return self._sensor_key in self.coordinator.data.sensors

    @property
    def native_value(self) -> StateType:
        """Return the native value of the sensor."""
        return self.coordinator.data.sensors[self._sensor_key].value

View File

@@ -0,0 +1,165 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unknown": "[%key:common::config_flow::error::unknown%]",
"wrong_account": "The device you authenticated with is different from the one configured. Re-authenticate with the same Homevolt battery."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"credentials": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "The local password configured for your Homevolt battery."
},
"description": "This device requires a password to connect. Please enter the password for {host}."
},
"reauth_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::homevolt::config::step::credentials::data_description::password%]"
},
"description": "Authentication failed for the Homevolt battery at {host}. Please re-enter the password.",
"title": "[%key:common::config_flow::title::reauth%]"
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"
},
"data_description": {
"host": "The IP address or hostname of your Homevolt battery on your local network."
},
"description": "Connect Home Assistant to your Homevolt battery over the local network."
},
"zeroconf_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::homevolt::config::step::credentials::data_description::password%]"
},
"description": "Do you want to set up the Homevolt battery at {host}?"
}
}
},
"entity": {
"sensor": {
"available_charging_energy": {
"name": "Available charging energy"
},
"available_charging_power": {
"name": "Available charging power"
},
"available_discharge_energy": {
"name": "Available discharge energy"
},
"available_discharge_power": {
"name": "Available discharge power"
},
"average_rssi": {
"name": "Average RSSI"
},
"battery_state_of_charge": {
"name": "Battery state of charge"
},
      "charge_cycles": {
        "name": "Charge cycles",
        "unit_of_measurement": "cycles"
      },
"energy_exported": {
"name": "Energy exported"
},
"energy_imported": {
"name": "Energy imported"
},
"l1_current": {
"name": "L1 current"
},
"l1_l2_voltage": {
"name": "L1-L2 voltage"
},
"l1_power": {
"name": "L1 power"
},
"l1_voltage": {
"name": "L1 voltage"
},
"l2_current": {
"name": "L2 current"
},
"l2_l3_voltage": {
"name": "L2-L3 voltage"
},
"l2_power": {
"name": "L2 power"
},
"l2_voltage": {
"name": "L2 voltage"
},
"l3_current": {
"name": "L3 current"
},
"l3_l1_voltage": {
"name": "L3-L1 voltage"
},
"l3_power": {
"name": "L3 power"
},
"l3_voltage": {
"name": "L3 voltage"
},
"power": {
"name": "Power"
},
"rssi": {
"name": "RSSI"
},
"schedule_id": {
"name": "Schedule ID"
},
"schedule_max_discharge": {
"name": "Schedule max discharge"
},
"schedule_max_power": {
"name": "Schedule max power"
},
"schedule_power_setpoint": {
"name": "Schedule power setpoint"
},
"schedule_type": {
"name": "Schedule type",
"state": {
"frequency_reserve": "Frequency reserve",
"full_solar_export": "Full solar export",
"grid_charge": "Grid charge",
"grid_charge_discharge": "Grid charge/discharge",
"grid_discharge": "Grid discharge",
"idle": "Idle",
"inverter_charge": "Inverter charge",
"inverter_discharge": "Inverter discharge",
"solar_charge": "Solar charge",
"solar_charge_discharge": "Solar charge/discharge"
}
},
"system_temperature": {
"name": "System temperature"
},
"tmax": {
"name": "Maximum temperature"
},
"tmin": {
"name": "Minimum temperature"
}
}
}
}

Some files were not shown because too many files have changed in this diff Show More