Mirror of https://github.com/home-assistant/core.git (synced 2026-01-24 00:23:02 +01:00)

Comparing `homevolt_t...dev` (215 commits)
New file: `.claude/skills/integrations/SKILL.md` (784 lines)

@@ -0,0 +1,784 @@
---
name: Home Assistant Integration knowledge
description: Everything you need to know to build, test, and review Home Assistant Integrations. If you're looking at an integration, you must use this as your primary reference.
---

### File Locations
- **Integration code**: `./homeassistant/components/<integration_domain>/`
- **Integration tests**: `./tests/components/<integration_domain>/`

## Integration Templates

### Standard Integration Structure
```
homeassistant/components/my_integration/
├── __init__.py          # Entry point with async_setup_entry
├── manifest.json        # Integration metadata and dependencies
├── const.py             # Domain and constants
├── config_flow.py       # UI configuration flow
├── coordinator.py       # Data update coordinator (if needed)
├── entity.py            # Base entity class (if shared patterns)
├── sensor.py            # Sensor platform
├── strings.json         # User-facing text and translations
├── services.yaml        # Service definitions (if applicable)
└── quality_scale.yaml   # Quality scale rule status
```

An integration can have platforms as needed (e.g., `sensor.py`, `switch.py`, etc.). The following platforms have extra guidelines:
- **Diagnostics**: [`platform-diagnostics.md`](platform-diagnostics.md) for diagnostic data collection
- **Repairs**: [`platform-repairs.md`](platform-repairs.md) for user-actionable repair issues

### Minimal Integration Checklist
- [ ] `manifest.json` with required fields (domain, name, codeowners, etc.)
- [ ] `__init__.py` with `async_setup_entry` and `async_unload_entry` (see the sketch after this checklist)
- [ ] `config_flow.py` with UI configuration support
- [ ] `const.py` with `DOMAIN` constant
- [ ] `strings.json` with at least config flow text
- [ ] Platform files (`sensor.py`, etc.) as needed
- [ ] `quality_scale.yaml` with rule status tracking
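Below is a minimal `__init__.py` sketch covering the setup/unload pair from the checklist. It is illustrative only: `MyClient` is a stand-in for the integration's library client, not a real dependency.

```python
"""Example integration skeleton (illustrative, not a real component)."""

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant

PLATFORMS: list[Platform] = [Platform.SENSOR]


class MyClient:
    """Placeholder for the integration's library client."""

    def __init__(self, host: str) -> None:
        self.host = host


type MyIntegrationConfigEntry = ConfigEntry[MyClient]


async def async_setup_entry(hass: HomeAssistant, entry: MyIntegrationConfigEntry) -> bool:
    """Set up the integration from a config entry."""
    client = MyClient(entry.data[CONF_HOST])
    entry.runtime_data = client
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: MyIntegrationConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
```
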
## Integration Quality Scale

Home Assistant uses an Integration Quality Scale to ensure code quality and consistency. The quality level determines which rules apply:

### Quality Scale Levels
- **Bronze**: Basic requirements (ALL Bronze rules are mandatory)
- **Silver**: Enhanced functionality
- **Gold**: Advanced features
- **Platinum**: Highest quality standards

### Quality Scale Progression
- **Bronze → Silver**: Add entity unavailability, parallel updates, auth flows
- **Silver → Gold**: Add device management, diagnostics, translations
- **Gold → Platinum**: Add strict typing, async dependencies, websession injection

### How Rules Apply
1. **Check `manifest.json`**: Look for `"quality_scale"` key to determine integration level
2. **Bronze Rules**: Always required for any integration with quality scale
3. **Higher Tier Rules**: Only apply if integration targets that tier or higher
4. **Rule Status**: Check `quality_scale.yaml` in integration folder for:
   - `done`: Rule implemented
   - `exempt`: Rule doesn't apply (with reason in comment)
   - `todo`: Rule needs implementation

### Example `quality_scale.yaml` Structure
```yaml
rules:
  # Bronze (mandatory)
  config-flow: done
  entity-unique-id: done
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.

  # Silver (if targeting Silver+)
  entity-unavailable: done
  parallel-updates: done

  # Gold (if targeting Gold+)
  devices: done
  diagnostics: done

  # Platinum (if targeting Platinum)
  strict-typing: done
```

**When Reviewing/Creating Code**: Always check the integration's quality scale level and exemption status before applying rules.

## Code Organization

### Core Locations
- Shared constants: `homeassistant/const.py` (use these instead of hardcoding)
- Integration structure:
  - `homeassistant/components/{domain}/const.py` - Constants
  - `homeassistant/components/{domain}/models.py` - Data models
  - `homeassistant/components/{domain}/coordinator.py` - Update coordinator
  - `homeassistant/components/{domain}/config_flow.py` - Configuration flow
  - `homeassistant/components/{domain}/{platform}.py` - Platform implementations

### Common Modules
- **coordinator.py**: Centralize data fetching logic
  ```python
  class MyCoordinator(DataUpdateCoordinator[MyData]):
      def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
          super().__init__(
              hass,
              logger=LOGGER,
              name=DOMAIN,
              update_interval=timedelta(minutes=1),
              config_entry=config_entry,  # ✅ Pass config_entry - it's accepted and recommended
          )
  ```
- **entity.py**: Base entity definitions to reduce duplication
  ```python
  class MyEntity(CoordinatorEntity[MyCoordinator]):
      _attr_has_entity_name = True
  ```

### Runtime Data Storage
- **Use ConfigEntry.runtime_data**: Store non-persistent runtime data
  ```python
  type MyIntegrationConfigEntry = ConfigEntry[MyClient]


  async def async_setup_entry(hass: HomeAssistant, entry: MyIntegrationConfigEntry) -> bool:
      client = MyClient(entry.data[CONF_HOST])
      entry.runtime_data = client
  ```

### Manifest Requirements
- **Required Fields**: `domain`, `name`, `codeowners`, `integration_type`, `documentation`, `requirements`
- **Integration Types**: `device`, `hub`, `service`, `system`, `helper`
- **IoT Class**: Always specify connectivity method (e.g., `cloud_polling`, `local_polling`, `local_push`)
- **Discovery Methods**: Add when applicable: `zeroconf`, `dhcp`, `bluetooth`, `ssdp`, `usb`
- **Dependencies**: Include platform dependencies (e.g., `application_credentials`, `bluetooth_adapters`)
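As an illustration of those fields, a minimal `manifest.json` could look like the following (all values are placeholders for a hypothetical integration):

```json
{
  "domain": "my_integration",
  "name": "My Integration",
  "codeowners": ["@me"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/my_integration",
  "integration_type": "device",
  "iot_class": "local_polling",
  "requirements": ["my-device-lib==1.0.0"]
}
```
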
### Config Flow Patterns
- **Version Control**: Always set `VERSION = 1` and `MINOR_VERSION = 1`
- **Unique ID Management**:
  ```python
  await self.async_set_unique_id(device_unique_id)
  self._abort_if_unique_id_configured()
  ```
- **Error Handling**: Define errors in `strings.json` under `config.error`
- **Step Methods**: Use standard naming (`async_step_user`, `async_step_discovery`, etc.)

### Integration Ownership
- **manifest.json**: Add GitHub usernames to `codeowners`:
  ```json
  {
    "domain": "my_integration",
    "name": "My Integration",
    "codeowners": ["@me"]
  }
  ```

### Async Dependencies (Platinum)
- **Requirement**: All dependencies must use asyncio
- Ensures efficient task handling without thread context switching

### WebSession Injection (Platinum)
- **Pass WebSession**: Support passing web sessions to dependencies
  ```python
  async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
      """Set up integration from config entry."""
      client = MyClient(entry.data[CONF_HOST], async_get_clientsession(hass))
  ```
- For cookies: Use `async_create_clientsession` (aiohttp) or `create_async_httpx_client` (httpx)

### Data Update Coordinator
- **Standard Pattern**: Use for efficient data management
  ```python
  class MyCoordinator(DataUpdateCoordinator):
      def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
          super().__init__(
              hass,
              logger=LOGGER,
              name=DOMAIN,
              update_interval=timedelta(minutes=5),
              config_entry=config_entry,  # ✅ Pass config_entry - it's accepted and recommended
          )
          self.client = client

      async def _async_update_data(self):
          try:
              return await self.client.fetch_data()
          except ApiError as err:
              raise UpdateFailed(f"API communication error: {err}")
  ```
- **Error Types**: Use `UpdateFailed` for API errors, `ConfigEntryAuthFailed` for auth issues
- **Config Entry**: Always pass `config_entry` parameter to coordinator - it's accepted and recommended

## Integration Guidelines

### Configuration Flow
- **UI Setup Required**: All integrations must support configuration via UI
- **Manifest**: Set `"config_flow": true` in `manifest.json`
- **Data Storage**:
  - Connection-critical config: Store in `ConfigEntry.data`
  - Non-critical settings: Store in `ConfigEntry.options`
- **Validation**: Always validate user input before creating entries
- **Config Entry Naming**:
  - ❌ Do NOT allow users to set config entry names in config flows
  - Names are automatically generated or can be customized later in UI
  - ✅ Exception: Helper integrations MAY allow custom names in config flow
- **Connection Testing**: Test device/service connection during config flow:
  ```python
  try:
      await client.get_data()
  except MyException:
      errors["base"] = "cannot_connect"
  ```
- **Duplicate Prevention**: Prevent duplicate configurations:
  ```python
  # Using unique ID
  await self.async_set_unique_id(identifier)
  self._abort_if_unique_id_configured()

  # Using unique data
  self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
  ```

### Reauthentication Support
- **Required Method**: Implement `async_step_reauth` in config flow
- **Credential Updates**: Allow users to update credentials without re-adding
- **Validation**: Verify account matches existing unique ID:
  ```python
  await self.async_set_unique_id(user_id)
  self._abort_if_unique_id_mismatch(reason="wrong_account")
  return self.async_update_reload_and_abort(
      self._get_reauth_entry(),
      data_updates={CONF_API_TOKEN: user_input[CONF_API_TOKEN]}
  )
  ```

### Reconfiguration Flow
- **Purpose**: Allow configuration updates without removing device
- **Implementation**: Add `async_step_reconfigure` method
- **Validation**: Prevent changing underlying account with `_abort_if_unique_id_mismatch`
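A hedged sketch of an `async_step_reconfigure` method inside the integration's config flow class (`validate_input` and `CannotConnect` are placeholders; `vol`, `CONF_HOST`, `Any`, and `ConfigFlowResult` are assumed to be imported as in the snippets above):

```python
async def async_step_reconfigure(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Handle reconfiguration of an existing entry."""
    errors: dict[str, str] = {}
    if user_input is not None:
        try:
            await validate_input(self.hass, user_input)  # placeholder validation helper
        except CannotConnect:  # placeholder exception
            errors["base"] = "cannot_connect"
        else:
            # If the integration uses unique IDs, re-set it here and guard against
            # pointing the entry at a different device/account:
            #   await self.async_set_unique_id(device_unique_id)
            #   self._abort_if_unique_id_mismatch(reason="wrong_device")
            return self.async_update_reload_and_abort(
                self._get_reconfigure_entry(),
                data_updates=user_input,
            )

    return self.async_show_form(
        step_id="reconfigure",
        data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
        errors=errors,
    )
```
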
### Device Discovery
- **Manifest Configuration**: Add discovery method (zeroconf, dhcp, etc.)
  ```json
  {
    "zeroconf": ["_mydevice._tcp.local."]
  }
  ```
- **Discovery Handler**: Implement appropriate `async_step_*` method:
  ```python
  async def async_step_zeroconf(self, discovery_info):
      """Handle zeroconf discovery."""
      await self.async_set_unique_id(discovery_info.properties["serialno"])
      self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host})
  ```
- **Network Updates**: Use discovery to update dynamic IP addresses

### Network Discovery Implementation
- **Zeroconf/mDNS**: Use async instances
  ```python
  aiozc = await zeroconf.async_get_async_instance(hass)
  ```
- **SSDP Discovery**: Register callbacks with cleanup
  ```python
  entry.async_on_unload(
      ssdp.async_register_callback(
          hass, _async_discovered_device,
          {"st": "urn:schemas-upnp-org:device:ZonePlayer:1"}
      )
  )
  ```

### Bluetooth Integration
- **Manifest Dependencies**: Add `bluetooth_adapters` to dependencies
- **Connectable**: Set `"connectable": true` for connection-required devices
- **Scanner Usage**: Always use shared scanner instance
  ```python
  scanner = bluetooth.async_get_scanner(hass)
  entry.async_on_unload(
      bluetooth.async_register_callback(
          hass, _async_discovered_device,
          {"service_uuid": "example_uuid"},
          bluetooth.BluetoothScanningMode.ACTIVE
      )
  )
  ```
- **Connection Handling**: Never reuse `BleakClient` instances, use 10+ second timeouts

### Setup Validation
- **Test Before Setup**: Verify integration can be set up in `async_setup_entry`
- **Exception Handling**:
  - `ConfigEntryNotReady`: Device offline or temporary failure
  - `ConfigEntryAuthFailed`: Authentication issues
  - `ConfigEntryError`: Unresolvable setup problems
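To make the mapping concrete, here is a hedged sketch of how `async_setup_entry` might translate library errors into those exceptions (`MyClient`, `MyAuthError`, and `MyConnectionError` are placeholders for the integration's library):

```python
async def async_setup_entry(hass: HomeAssistant, entry: MyIntegrationConfigEntry) -> bool:
    """Set up the integration, validating that the device is reachable."""
    client = MyClient(entry.data[CONF_HOST])

    try:
        await client.get_data()
    except MyAuthError as err:
        raise ConfigEntryAuthFailed("Invalid credentials") from err
    except MyConnectionError as err:
        # Temporary failure: Home Assistant retries the setup later
        raise ConfigEntryNotReady(f"Cannot reach device at {entry.data[CONF_HOST]}") from err

    entry.runtime_data = client
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True
```
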
### Config Entry Unloading
- **Required**: Implement `async_unload_entry` for runtime removal/reload
- **Platform Unloading**: Use `hass.config_entries.async_unload_platforms`
- **Cleanup**: Register callbacks with `entry.async_on_unload`:
  ```python
  async def async_unload_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
      """Unload a config entry."""
      if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
          entry.runtime_data.listener()  # Clean up resources
      return unload_ok
  ```

### Service Actions
- **Registration**: Register all service actions in `async_setup`, NOT in `async_setup_entry`
- **Validation**: Check config entry existence and loaded state:
  ```python
  async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
      async def service_action(call: ServiceCall) -> ServiceResponse:
          if not (entry := hass.config_entries.async_get_entry(call.data[ATTR_CONFIG_ENTRY_ID])):
              raise ServiceValidationError("Entry not found")
          if entry.state is not ConfigEntryState.LOADED:
              raise ServiceValidationError("Entry not loaded")
  ```
- **Exception Handling**: Raise appropriate exceptions:
  ```python
  # For invalid input
  if end_date < start_date:
      raise ServiceValidationError("End date must be after start date")

  # For service errors
  try:
      await client.set_schedule(start_date, end_date)
  except MyConnectionError as err:
      raise HomeAssistantError("Could not connect to the schedule") from err
  ```

### Service Registration Patterns
- **Entity Services**: Register on platform setup
  ```python
  platform.async_register_entity_service(
      "my_entity_service",
      {vol.Required("parameter"): cv.string},
      "handle_service_method"
  )
  ```
- **Service Schema**: Always validate input
  ```python
  SERVICE_SCHEMA = vol.Schema({
      vol.Required("entity_id"): cv.entity_ids,
      vol.Required("parameter"): cv.string,
      vol.Optional("timeout", default=30): cv.positive_int,
  })
  ```
- **Services File**: Create `services.yaml` with descriptions and field definitions

### Polling
- Use update coordinator pattern when possible
- **Polling intervals are NOT user-configurable**: Never add scan_interval, update_interval, or polling frequency options to config flows or config entries
- **Integration determines intervals**: Set `update_interval` programmatically based on integration logic, not user input
- **Minimum Intervals**:
  - Local network: 5 seconds
  - Cloud services: 60 seconds
- **Parallel Updates**: Specify number of concurrent updates:
  ```python
  PARALLEL_UPDATES = 1  # Serialize updates to prevent overwhelming device
  # OR
  PARALLEL_UPDATES = 0  # Unlimited (for coordinator-based or read-only)
  ```

## Entity Development

### Unique IDs
- **Required**: Every entity must have a unique ID for registry tracking
- Must be unique per platform (not per integration)
- Don't include integration domain or platform in ID
- **Implementation**:
  ```python
  class MySensor(SensorEntity):
      def __init__(self, device_id: str) -> None:
          self._attr_unique_id = f"{device_id}_temperature"
  ```

**Acceptable ID Sources**:
- Device serial numbers
- MAC addresses (formatted using `format_mac` from device registry)
- Physical identifiers (printed/EEPROM)
- Config entry ID as last resort: `f"{entry.entry_id}-battery"`

**Never Use**:
- IP addresses, hostnames, URLs
- Device names
- Email addresses, usernames
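A small sketch of deriving a unique ID from a MAC address with `format_mac`, as mentioned above (the `device` object is a placeholder):

```python
from homeassistant.helpers.device_registry import format_mac


class MySensor(SensorEntity):
    def __init__(self, device) -> None:
        # format_mac normalizes different MAC notations (case, separators),
        # so the same physical device always produces the same unique ID.
        self._attr_unique_id = f"{format_mac(device.mac)}_temperature"
```
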
### Entity Descriptions
- **Lambda/Anonymous Functions**: Often used in EntityDescription for value transformation
- **Multiline Lambdas**: When lambdas exceed line length, wrap in parentheses for readability
- **Bad pattern**:
  ```python
  SensorEntityDescription(
      key="temperature",
      name="Temperature",
      value_fn=lambda data: round(data["temp_value"] * 1.8 + 32, 1) if data.get("temp_value") is not None else None,  # ❌ Too long
  )
  ```
- **Good pattern**:
  ```python
  SensorEntityDescription(
      key="temperature",
      name="Temperature",
      value_fn=lambda data: (  # ✅ Parenthesis on same line as lambda
          round(data["temp_value"] * 1.8 + 32, 1)
          if data.get("temp_value") is not None
          else None
      ),
  )
  ```

### Entity Naming
- **Use has_entity_name**: Set `_attr_has_entity_name = True`
- **For specific fields**:
  ```python
  class MySensor(SensorEntity):
      _attr_has_entity_name = True

      def __init__(self, device: Device, field: str) -> None:
          self._attr_device_info = DeviceInfo(
              identifiers={(DOMAIN, device.id)},
              name=device.name,
          )
          self._attr_name = field  # e.g., "temperature", "humidity"
  ```
- **For device itself**: Set `_attr_name = None`

### Event Lifecycle Management
- **Subscribe in `async_added_to_hass`**:
  ```python
  async def async_added_to_hass(self) -> None:
      """Subscribe to events."""
      self.async_on_remove(
          self.client.events.subscribe("my_event", self._handle_event)
      )
  ```
- **Unsubscribe in `async_will_remove_from_hass`** if not using `async_on_remove`
- Never subscribe in `__init__` or other methods

### State Handling
- Unknown values: Use `None` (not "unknown" or "unavailable")
- Availability: Implement the `available` property instead of using an "unavailable" state

### Entity Availability
- **Mark Unavailable**: When data cannot be fetched from device/service
- **Coordinator Pattern**:
  ```python
  @property
  def available(self) -> bool:
      """Return if entity is available."""
      return super().available and self.identifier in self.coordinator.data
  ```
- **Direct Update Pattern**:
  ```python
  async def async_update(self) -> None:
      """Update entity."""
      try:
          data = await self.client.get_data()
      except MyException:
          self._attr_available = False
      else:
          self._attr_available = True
          self._attr_native_value = data.value
  ```

### Extra State Attributes
- All attribute keys must always be present
- Unknown values: Use `None`
- Provide descriptive attributes
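For example, a sketch of an `extra_state_attributes` property that keeps every key present and falls back to `None` (the attribute names and coordinator layout are illustrative):

```python
@property
def extra_state_attributes(self) -> dict[str, Any]:
    """Return additional descriptive attributes for this entity."""
    data = self.coordinator.data.get(self.identifier, {})
    return {
        "firmware_version": data.get("fw_version"),    # None when unknown
        "last_self_test": data.get("last_self_test"),  # None when unknown
    }
```
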
## Device Management

### Device Registry
- **Create Devices**: Group related entities under devices
- **Device Info**: Provide comprehensive metadata:
  ```python
  _attr_device_info = DeviceInfo(
      connections={(CONNECTION_NETWORK_MAC, device.mac)},
      identifiers={(DOMAIN, device.id)},
      name=device.name,
      manufacturer="My Company",
      model="My Sensor",
      sw_version=device.version,
  )
  ```
- For services: Add `entry_type=DeviceEntryType.SERVICE`

### Dynamic Device Addition
- **Auto-detect New Devices**: After initial setup
- **Implementation Pattern**:
  ```python
  def _check_device() -> None:
      current_devices = set(coordinator.data)
      new_devices = current_devices - known_devices
      if new_devices:
          known_devices.update(new_devices)
          async_add_entities([MySensor(coordinator, device_id) for device_id in new_devices])

  entry.async_on_unload(coordinator.async_add_listener(_check_device))
  ```

### Stale Device Removal
- **Auto-remove**: When devices disappear from hub/account
- **Device Registry Update**:
  ```python
  device_registry.async_update_device(
      device_id=device.id,
      remove_config_entry_id=self.config_entry.entry_id,
  )
  ```
- **Manual Deletion**: Implement `async_remove_config_entry_device` when needed
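One possible sketch of that hook, which lets users delete a device from the UI once it is no longer reported (the `runtime_data` lookup is an assumption about how this hypothetical integration stores its coordinator data):

```python
async def async_remove_config_entry_device(
    hass: HomeAssistant,
    config_entry: MyIntegrationConfigEntry,
    device_entry: dr.DeviceEntry,
) -> bool:
    """Allow removal of a device the integration no longer reports."""
    coordinator = config_entry.runtime_data
    return not any(
        identifier
        for identifier in device_entry.identifiers
        if identifier[0] == DOMAIN and identifier[1] in coordinator.data
    )
```
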
### Entity Categories
- **Required**: Assign appropriate category to entities
- **Implementation**: Set `_attr_entity_category`
  ```python
  class MySensor(SensorEntity):
      _attr_entity_category = EntityCategory.DIAGNOSTIC
  ```
- Categories include: `DIAGNOSTIC` for system/technical information

### Device Classes
- **Use When Available**: Set appropriate device class for entity type
  ```python
  class MyTemperatureSensor(SensorEntity):
      _attr_device_class = SensorDeviceClass.TEMPERATURE
  ```
- Provides context for: unit conversion, voice control, UI representation

### Disabled by Default
- **Disable Noisy/Less Popular Entities**: Reduce resource usage
  ```python
  class MySignalStrengthSensor(SensorEntity):
      _attr_entity_registry_enabled_default = False
  ```
- Target: frequently changing states, technical diagnostics

### Entity Translations
- **Required with has_entity_name**: Support international users
- **Implementation**:
  ```python
  class MySensor(SensorEntity):
      _attr_has_entity_name = True
      _attr_translation_key = "phase_voltage"
  ```
- Create `strings.json` with translations:
  ```json
  {
    "entity": {
      "sensor": {
        "phase_voltage": {
          "name": "Phase voltage"
        }
      }
    }
  }
  ```

### Exception Translations (Gold)
- **Translatable Errors**: Use translation keys for user-facing exceptions
- **Implementation**:
  ```python
  raise ServiceValidationError(
      translation_domain=DOMAIN,
      translation_key="end_date_before_start_date",
  )
  ```
- Add to `strings.json`:
  ```json
  {
    "exceptions": {
      "end_date_before_start_date": {
        "message": "The end date cannot be before the start date."
      }
    }
  }
  ```

### Icon Translations (Gold)
- **Dynamic Icons**: Support state and range-based icon selection
- **State-based Icons**:
  ```json
  {
    "entity": {
      "sensor": {
        "tree_pollen": {
          "default": "mdi:tree",
          "state": {
            "high": "mdi:tree-outline"
          }
        }
      }
    }
  }
  ```
- **Range-based Icons** (for numeric values):
  ```json
  {
    "entity": {
      "sensor": {
        "battery_level": {
          "default": "mdi:battery-unknown",
          "range": {
            "0": "mdi:battery-outline",
            "90": "mdi:battery-90",
            "100": "mdi:battery"
          }
        }
      }
    }
  }
  ```
## Testing Requirements

- **Location**: `tests/components/{domain}/`
- **Coverage Requirement**: Above 95% test coverage for all modules
- **Best Practices**:
  - Use pytest fixtures from `tests.common`
  - Mock all external dependencies
  - Use snapshots for complex data structures
  - Follow existing test patterns

### Config Flow Testing
- **100% Coverage Required**: All config flow paths must be tested
- **Test Scenarios**:
  - All flow initiation methods (user, discovery, import)
  - Successful configuration paths
  - Error recovery scenarios
  - Prevention of duplicate entries
  - Flow completion after errors

### Testing
- **Integration-specific tests** (recommended):
  ```bash
  pytest ./tests/components/<integration_domain> \
    --cov=homeassistant.components.<integration_domain> \
    --cov-report term-missing \
    --durations-min=1 \
    --durations=0 \
    --numprocesses=auto
  ```

### Testing Best Practices
- **Never access `hass.data` directly** - Use fixtures and proper integration setup instead
- **Use snapshot testing** - For verifying entity states and attributes
- **Test through integration setup** - Don't test entities in isolation
- **Mock external APIs** - Use fixtures with realistic JSON data
- **Verify registries** - Ensure entities are properly registered with devices

### Config Flow Testing Template
```python
async def test_user_flow_success(hass, mock_api):
    """Test successful user flow."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == FlowResultType.FORM
    assert result["step_id"] == "user"

    # Test form submission
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=TEST_USER_INPUT
    )
    assert result["type"] == FlowResultType.CREATE_ENTRY
    assert result["title"] == "My Device"
    assert result["data"] == TEST_USER_INPUT


async def test_flow_connection_error(hass, mock_api_error):
    """Test connection error handling."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=TEST_USER_INPUT
    )
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {"base": "cannot_connect"}
```

### Entity Testing Patterns
```python
@pytest.fixture
def platforms() -> list[Platform]:
    """Overridden fixture to specify platforms to test."""
    return [Platform.SENSOR]  # Or another specific platform as needed.


@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_entities(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
    device_registry: dr.DeviceRegistry,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test the sensor entities."""
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)

    # Ensure entities are correctly assigned to device
    device_entry = device_registry.async_get_device(
        identifiers={(DOMAIN, "device_unique_id")}
    )
    assert device_entry
    entity_entries = er.async_entries_for_config_entry(
        entity_registry, mock_config_entry.entry_id
    )
    for entity_entry in entity_entries:
        assert entity_entry.device_id == device_entry.id
```

### Mock Patterns
```python
# Modern integration fixture setup
@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
    """Return the default mocked config entry."""
    return MockConfigEntry(
        title="My Integration",
        domain=DOMAIN,
        data={CONF_HOST: "127.0.0.1", CONF_API_KEY: "test_key"},
        unique_id="device_unique_id",
    )


@pytest.fixture
def mock_device_api() -> Generator[MagicMock]:
    """Return a mocked device API."""
    with patch("homeassistant.components.my_integration.MyDeviceAPI", autospec=True) as api_mock:
        api = api_mock.return_value
        api.get_data.return_value = MyDeviceData.from_json(
            load_fixture("device_data.json", DOMAIN)
        )
        yield api


@pytest.fixture
def platforms() -> list[Platform]:
    """Fixture to specify platforms to test."""
    return PLATFORMS


@pytest.fixture
async def init_integration(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_device_api: MagicMock,
    platforms: list[Platform],
) -> MockConfigEntry:
    """Set up the integration for testing."""
    mock_config_entry.add_to_hass(hass)

    with patch("homeassistant.components.my_integration.PLATFORMS", platforms):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()

    return mock_config_entry
```

## Debugging & Troubleshooting

### Common Issues & Solutions
- **Integration won't load**: Check `manifest.json` syntax and required fields
- **Entities not appearing**: Verify `unique_id` and `has_entity_name` implementation
- **Config flow errors**: Check `strings.json` entries and error handling
- **Discovery not working**: Verify manifest discovery configuration and callbacks
- **Tests failing**: Check mock setup and async context

### Debug Logging Setup
```python
# Enable debug logging in tests
caplog.set_level(logging.DEBUG, logger="my_integration")

# In integration code - use proper logging
_LOGGER = logging.getLogger(__name__)
_LOGGER.debug("Processing data: %s", data)  # Use lazy logging
```

### Validation Commands
```bash
# Check specific integration
python -m script.hassfest --integration-path homeassistant/components/my_integration

# Validate quality scale
# Check quality_scale.yaml against current rules

# Run integration tests with coverage
pytest ./tests/components/my_integration \
  --cov=homeassistant.components.my_integration \
  --cov-report term-missing
```
New file: `.claude/skills/integrations/platform-diagnostics.md` (19 lines)

@@ -0,0 +1,19 @@
# Integration Diagnostics

Platform exists as `homeassistant/components/<domain>/diagnostics.py`.

- **Required**: Implement diagnostic data collection
- **Implementation**:
  ```python
  TO_REDACT = [CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE]


  async def async_get_config_entry_diagnostics(
      hass: HomeAssistant, entry: MyConfigEntry
  ) -> dict[str, Any]:
      """Return diagnostics for a config entry."""
      return {
          "entry_data": async_redact_data(entry.data, TO_REDACT),
          "data": entry.runtime_data.data,
      }
  ```
- **Security**: Never expose passwords, tokens, or sensitive coordinates
New file: `.claude/skills/integrations/platform-repairs.md` (55 lines)

@@ -0,0 +1,55 @@
# Repairs platform

Platform exists as `homeassistant/components/<domain>/repairs.py`.

- **Actionable Issues Required**: All repair issues must be actionable for end users
- **Issue Content Requirements**:
  - Clearly explain what is happening
  - Provide specific steps users need to take to resolve the issue
  - Use friendly, helpful language
  - Include relevant context (device names, error details, etc.)
- **Implementation**:
  ```python
  ir.async_create_issue(
      hass,
      DOMAIN,
      "outdated_version",
      is_fixable=False,
      issue_domain=DOMAIN,
      severity=ir.IssueSeverity.ERROR,
      translation_key="outdated_version",
  )
  ```
- **Translation Strings Requirements**: Must contain user-actionable text in `strings.json`:
  ```json
  {
    "issues": {
      "outdated_version": {
        "title": "Device firmware is outdated",
        "description": "Your device firmware version {current_version} is below the minimum required version {min_version}. To fix this issue: 1) Open the manufacturer's mobile app, 2) Navigate to device settings, 3) Select 'Update Firmware', 4) Wait for the update to complete, then 5) Restart Home Assistant."
      }
    }
  }
  ```
- **String Content Must Include**:
  - What the problem is
  - Why it matters
  - Exact steps to resolve (numbered list when multiple steps)
  - What to expect after following the steps
- **Avoid Vague Instructions**: Don't just say "update firmware" - provide specific steps
- **Severity Guidelines**:
  - `CRITICAL`: Reserved for extreme scenarios only
  - `ERROR`: Requires immediate user attention
  - `WARNING`: Indicates future potential breakage
- **Additional Attributes**:
  ```python
  ir.async_create_issue(
      hass, DOMAIN, "issue_id",
      breaks_in_ha_version="2024.1.0",
      is_fixable=True,
      is_persistent=True,
      severity=ir.IssueSeverity.ERROR,
      translation_key="issue_description",
  )
  ```
- Only create issues for problems users can potentially resolve
@@ -91,6 +91,7 @@ components: &components
  - homeassistant/components/input_number/**
  - homeassistant/components/input_select/**
  - homeassistant/components/input_text/**
  - homeassistant/components/labs/**
  - homeassistant/components/logbook/**
  - homeassistant/components/logger/**
  - homeassistant/components/lovelace/**
`.github/copilot-instructions.md` (vendored, 920 changed lines): file diff suppressed because it is too large.
`.github/workflows/builder.yml` (vendored, 18 changed lines)
@@ -30,10 +30,10 @@ jobs:
|
||||
architectures: ${{ env.ARCHITECTURES }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -96,7 +96,7 @@ jobs:
|
||||
os: ubuntu-24.04-arm
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Download nightly wheels of frontend
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
@@ -122,7 +122,7 @@ jobs:
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -273,7 +273,7 @@ jobs:
|
||||
- green
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Set build additional args
|
||||
run: |
|
||||
@@ -311,7 +311,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Initialize git
|
||||
uses: home-assistant/actions/helpers/git-init@master
|
||||
@@ -474,10 +474,10 @@ jobs:
|
||||
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -519,7 +519,7 @@ jobs:
|
||||
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
|
||||
`.github/workflows/ci.yaml` (vendored, 41 changed lines)
@@ -97,7 +97,7 @@ jobs:
|
||||
steps:
|
||||
- &checkout
|
||||
name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate_python_cache_key
|
||||
run: |
|
||||
@@ -247,17 +247,11 @@ jobs:
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
steps:
|
||||
- *checkout
|
||||
- name: Register yamllint problem matcher
|
||||
- name: Register problem matchers
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/yamllint.json"
|
||||
- name: Register check-json problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/check-json.json"
|
||||
- name: Register check executables problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||
- name: Register codespell problem matcher
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/codespell.json"
|
||||
- name: Run prek
|
||||
uses: j178/prek-action@9d6a3097e0c1865ecce00cfb89fe80f2ee91b547 # v1.0.12
|
||||
@@ -303,7 +297,7 @@ jobs:
|
||||
- &setup-python-matrix
|
||||
name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: &actions-setup-python actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
@@ -316,7 +310,7 @@ jobs:
|
||||
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: &actions-cache actions/cache@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: venv
|
||||
key: &key-python-venv >-
|
||||
@@ -380,7 +374,7 @@ jobs:
|
||||
fi
|
||||
- name: Save apt cache
|
||||
if: steps.cache-apt-check.outputs.cache-hit != 'true'
|
||||
uses: &actions-cache-save actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: &actions-cache-save actions/cache/save@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: *path-apt-cache
|
||||
key: *key-apt-cache
|
||||
@@ -431,7 +425,7 @@ jobs:
|
||||
steps:
|
||||
- &cache-restore-apt
|
||||
name: Restore apt cache
|
||||
uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
|
||||
uses: &actions-cache-restore actions/cache/restore@8b402f58fbc84540c8b491a91e594a4576fec3d7 # v5.0.2
|
||||
with:
|
||||
path: *path-apt-cache
|
||||
fail-on-cache-miss: true
|
||||
@@ -485,6 +479,22 @@ jobs:
|
||||
. venv/bin/activate
|
||||
python -m script.gen_requirements_all validate
|
||||
|
||||
gen-copilot-instructions:
|
||||
name: Check copilot instructions
|
||||
runs-on: *runs-on-ubuntu
|
||||
needs:
|
||||
- info
|
||||
if: |
|
||||
github.event.inputs.pylint-only != 'true'
|
||||
&& github.event.inputs.mypy-only != 'true'
|
||||
&& github.event.inputs.audit-licenses-only != 'true'
|
||||
steps:
|
||||
- *checkout
|
||||
- *setup-python-default
|
||||
- name: Run gen_copilot_instructions.py
|
||||
run: |
|
||||
python -m script.gen_copilot_instructions validate
|
||||
|
||||
dependency-review:
|
||||
name: Dependency review
|
||||
runs-on: *runs-on-ubuntu
|
||||
@@ -1193,6 +1203,8 @@ jobs:
|
||||
- pytest-postgres
|
||||
- pytest-mariadb
|
||||
timeout-minutes: 10
|
||||
permissions:
|
||||
id-token: write
|
||||
# codecov/test-results-action currently doesn't support tokenless uploads
|
||||
# therefore we can't run it on forks
|
||||
if: |
|
||||
@@ -1204,8 +1216,9 @@ jobs:
|
||||
with:
|
||||
pattern: test-results-*
|
||||
- name: Upload test results to Codecov
|
||||
uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1
|
||||
uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
|
||||
with:
|
||||
report_type: test_results
|
||||
fail_ci_if_error: true
|
||||
verbose: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
use_oidc: true
|
||||
|
||||
`.github/workflows/codeql.yml` (vendored, 2 changed lines)
@@ -21,7 +21,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
|
||||
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
- name: Detect duplicates using AI
|
||||
id: ai_detection
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
|
||||
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
|
||||
with:
|
||||
model: openai/gpt-4o
|
||||
system-prompt: |
|
||||
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: Detect language using AI
|
||||
id: ai_language_detection
|
||||
if: steps.detect_language.outputs.should_continue == 'true'
|
||||
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
|
||||
uses: actions/ai-inference@a6101c89c6feaecc585efdd8d461f18bb7896f20 # v2.0.5
|
||||
with:
|
||||
model: openai/gpt-4o-mini
|
||||
system-prompt: |
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
"owner": "check-executables-have-shebangs",
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "^(.+):\\s(.+)$",
|
||||
"regexp": "^(.+):\\s(marked executable but has no \\(or invalid\\) shebang!.*)$",
|
||||
"file": 1,
|
||||
"message": 2
|
||||
}
|
||||
|
||||
`.github/workflows/translations.yml` (vendored, 4 changed lines)
@@ -19,10 +19,10 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
|
||||
`.github/workflows/wheels.yml` (vendored, 4 changed lines)
@@ -31,11 +31,11 @@ jobs:
|
||||
steps:
|
||||
- &checkout
|
||||
name: Checkout the repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.13.0
|
||||
rev: v0.14.13
|
||||
hooks:
|
||||
- id: ruff-check
|
||||
args:
|
||||
|
||||
@@ -455,6 +455,7 @@ homeassistant.components.russound_rio.*
|
||||
homeassistant.components.ruuvi_gateway.*
|
||||
homeassistant.components.ruuvitag_ble.*
|
||||
homeassistant.components.samsungtv.*
|
||||
homeassistant.components.saunum.*
|
||||
homeassistant.components.scene.*
|
||||
homeassistant.components.schedule.*
|
||||
homeassistant.components.schlage.*
|
||||
|
||||
New file: `AGENTS.md` (324 lines)

@@ -0,0 +1,324 @@
# GitHub Copilot & Claude Code Instructions

This repository contains the core of Home Assistant, a Python 3 based home automation application.

## Code Review Guidelines

**When reviewing code, do NOT comment on:**
- **Missing imports** - We use static analysis tooling to catch that
- **Code formatting** - We have ruff as a formatting tool that will catch those if needed (unless specifically instructed otherwise in these instructions)

**Git commit practices during review:**
- **Do NOT amend, squash, or rebase commits after review has started** - Reviewers need to see what changed since their last review

## Python Requirements

- **Compatibility**: Python 3.13+
- **Language Features**: Use the newest features when possible:
  - Pattern matching
  - Type hints
  - f-strings (preferred over `%` or `.format()`)
  - Dataclasses
  - Walrus operator
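A small illustrative snippet (not taken from the repository) that combines several of these features:

```python
from dataclasses import dataclass


@dataclass
class Reading:
    """Simple value object using a dataclass."""

    kind: str
    value: float


def describe(reading: Reading) -> str:
    """Combine pattern matching, f-strings, and the walrus operator."""
    match reading:
        case Reading(kind="temperature", value=value):
            # Walrus operator: bind and test in one expression
            if (rounded := round(value, 1)) >= 30.0:
                return f"High temperature: {rounded} °C"
            return f"Temperature: {rounded} °C"
        case _:
            return f"{reading.kind}: {reading.value}"
```
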
### Strict Typing (Platinum)
- **Comprehensive Type Hints**: Add type hints to all functions, methods, and variables
- **Custom Config Entry Types**: When using runtime_data:
  ```python
  type MyIntegrationConfigEntry = ConfigEntry[MyClient]
  ```
- **Library Requirements**: Include `py.typed` file for PEP-561 compliance

## Code Quality Standards

- **Formatting**: Ruff
- **Linting**: PyLint and Ruff
- **Type Checking**: MyPy
- **Lint/Type/Format Fixes**: Always prefer addressing the underlying issue (e.g., import the typed source, update shared stubs, align with Ruff expectations, or correct formatting at the source) before disabling a rule, adding `# type: ignore`, or skipping a formatter. Treat suppressions and `noqa` comments as a last resort once no compliant fix exists
- **Testing**: pytest with plain functions and fixtures
- **Language**: American English for all code, comments, and documentation (use sentence case, including titles)

### Writing Style Guidelines
- **Tone**: Friendly and informative
- **Perspective**: Use second-person ("you" and "your") for user-facing messages
- **Inclusivity**: Use objective, non-discriminatory language
- **Clarity**: Write for non-native English speakers
- **Formatting in Messages**:
  - Use backticks for: file paths, filenames, variable names, field entries
  - Use sentence case for titles and messages (capitalize only the first word and proper nouns)
  - Avoid abbreviations when possible

### Documentation Standards
- **File Headers**: Short and concise
  ```python
  """Integration for Peblar EV chargers."""
  ```
- **Method/Function Docstrings**: Required for all
  ```python
  async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool:
      """Set up Peblar from a config entry."""
  ```
- **Comment Style**:
  - Use clear, descriptive comments
  - Explain the "why" not just the "what"
  - Keep code block lines under 80 characters when possible
  - Use progressive disclosure (simple explanation first, complex details later)

## Async Programming

- All external I/O operations must be async
- **Best Practices**:
  - Avoid sleeping in loops
  - Avoid awaiting in loops - use `gather` instead
  - No blocking calls
  - Group executor jobs when possible - switching between event loop and executor is expensive
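For instance, a hedged sketch of replacing a per-item `await` loop with `asyncio.gather` (the `client.get_reading` call and `device_ids` are placeholders):

```python
import asyncio

# ❌ Awaiting in a loop: each request waits for the previous one
readings = []
for device_id in device_ids:
    readings.append(await client.get_reading(device_id))

# ✅ Gather the coroutines so the requests run concurrently
readings = await asyncio.gather(
    *(client.get_reading(device_id) for device_id in device_ids)
)
```
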
### Blocking Operations
|
||||
- **Use Executor**: For blocking I/O operations
|
||||
```python
|
||||
result = await hass.async_add_executor_job(blocking_function, args)
|
||||
```
|
||||
- **Never Block Event Loop**: Avoid file operations, `time.sleep()`, blocking HTTP calls
|
||||
- **Replace with Async**: Use `asyncio.sleep()` instead of `time.sleep()`
|
||||
|
||||
### Thread Safety
|
||||
- **@callback Decorator**: For event loop safe functions
|
||||
```python
|
||||
@callback
|
||||
def async_update_callback(self, event):
|
||||
"""Safe to run in event loop."""
|
||||
self.async_write_ha_state()
|
||||
```
|
||||
- **Sync APIs from Threads**: Use sync versions when calling from non-event loop threads
|
||||
- **Registry Changes**: Must be done in event loop thread
|
||||
|
||||
### Error Handling
|
||||
- **Exception Types**: Choose most specific exception available
|
||||
- `ServiceValidationError`: User input errors (preferred over `ValueError`)
|
||||
- `HomeAssistantError`: Device communication failures
|
||||
- `ConfigEntryNotReady`: Temporary setup issues (device offline)
|
||||
- `ConfigEntryAuthFailed`: Authentication problems
|
||||
- `ConfigEntryError`: Permanent setup issues
|
||||
- **Try/Catch Best Practices**:
|
||||
- Only wrap code that can throw exceptions
|
||||
- Keep try blocks minimal - process data after the try/catch
|
||||
- **Avoid bare exceptions** except in specific cases:
|
||||
- ❌ Generally not allowed: `except:` or `except Exception:`
|
||||
- ✅ Allowed in config flows to ensure robustness
|
||||
- ✅ Allowed in functions/methods that run in background tasks
|
||||
- Bad pattern:
|
||||
```python
|
||||
try:
|
||||
data = await device.get_data() # Can throw
|
||||
# ❌ Don't process data inside try block
|
||||
processed = data.get("value", 0) * 100
|
||||
self._attr_native_value = processed
|
||||
except DeviceError:
|
||||
_LOGGER.error("Failed to get data")
|
||||
```
|
||||
- Good pattern:
|
||||
```python
|
||||
try:
|
||||
data = await device.get_data() # Can throw
|
||||
except DeviceError:
|
||||
_LOGGER.error("Failed to get data")
|
||||
return
|
||||
|
||||
# ✅ Process data outside try block
|
||||
processed = data.get("value", 0) * 100
|
||||
self._attr_native_value = processed
|
||||
```
|
||||
- **Bare Exception Usage**:
|
||||
```python
|
||||
# ❌ Not allowed in regular code
|
||||
try:
|
||||
data = await device.get_data()
|
||||
except Exception: # Too broad
|
||||
_LOGGER.error("Failed")
|
||||
|
||||
# ✅ Allowed in config flow for robustness
|
||||
async def async_step_user(self, user_input=None):
|
||||
try:
|
||||
await self._test_connection(user_input)
|
||||
except Exception: # Allowed here
|
||||
errors["base"] = "unknown"
|
||||
|
||||
# ✅ Allowed in background tasks
|
||||
async def _background_refresh():
|
||||
try:
|
||||
await coordinator.async_refresh()
|
||||
except Exception: # Allowed in task
|
||||
_LOGGER.exception("Unexpected error in background task")
|
||||
```
|
||||
- **Setup Failure Patterns**:
  ```python
  try:
      await device.async_setup()
  except (asyncio.TimeoutError, TimeoutException) as ex:
      raise ConfigEntryNotReady(f"Timeout connecting to {device.host}") from ex
  except AuthFailed as ex:
      raise ConfigEntryAuthFailed(f"Credentials expired for {device.name}") from ex
  ```

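A minimal sketch of the first two exception types applied in a service handler; the service, `client`, and `DeviceError` names are hypothetical:

```python
async def async_set_speed(call: ServiceCall) -> None:
    """Handle the set_speed service call."""
    speed = call.data["speed"]
    if not 0 <= speed <= 100:
        # ✅ Invalid user input
        raise ServiceValidationError(f"Speed {speed} is out of range 0-100")
    try:
        await client.set_speed(speed)
    except DeviceError as err:
        # ✅ Device communication failure
        raise HomeAssistantError(f"Failed to set speed: {err}") from err
```
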
### Logging

- **Format Guidelines**:
  - No periods at end of messages
  - No integration names/domains (added automatically)
  - No sensitive data (keys, tokens, passwords)
  - Use debug level for non-user-facing messages
- **Use Lazy Logging**:
  ```python
  _LOGGER.debug("This is a log message with %s", variable)
  ```

### Unavailability Logging

- **Log Once**: When device/service becomes unavailable (info level)
- **Log Recovery**: When device/service comes back online
- **Implementation Pattern** (a coordinator-level sketch follows this section):
  ```python
  _unavailable_logged: bool = False

  if not self._unavailable_logged:
      _LOGGER.info("The sensor is unavailable: %s", ex)
      self._unavailable_logged = True
  # On recovery:
  if self._unavailable_logged:
      _LOGGER.info("The sensor is back online")
      self._unavailable_logged = False
  ```

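A rough sketch of where this pattern typically lives, inside a coordinator's update method; `self.client.fetch()`, `MyData`, and `DeviceError` are placeholders:

```python
async def _async_update_data(self) -> MyData:
    """Fetch data, logging unavailability only once."""
    try:
        data = await self.client.fetch()
    except DeviceError as ex:
        if not self._unavailable_logged:
            _LOGGER.info("The device is unavailable: %s", ex)
            self._unavailable_logged = True
        raise UpdateFailed(f"Device unavailable: {ex}") from ex
    if self._unavailable_logged:
        _LOGGER.info("The device is back online")
        self._unavailable_logged = False
    return data
```
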
## Development Commands

### Code Quality & Linting

- **Run all linters on all files**: `prek run --all-files`
- **Run linters on staged files only**: `prek run`
- **PyLint on everything** (slow): `pylint homeassistant`
- **PyLint on specific folder**: `pylint homeassistant/components/my_integration`
- **MyPy type checking (whole project)**: `mypy homeassistant/`
- **MyPy on specific integration**: `mypy homeassistant/components/my_integration`

### Testing

- **Quick test of changed files**: `pytest --timeout=10 --picked`
- **Update test snapshots**: Add `--snapshot-update` to pytest command
  - ⚠️ Disregard test results from a run that used `--snapshot-update`
  - Always run tests again without the flag to verify snapshots
- **Full test suite** (AVOID - very slow): `pytest ./tests`

### Dependencies & Requirements

- **Update generated files after dependency changes**: `python -m script.gen_requirements_all`
- **Install all Python requirements**:
  ```bash
  uv pip install -r requirements_all.txt -r requirements.txt -r requirements_test.txt
  ```
- **Install test requirements only**:
  ```bash
  uv pip install -r requirements_test_all.txt -r requirements.txt
  ```

### Translations

- **Update translations after strings.json changes**:
  ```bash
  python -m script.translations develop --all
  ```

### Project Validation

- **Run hassfest** (checks project structure and updates generated files):
  ```bash
  python -m script.hassfest
  ```

## Common Anti-Patterns & Best Practices

### ❌ **Avoid These Patterns**
```python
# Blocking operations in event loop
data = requests.get(url)  # ❌ Blocks event loop
time.sleep(5)  # ❌ Blocks event loop

# Reusing BleakClient instances
self.client = BleakClient(address)
await self.client.connect()
# Later...
await self.client.connect()  # ❌ Don't reuse

# Hardcoded strings in code
self._attr_name = "Temperature Sensor"  # ❌ Not translatable

# Missing error handling
data = await self.api.get_data()  # ❌ No exception handling

# Storing sensitive data in diagnostics
return {"api_key": entry.data[CONF_API_KEY]}  # ❌ Exposes secrets

# Accessing hass.data directly in tests
coordinator = hass.data[DOMAIN][entry.entry_id]  # ❌ Don't access hass.data

# User-configurable polling intervals
# In config flow
vol.Optional("scan_interval", default=60): cv.positive_int  # ❌ Not allowed
# In coordinator
update_interval = timedelta(minutes=entry.data.get("scan_interval", 1))  # ❌ Not allowed

# User-configurable config entry names (non-helper integrations)
vol.Optional("name", default="My Device"): cv.string  # ❌ Not allowed in regular integrations

# Too much code in try block
try:
    response = await client.get_data()  # Can throw
    # ❌ Data processing should be outside try block
    temperature = response["temperature"] / 10
    humidity = response["humidity"]
    self._attr_native_value = temperature
except ClientError:
    _LOGGER.error("Failed to fetch data")

# Bare exceptions in regular code
try:
    value = await sensor.read_value()
except Exception:  # ❌ Too broad - catch specific exceptions
    _LOGGER.error("Failed to read sensor")
```

### ✅ **Use These Patterns Instead**
```python
# Async operations with executor
data = await hass.async_add_executor_job(requests.get, url)
await asyncio.sleep(5)  # ✅ Non-blocking

# Fresh BleakClient instances
client = BleakClient(address)  # ✅ New instance each time
await client.connect()

# Translatable entity names
_attr_translation_key = "temperature_sensor"  # ✅ Translatable

# Proper error handling
try:
    data = await self.api.get_data()
except ApiException as err:
    raise UpdateFailed(f"API error: {err}") from err

# Redacted diagnostics data
return async_redact_data(data, {"api_key", "password"})  # ✅ Safe

# Test through proper integration setup and fixtures
@pytest.fixture
async def init_integration(hass, mock_config_entry, mock_api):
    mock_config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(mock_config_entry.entry_id)  # ✅ Proper setup

# Integration-determined polling intervals (not user-configurable)
SCAN_INTERVAL = timedelta(minutes=5)  # ✅ Common pattern: constant in const.py

class MyCoordinator(DataUpdateCoordinator[MyData]):
    def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
        # ✅ Integration determines interval based on device capabilities, connection type, etc.
        interval = timedelta(minutes=1) if client.is_local else SCAN_INTERVAL
        super().__init__(
            hass,
            logger=LOGGER,
            name=DOMAIN,
            update_interval=interval,
            config_entry=config_entry,  # ✅ Pass config_entry - it's accepted and recommended
        )
```
7
CODEOWNERS
generated
@@ -1017,8 +1017,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/mill/ @danielhiversen
|
||||
/homeassistant/components/min_max/ @gjohansson-ST
|
||||
/tests/components/min_max/ @gjohansson-ST
|
||||
/homeassistant/components/minecraft_server/ @elmurato
|
||||
/tests/components/minecraft_server/ @elmurato
|
||||
/homeassistant/components/minecraft_server/ @elmurato @zachdeibert
|
||||
/tests/components/minecraft_server/ @elmurato @zachdeibert
|
||||
/homeassistant/components/minio/ @tkislan
|
||||
/tests/components/minio/ @tkislan
|
||||
/homeassistant/components/moat/ @bdraco
|
||||
@@ -1273,7 +1273,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/prosegur/ @dgomes
|
||||
/homeassistant/components/proximity/ @mib1185
|
||||
/tests/components/proximity/ @mib1185
|
||||
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno
|
||||
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno @erwindouna
|
||||
/tests/components/proxmoxve/ @jhollowe @Corbeno @erwindouna
|
||||
/homeassistant/components/ps4/ @ktnrg45
|
||||
/tests/components/ps4/ @ktnrg45
|
||||
/homeassistant/components/pterodactyl/ @elmurato
|
||||
|
||||
2
Dockerfile
generated
@@ -30,7 +30,7 @@ RUN \
|
||||
# Verify go2rtc can be executed
|
||||
go2rtc --version \
|
||||
# Install uv
|
||||
&& pip3 install uv==0.9.17
|
||||
&& pip3 install uv==0.9.26
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
|
||||
@@ -67,8 +67,6 @@ from .const import (
|
||||
BASE_PLATFORMS,
|
||||
FORMAT_DATETIME,
|
||||
KEY_DATA_LOGGING as DATA_LOGGING,
|
||||
REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
||||
REQUIRED_NEXT_PYTHON_VER,
|
||||
SIGNAL_BOOTSTRAP_INTEGRATIONS,
|
||||
)
|
||||
from .core_config import async_process_ha_core_config
|
||||
@@ -516,38 +514,6 @@ async def async_from_config_dict(
|
||||
|
||||
stop = monotonic()
|
||||
_LOGGER.info("Home Assistant initialized in %.2fs", stop - start)
|
||||
|
||||
if (
|
||||
REQUIRED_NEXT_PYTHON_HA_RELEASE
|
||||
and sys.version_info[:3] < REQUIRED_NEXT_PYTHON_VER
|
||||
):
|
||||
current_python_version = ".".join(str(x) for x in sys.version_info[:3])
|
||||
required_python_version = ".".join(str(x) for x in REQUIRED_NEXT_PYTHON_VER[:2])
|
||||
_LOGGER.warning(
|
||||
(
|
||||
"Support for the running Python version %s is deprecated and "
|
||||
"will be removed in Home Assistant %s; "
|
||||
"Please upgrade Python to %s"
|
||||
),
|
||||
current_python_version,
|
||||
REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
||||
required_python_version,
|
||||
)
|
||||
issue_registry.async_create_issue(
|
||||
hass,
|
||||
core.DOMAIN,
|
||||
f"python_version_{required_python_version}",
|
||||
is_fixable=False,
|
||||
severity=issue_registry.IssueSeverity.WARNING,
|
||||
breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
||||
translation_key="python_version",
|
||||
translation_placeholders={
|
||||
"current_python_version": current_python_version,
|
||||
"required_python_version": required_python_version,
|
||||
"breaks_in_ha_version": REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
||||
},
|
||||
)
|
||||
|
||||
return hass
|
||||
|
||||
|
||||
|
||||
@@ -52,7 +52,7 @@ class AdGuardHomeEntity(Entity):
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return device information about this AdGuard Home instance."""
|
||||
if self._entry.source == SOURCE_HASSIO:
|
||||
config_url = "homeassistant://hassio/ingress/a0d7b954_adguard"
|
||||
config_url = "homeassistant://app/a0d7b954_adguard"
|
||||
elif self.adguard.tls:
|
||||
config_url = f"https://{self.adguard.host}:{self.adguard.port}"
|
||||
else:
|
||||
|
||||
@@ -8,12 +8,15 @@ from advantage_air import ApiError, advantage_air
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import ADVANTAGE_AIR_RETRY
|
||||
from .const import ADVANTAGE_AIR_RETRY, DOMAIN
|
||||
from .models import AdvantageAirData
|
||||
from .services import async_setup_services
|
||||
|
||||
type AdvantageAirDataConfigEntry = ConfigEntry[AdvantageAirData]
|
||||
|
||||
@@ -32,6 +35,14 @@ PLATFORMS = [
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
REQUEST_REFRESH_DELAY = 0.5
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the component."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: AdvantageAirDataConfigEntry
|
||||
|
||||
@@ -5,8 +5,6 @@ from __future__ import annotations
|
||||
from decimal import Decimal
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
@@ -14,7 +12,6 @@ from homeassistant.components.sensor import (
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AdvantageAirDataConfigEntry
|
||||
@@ -24,7 +21,6 @@ from .models import AdvantageAirData
|
||||
|
||||
ADVANTAGE_AIR_SET_COUNTDOWN_VALUE = "minutes"
|
||||
ADVANTAGE_AIR_SET_COUNTDOWN_UNIT = "min"
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO = "set_time_to"
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -53,13 +49,6 @@ async def async_setup_entry(
|
||||
entities.append(AdvantageAirZoneSignal(instance, ac_key, zone_key))
|
||||
async_add_entities(entities)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
|
||||
{vol.Required("minutes"): cv.positive_int},
|
||||
"set_time_to",
|
||||
)
|
||||
|
||||
|
||||
class AdvantageAirTimeTo(AdvantageAirAcEntity, SensorEntity):
|
||||
"""Representation of Advantage Air timer control."""
|
||||
|
||||
27
homeassistant/components/advantage_air/services.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""Services for Advantage Air integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO = "set_time_to"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Home Assistant services."""
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
ADVANTAGE_AIR_SERVICE_SET_TIME_TO,
|
||||
entity_domain=SENSOR_DOMAIN,
|
||||
schema={vol.Required("minutes"): cv.positive_int},
|
||||
func="set_time_to",
|
||||
)
|
||||
@@ -12,6 +12,7 @@ PLATFORMS: list[Platform] = [
|
||||
Platform.CLIMATE,
|
||||
Platform.NUMBER,
|
||||
Platform.SENSOR,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -63,6 +63,11 @@ class AirobotClimate(AirobotEntity, ClimateEntity):
|
||||
_attr_min_temp = SETPOINT_TEMP_MIN
|
||||
_attr_max_temp = SETPOINT_TEMP_MAX
|
||||
|
||||
def __init__(self, coordinator) -> None:
|
||||
"""Initialize the climate entity."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = coordinator.data.status.device_id
|
||||
|
||||
@property
|
||||
def _status(self) -> ThermostatStatus:
|
||||
"""Get status from coordinator data."""
|
||||
|
||||
@@ -24,8 +24,6 @@ class AirobotEntity(CoordinatorEntity[AirobotDataUpdateCoordinator]):
|
||||
status = coordinator.data.status
|
||||
settings = coordinator.data.settings
|
||||
|
||||
self._attr_unique_id = status.device_id
|
||||
|
||||
connections = set()
|
||||
if (mac := coordinator.config_entry.data.get(CONF_MAC)) is not None:
|
||||
connections.add((CONNECTION_NETWORK_MAC, mac))
|
||||
|
||||
@@ -9,6 +9,14 @@
|
||||
"hysteresis_band": {
|
||||
"default": "mdi:delta"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"actuator_exercise_disabled": {
|
||||
"default": "mdi:valve"
|
||||
},
|
||||
"child_lock": {
|
||||
"default": "mdi:lock"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,6 +85,14 @@
|
||||
"heating_uptime": {
|
||||
"name": "Heating uptime"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"actuator_exercise_disabled": {
|
||||
"name": "Actuator exercise disabled"
|
||||
},
|
||||
"child_lock": {
|
||||
"name": "Child lock"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
@@ -105,6 +113,12 @@
|
||||
},
|
||||
"set_value_failed": {
|
||||
"message": "Failed to set value: {error}"
|
||||
},
|
||||
"switch_turn_off_failed": {
|
||||
"message": "Failed to turn off {switch}."
|
||||
},
|
||||
"switch_turn_on_failed": {
|
||||
"message": "Failed to turn on {switch}."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
118
homeassistant/components/airobot/switch.py
Normal file
@@ -0,0 +1,118 @@
|
||||
"""Switch platform for Airobot thermostat."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from pyairobotrest.exceptions import AirobotError
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirobotConfigEntry
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirobotDataUpdateCoordinator
|
||||
from .entity import AirobotEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AirobotSwitchEntityDescription(SwitchEntityDescription):
|
||||
"""Describes Airobot switch entity."""
|
||||
|
||||
is_on_fn: Callable[[AirobotDataUpdateCoordinator], bool]
|
||||
turn_on_fn: Callable[[AirobotDataUpdateCoordinator], Coroutine[Any, Any, None]]
|
||||
turn_off_fn: Callable[[AirobotDataUpdateCoordinator], Coroutine[Any, Any, None]]
|
||||
|
||||
|
||||
SWITCH_TYPES: tuple[AirobotSwitchEntityDescription, ...] = (
|
||||
AirobotSwitchEntityDescription(
|
||||
key="child_lock",
|
||||
translation_key="child_lock",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
is_on_fn=lambda coordinator: (
|
||||
coordinator.data.settings.setting_flags.childlock_enabled
|
||||
),
|
||||
turn_on_fn=lambda coordinator: coordinator.client.set_child_lock(True),
|
||||
turn_off_fn=lambda coordinator: coordinator.client.set_child_lock(False),
|
||||
),
|
||||
AirobotSwitchEntityDescription(
|
||||
key="actuator_exercise_disabled",
|
||||
translation_key="actuator_exercise_disabled",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
is_on_fn=lambda coordinator: (
|
||||
coordinator.data.settings.setting_flags.actuator_exercise_disabled
|
||||
),
|
||||
turn_on_fn=lambda coordinator: coordinator.client.toggle_actuator_exercise(
|
||||
True
|
||||
),
|
||||
turn_off_fn=lambda coordinator: coordinator.client.toggle_actuator_exercise(
|
||||
False
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AirobotConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Airobot switch entities."""
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
AirobotSwitch(coordinator, description) for description in SWITCH_TYPES
|
||||
)
|
||||
|
||||
|
||||
class AirobotSwitch(AirobotEntity, SwitchEntity):
|
||||
"""Representation of an Airobot switch."""
|
||||
|
||||
entity_description: AirobotSwitchEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AirobotDataUpdateCoordinator,
|
||||
description: AirobotSwitchEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{coordinator.data.status.device_id}_{description.key}"
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the switch is on."""
|
||||
return self.entity_description.is_on_fn(self.coordinator)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
try:
|
||||
await self.entity_description.turn_on_fn(self.coordinator)
|
||||
except AirobotError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="switch_turn_on_failed",
|
||||
translation_placeholders={"switch": self.entity_description.key},
|
||||
) from err
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
try:
|
||||
await self.entity_description.turn_off_fn(self.coordinator)
|
||||
except AirobotError as err:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="switch_turn_off_failed",
|
||||
translation_placeholders={"switch": self.entity_description.key},
|
||||
) from err
|
||||
await self.coordinator.async_request_refresh()
|
||||
93
homeassistant/components/alarm_control_panel/condition.py
Normal file
@@ -0,0 +1,93 @@
|
||||
"""Provides conditions for alarm control panels."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.condition import (
|
||||
Condition,
|
||||
EntityStateConditionBase,
|
||||
make_entity_state_condition,
|
||||
)
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
|
||||
from .const import DOMAIN, AlarmControlPanelEntityFeature, AlarmControlPanelState
|
||||
|
||||
|
||||
def supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> bool:
|
||||
"""Test if an entity supports the specified features."""
|
||||
try:
|
||||
return bool(get_supported_features(hass, entity_id) & features)
|
||||
except HomeAssistantError:
|
||||
return False
|
||||
|
||||
|
||||
class EntityStateRequiredFeaturesCondition(EntityStateConditionBase):
|
||||
"""State condition."""
|
||||
|
||||
_required_features: int
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain with the required features."""
|
||||
entities = super().entity_filter(entities)
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if supports_feature(self._hass, entity_id, self._required_features)
|
||||
}
|
||||
|
||||
|
||||
def make_entity_state_required_features_condition(
|
||||
domain: str, to_state: str, required_features: int
|
||||
) -> type[EntityStateRequiredFeaturesCondition]:
|
||||
"""Create an entity state condition class with required feature filtering."""
|
||||
|
||||
class CustomCondition(EntityStateRequiredFeaturesCondition):
|
||||
"""Condition for entity state changes."""
|
||||
|
||||
_domain = domain
|
||||
_states = {to_state}
|
||||
_required_features = required_features
|
||||
|
||||
return CustomCondition
|
||||
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_armed": make_entity_state_condition(
|
||||
DOMAIN,
|
||||
{
|
||||
AlarmControlPanelState.ARMED_AWAY,
|
||||
AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
|
||||
AlarmControlPanelState.ARMED_HOME,
|
||||
AlarmControlPanelState.ARMED_NIGHT,
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
},
|
||||
),
|
||||
"is_armed_away": make_entity_state_required_features_condition(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_AWAY,
|
||||
AlarmControlPanelEntityFeature.ARM_AWAY,
|
||||
),
|
||||
"is_armed_home": make_entity_state_required_features_condition(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_HOME,
|
||||
AlarmControlPanelEntityFeature.ARM_HOME,
|
||||
),
|
||||
"is_armed_night": make_entity_state_required_features_condition(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_NIGHT,
|
||||
AlarmControlPanelEntityFeature.ARM_NIGHT,
|
||||
),
|
||||
"is_armed_vacation": make_entity_state_required_features_condition(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
AlarmControlPanelEntityFeature.ARM_VACATION,
|
||||
),
|
||||
"is_disarmed": make_entity_state_condition(DOMAIN, AlarmControlPanelState.DISARMED),
|
||||
"is_triggered": make_entity_state_condition(
|
||||
DOMAIN, AlarmControlPanelState.TRIGGERED
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
|
||||
"""Return the alarm control panel conditions."""
|
||||
return CONDITIONS
|
||||
52
homeassistant/components/alarm_control_panel/conditions.yaml
Normal file
@@ -0,0 +1,52 @@
|
||||
.condition_common: &condition_common
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
fields: &condition_common_fields
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
|
||||
is_armed: *condition_common
|
||||
|
||||
is_armed_away:
|
||||
fields: *condition_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_AWAY
|
||||
|
||||
is_armed_home:
|
||||
fields: *condition_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME
|
||||
|
||||
is_armed_night:
|
||||
fields: *condition_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_NIGHT
|
||||
|
||||
is_armed_vacation:
|
||||
fields: *condition_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_VACATION
|
||||
|
||||
is_disarmed: *condition_common
|
||||
|
||||
is_triggered: *condition_common
|
||||
@@ -1,4 +1,27 @@
|
||||
{
|
||||
"conditions": {
|
||||
"is_armed": {
|
||||
"condition": "mdi:shield"
|
||||
},
|
||||
"is_armed_away": {
|
||||
"condition": "mdi:shield-lock"
|
||||
},
|
||||
"is_armed_home": {
|
||||
"condition": "mdi:shield-home"
|
||||
},
|
||||
"is_armed_night": {
|
||||
"condition": "mdi:shield-moon"
|
||||
},
|
||||
"is_armed_vacation": {
|
||||
"condition": "mdi:shield-airplane"
|
||||
},
|
||||
"is_disarmed": {
|
||||
"condition": "mdi:shield-off"
|
||||
},
|
||||
"is_triggered": {
|
||||
"condition": "mdi:bell-ring"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"default": "mdi:shield",
|
||||
|
||||
@@ -1,8 +1,82 @@
|
||||
{
|
||||
"common": {
|
||||
"condition_behavior_description": "How the state should match on the targeted alarms.",
|
||||
"condition_behavior_name": "Behavior",
|
||||
"trigger_behavior_description": "The behavior of the targeted alarms to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"conditions": {
|
||||
"is_armed": {
|
||||
"description": "Tests if one or more alarms are armed.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm is armed"
|
||||
},
|
||||
"is_armed_away": {
|
||||
"description": "Tests if one or more alarms are armed in away mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm is armed away"
|
||||
},
|
||||
"is_armed_home": {
|
||||
"description": "Tests if one or more alarms are armed in home mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm is armed home"
|
||||
},
|
||||
"is_armed_night": {
|
||||
"description": "Tests if one or more alarms are armed in night mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm is armed night"
|
||||
},
|
||||
"is_armed_vacation": {
|
||||
"description": "Tests if one or more alarms are armed in vacation mode.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm is armed vacation"
|
||||
},
|
||||
"is_disarmed": {
|
||||
"description": "Tests if one or more alarms are disarmed.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm is disarmed"
|
||||
},
|
||||
"is_triggered": {
|
||||
"description": "Tests if one or more alarms are triggered.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Alarm is triggered"
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"arm_away": "Arm {entity_name} away",
|
||||
@@ -76,6 +150,12 @@
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
|
||||
@@ -14,7 +14,7 @@ from .const import DOMAIN, AlarmControlPanelEntityFeature, AlarmControlPanelStat
|
||||
|
||||
|
||||
def supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> bool:
|
||||
"""Get the device class of an entity or UNDEFINED if not found."""
|
||||
"""Test if an entity supports the specified features."""
|
||||
try:
|
||||
return bool(get_supported_features(hass, entity_id) & features)
|
||||
except HomeAssistantError:
|
||||
@@ -39,7 +39,7 @@ class EntityStateTriggerRequiredFeatures(EntityTargetStateTriggerBase):
|
||||
def make_entity_state_trigger_required_features(
|
||||
domain: str, to_state: str, required_features: int
|
||||
) -> type[EntityTargetStateTriggerBase]:
|
||||
"""Create an entity state trigger class."""
|
||||
"""Create an entity state trigger class with required feature filtering."""
|
||||
|
||||
class CustomTrigger(EntityStateTriggerRequiredFeatures):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
@@ -5,9 +5,14 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
import logging
|
||||
from random import randrange
|
||||
import sys
|
||||
from typing import Any, cast
|
||||
|
||||
from pyatv import connect, exceptions, scan
|
||||
from pyatv.conf import AppleTV
|
||||
from pyatv.const import DeviceModel, Protocol
|
||||
from pyatv.convert import model_str
|
||||
from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
@@ -24,11 +29,7 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import (
|
||||
ConfigEntryAuthFailed,
|
||||
ConfigEntryNotReady,
|
||||
HomeAssistantError,
|
||||
)
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
@@ -42,18 +43,6 @@ from .const import (
|
||||
SIGNAL_DISCONNECTED,
|
||||
)
|
||||
|
||||
if sys.version_info < (3, 14):
|
||||
from pyatv import connect, exceptions, scan
|
||||
from pyatv.conf import AppleTV
|
||||
from pyatv.const import DeviceModel, Protocol
|
||||
from pyatv.convert import model_str
|
||||
from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener
|
||||
else:
|
||||
|
||||
class DeviceListener:
|
||||
"""Dummy class."""
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME_TV = "Apple TV"
|
||||
@@ -62,32 +51,27 @@ DEFAULT_NAME_HP = "HomePod"
|
||||
BACKOFF_TIME_LOWER_LIMIT = 15 # seconds
|
||||
BACKOFF_TIME_UPPER_LIMIT = 300 # Five minutes
|
||||
|
||||
PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.MEDIA_PLAYER, Platform.REMOTE]
|
||||
|
||||
if sys.version_info < (3, 14):
|
||||
AUTH_EXCEPTIONS = (
|
||||
exceptions.AuthenticationError,
|
||||
exceptions.InvalidCredentialsError,
|
||||
exceptions.NoCredentialsError,
|
||||
)
|
||||
CONNECTION_TIMEOUT_EXCEPTIONS = (
|
||||
OSError,
|
||||
asyncio.CancelledError,
|
||||
TimeoutError,
|
||||
exceptions.ConnectionLostError,
|
||||
exceptions.ConnectionFailedError,
|
||||
)
|
||||
DEVICE_EXCEPTIONS = (
|
||||
exceptions.ProtocolError,
|
||||
exceptions.NoServiceError,
|
||||
exceptions.PairingError,
|
||||
exceptions.BackOffError,
|
||||
exceptions.DeviceIdMissingError,
|
||||
)
|
||||
else:
|
||||
AUTH_EXCEPTIONS = ()
|
||||
CONNECTION_TIMEOUT_EXCEPTIONS = ()
|
||||
DEVICE_EXCEPTIONS = ()
|
||||
AUTH_EXCEPTIONS = (
|
||||
exceptions.AuthenticationError,
|
||||
exceptions.InvalidCredentialsError,
|
||||
exceptions.NoCredentialsError,
|
||||
)
|
||||
CONNECTION_TIMEOUT_EXCEPTIONS = (
|
||||
OSError,
|
||||
asyncio.CancelledError,
|
||||
TimeoutError,
|
||||
exceptions.ConnectionLostError,
|
||||
exceptions.ConnectionFailedError,
|
||||
)
|
||||
DEVICE_EXCEPTIONS = (
|
||||
exceptions.ProtocolError,
|
||||
exceptions.NoServiceError,
|
||||
exceptions.PairingError,
|
||||
exceptions.BackOffError,
|
||||
exceptions.DeviceIdMissingError,
|
||||
)
|
||||
|
||||
|
||||
type AppleTvConfigEntry = ConfigEntry[AppleTVManager]
|
||||
@@ -95,10 +79,6 @@ type AppleTvConfigEntry = ConfigEntry[AppleTVManager]
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AppleTvConfigEntry) -> bool:
|
||||
"""Set up a config entry for Apple TV."""
|
||||
if sys.version_info >= (3, 14):
|
||||
raise HomeAssistantError(
|
||||
"Apple TV is not supported on Python 3.14. Please use Python 3.13."
|
||||
)
|
||||
manager = AppleTVManager(hass, entry)
|
||||
|
||||
if manager.is_on:
|
||||
|
||||
63
homeassistant/components/apple_tv/binary_sensor.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""Binary sensor support for Apple TV."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pyatv.const import KeyboardFocusState
|
||||
from pyatv.interface import AppleTV, KeyboardListener
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorEntity
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AppleTvConfigEntry
|
||||
from .entity import AppleTVEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: AppleTvConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Load Apple TV binary sensor based on a config entry."""
|
||||
# apple_tv config entries always have a unique id
|
||||
assert config_entry.unique_id is not None
|
||||
name: str = config_entry.data[CONF_NAME]
|
||||
manager = config_entry.runtime_data
|
||||
async_add_entities([AppleTVKeyboardFocused(name, config_entry.unique_id, manager)])
|
||||
|
||||
|
||||
class AppleTVKeyboardFocused(AppleTVEntity, BinarySensorEntity, KeyboardListener):
|
||||
"""Binary sensor for Text input focused."""
|
||||
|
||||
_attr_translation_key = "keyboard_focused"
|
||||
_attr_available = True
|
||||
|
||||
@callback
|
||||
def async_device_connected(self, atv: AppleTV) -> None:
|
||||
"""Handle when connection is made to device."""
|
||||
self._attr_available = True
|
||||
# Listen to keyboard updates
|
||||
atv.keyboard.listener = self
|
||||
# Set initial state based on current focus state
|
||||
self._update_state(atv.keyboard.text_focus_state == KeyboardFocusState.Focused)
|
||||
|
||||
@callback
|
||||
def async_device_disconnected(self) -> None:
|
||||
"""Handle when connection was lost to device."""
|
||||
self._attr_available = False
|
||||
self._update_state(False)
|
||||
|
||||
def focusstate_update(
|
||||
self, old_state: KeyboardFocusState, new_state: KeyboardFocusState
|
||||
) -> None:
|
||||
"""Update keyboard state when it changes.
|
||||
|
||||
This is a callback function from pyatv.interface.KeyboardListener.
|
||||
"""
|
||||
self._update_state(new_state == KeyboardFocusState.Focused)
|
||||
|
||||
def _update_state(self, new_state: bool) -> None:
|
||||
"""Update and report."""
|
||||
self._attr_is_on = new_state
|
||||
self.async_write_ha_state()
|
||||
@@ -18,7 +18,6 @@ class AppleTVEntity(Entity):
|
||||
|
||||
_attr_should_poll = False
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = None
|
||||
atv: AppleTVInterface | None = None
|
||||
|
||||
def __init__(self, name: str, identifier: str, manager: AppleTVManager) -> None:
|
||||
|
||||
12
homeassistant/components/apple_tv/icons.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"keyboard_focused": {
|
||||
"default": "mdi:keyboard",
|
||||
"state": {
|
||||
"off": "mdi:keyboard-off"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -8,7 +8,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["pyatv", "srptools"],
|
||||
"requirements": ["pyatv==0.16.1;python_version<'3.14'"],
|
||||
"requirements": ["pyatv==0.17.0"],
|
||||
"zeroconf": [
|
||||
"_mediaremotetv._tcp.local.",
|
||||
"_companion-link._tcp.local.",
|
||||
|
||||
@@ -115,6 +115,7 @@ class AppleTvMediaPlayer(
|
||||
"""Representation of an Apple TV media player."""
|
||||
|
||||
_attr_supported_features = SUPPORT_APPLE_TV
|
||||
_attr_name = None
|
||||
|
||||
def __init__(self, name: str, identifier: str, manager: AppleTVManager) -> None:
|
||||
"""Initialize the Apple TV media player."""
|
||||
@@ -239,6 +240,15 @@ class AppleTvMediaPlayer(
|
||||
"""
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def volume_device_update(
|
||||
self, output_device: OutputDevice, old_level: float, new_level: float
|
||||
) -> None:
|
||||
"""Output device volume was updated.
|
||||
|
||||
This is a callback function from pyatv.interface.AudioListener.
|
||||
"""
|
||||
|
||||
@callback
|
||||
def outputdevices_update(
|
||||
self, old_devices: list[OutputDevice], new_devices: list[OutputDevice]
|
||||
|
||||
@@ -51,6 +51,8 @@ async def async_setup_entry(
|
||||
class AppleTVRemote(AppleTVEntity, RemoteEntity):
|
||||
"""Device that sends commands to an Apple TV."""
|
||||
|
||||
_attr_name = None
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if device is on."""
|
||||
|
||||
@@ -62,6 +62,13 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"keyboard_focused": {
|
||||
"name": "Keyboard focus"
|
||||
}
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"init": {
|
||||
|
||||
@@ -2,14 +2,35 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import ArveConfigEntry, ArveCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool:
|
||||
"""Migrate entry."""
|
||||
_LOGGER.debug("Migrating from version %s.%s", entry.version, entry.minor_version)
|
||||
|
||||
if entry.version == 1:
|
||||
# 1 -> 1.2: Unique ID from integer to string
|
||||
if entry.minor_version == 1:
|
||||
minor_version = 2
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, unique_id=str(entry.unique_id), minor_version=minor_version
|
||||
)
|
||||
|
||||
_LOGGER.debug("Migration successful")
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ArveConfigEntry) -> bool:
|
||||
"""Set up Arve from a config entry."""
|
||||
|
||||
|
||||
@@ -19,6 +19,9 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class ArveConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Arve."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -35,7 +38,7 @@ class ArveConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
except ArveConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
await self.async_set_unique_id(customer.customerId)
|
||||
await self.async_set_unique_id(str(customer.customerId))
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title="Arve",
|
||||
|
||||
23
homeassistant/components/assist_satellite/condition.py
Normal file
@@ -0,0 +1,23 @@
|
||||
"""Provides conditions for assist satellites."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.condition import Condition, make_entity_state_condition
|
||||
|
||||
from .const import DOMAIN
|
||||
from .entity import AssistSatelliteState
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_idle": make_entity_state_condition(DOMAIN, AssistSatelliteState.IDLE),
|
||||
"is_listening": make_entity_state_condition(DOMAIN, AssistSatelliteState.LISTENING),
|
||||
"is_processing": make_entity_state_condition(
|
||||
DOMAIN, AssistSatelliteState.PROCESSING
|
||||
),
|
||||
"is_responding": make_entity_state_condition(
|
||||
DOMAIN, AssistSatelliteState.RESPONDING
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
|
||||
"""Return the assist satellite conditions."""
|
||||
return CONDITIONS
|
||||
19
homeassistant/components/assist_satellite/conditions.yaml
Normal file
@@ -0,0 +1,19 @@
|
||||
.condition_common: &condition_common
|
||||
target:
|
||||
entity:
|
||||
domain: assist_satellite
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: condition_behavior
|
||||
options:
|
||||
- all
|
||||
- any
|
||||
|
||||
is_idle: *condition_common
|
||||
is_listening: *condition_common
|
||||
is_processing: *condition_common
|
||||
is_responding: *condition_common
|
||||
@@ -1,4 +1,18 @@
|
||||
{
|
||||
"conditions": {
|
||||
"is_idle": {
|
||||
"condition": "mdi:chat-sleep"
|
||||
},
|
||||
"is_listening": {
|
||||
"condition": "mdi:chat-question"
|
||||
},
|
||||
"is_processing": {
|
||||
"condition": "mdi:chat-processing"
|
||||
},
|
||||
"is_responding": {
|
||||
"condition": "mdi:chat-alert"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"default": "mdi:account-voice"
|
||||
|
||||
@@ -1,8 +1,52 @@
|
||||
{
|
||||
"common": {
|
||||
"condition_behavior_description": "How the state should match on the targeted Assist satellites.",
|
||||
"condition_behavior_name": "Behavior",
|
||||
"trigger_behavior_description": "The behavior of the targeted Assist satellites to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"conditions": {
|
||||
"is_idle": {
|
||||
"description": "Tests if one or more Assist satellites are idle.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Satellite is idle"
|
||||
},
|
||||
"is_listening": {
|
||||
"description": "Tests if one or more Assist satellites are listening.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Satellite is listening"
|
||||
},
|
||||
"is_processing": {
|
||||
"description": "Tests if one or more Assist satellites are processing.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Satellite is processing"
|
||||
},
|
||||
"is_responding": {
|
||||
"description": "Tests if one or more Assist satellites are responding.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Satellite is responding"
|
||||
}
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"name": "Assist satellite",
|
||||
@@ -21,6 +65,12 @@
|
||||
"sentences": "Sentences"
|
||||
}
|
||||
},
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
|
||||
@@ -56,7 +56,7 @@ from homeassistant.core import (
|
||||
valid_entity_id,
|
||||
)
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceNotFound, TemplateError
|
||||
from homeassistant.helpers import condition, config_validation as cv
|
||||
from homeassistant.helpers import condition as condition_helper, config_validation as cv
|
||||
from homeassistant.helpers.entity import ToggleEntity
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.issue_registry import (
|
||||
@@ -123,8 +123,12 @@ SERVICE_TRIGGER = "trigger"
|
||||
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG = "new_triggers_conditions"
|
||||
|
||||
_EXPERIMENTAL_CONDITION_PLATFORMS = {
|
||||
"alarm_control_panel",
|
||||
"assist_satellite",
|
||||
"device_tracker",
|
||||
"fan",
|
||||
"light",
|
||||
"siren",
|
||||
}
|
||||
|
||||
_EXPERIMENTAL_TRIGGER_PLATFORMS = {
|
||||
@@ -551,7 +555,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
automation_id: str | None,
|
||||
name: str,
|
||||
trigger_config: list[ConfigType],
|
||||
cond_func: IfAction | None,
|
||||
condition: IfAction | None,
|
||||
action_script: Script,
|
||||
initial_state: bool | None,
|
||||
variables: ScriptVariables | None,
|
||||
@@ -564,7 +568,7 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
self._attr_name = name
|
||||
self._trigger_config = trigger_config
|
||||
self._async_detach_triggers: CALLBACK_TYPE | None = None
|
||||
self._cond_func = cond_func
|
||||
self._condition = condition
|
||||
self.action_script = action_script
|
||||
self.action_script.change_listener = self.async_write_ha_state
|
||||
self._initial_state = initial_state
|
||||
@@ -599,6 +603,12 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
"""Return a set of referenced labels."""
|
||||
referenced = self.action_script.referenced_labels
|
||||
|
||||
if self._condition is not None:
|
||||
for conf in self._condition.config:
|
||||
referenced |= condition_helper.async_extract_targets(
|
||||
conf, ATTR_LABEL_ID
|
||||
)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_LABEL_ID))
|
||||
return referenced
|
||||
@@ -608,6 +618,12 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
"""Return a set of referenced floors."""
|
||||
referenced = self.action_script.referenced_floors
|
||||
|
||||
if self._condition is not None:
|
||||
for conf in self._condition.config:
|
||||
referenced |= condition_helper.async_extract_targets(
|
||||
conf, ATTR_FLOOR_ID
|
||||
)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_FLOOR_ID))
|
||||
return referenced
|
||||
@@ -617,6 +633,10 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
"""Return a set of referenced areas."""
|
||||
referenced = self.action_script.referenced_areas
|
||||
|
||||
if self._condition is not None:
|
||||
for conf in self._condition.config:
|
||||
referenced |= condition_helper.async_extract_targets(conf, ATTR_AREA_ID)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
referenced |= set(_get_targets_from_trigger_config(conf, ATTR_AREA_ID))
|
||||
return referenced
|
||||
@@ -633,9 +653,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
"""Return a set of referenced devices."""
|
||||
referenced = self.action_script.referenced_devices
|
||||
|
||||
if self._cond_func is not None:
|
||||
for conf in self._cond_func.config:
|
||||
referenced |= condition.async_extract_devices(conf)
|
||||
if self._condition is not None:
|
||||
for conf in self._condition.config:
|
||||
referenced |= condition_helper.async_extract_devices(conf)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
referenced |= set(_trigger_extract_devices(conf))
|
||||
@@ -647,9 +667,9 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
"""Return a set of referenced entities."""
|
||||
referenced = self.action_script.referenced_entities
|
||||
|
||||
if self._cond_func is not None:
|
||||
for conf in self._cond_func.config:
|
||||
referenced |= condition.async_extract_entities(conf)
|
||||
if self._condition is not None:
|
||||
for conf in self._condition.config:
|
||||
referenced |= condition_helper.async_extract_entities(conf)
|
||||
|
||||
for conf in self._trigger_config:
|
||||
for entity_id in _trigger_extract_entities(conf):
|
||||
@@ -769,8 +789,8 @@ class AutomationEntity(BaseAutomationEntity, RestoreEntity):
|
||||
|
||||
if (
|
||||
not skip_condition
|
||||
and self._cond_func is not None
|
||||
and not self._cond_func(variables)
|
||||
and self._condition is not None
|
||||
and not self._condition(variables)
|
||||
):
|
||||
self._logger.debug(
|
||||
"Conditions not met, aborting automation. Condition summary: %s",
|
||||
@@ -1032,12 +1052,12 @@ async def _create_automation_entities(
|
||||
)
|
||||
|
||||
if CONF_CONDITIONS in config_block:
|
||||
cond_func = await _async_process_if(hass, name, config_block)
|
||||
condition = await _async_process_if(hass, name, config_block)
|
||||
|
||||
if cond_func is None:
|
||||
if condition is None:
|
||||
continue
|
||||
else:
|
||||
cond_func = None
|
||||
condition = None
|
||||
|
||||
# Add trigger variables to variables
|
||||
variables = None
|
||||
@@ -1055,7 +1075,7 @@ async def _create_automation_entities(
|
||||
automation_id,
|
||||
name,
|
||||
config_block[CONF_TRIGGERS],
|
||||
cond_func,
|
||||
condition,
|
||||
action_script,
|
||||
initial_state,
|
||||
variables,
|
||||
@@ -1197,7 +1217,7 @@ async def _async_process_if(
|
||||
if_configs = config[CONF_CONDITIONS]
|
||||
|
||||
try:
|
||||
if_action = await condition.async_conditions_from_config(
|
||||
if_action = await condition_helper.async_conditions_from_config(
|
||||
hass, if_configs, LOGGER, name
|
||||
)
|
||||
except HomeAssistantError as ex:
|
||||
|
||||
@@ -17,10 +17,12 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_MODEL, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.ssl import get_default_context
|
||||
|
||||
from .const import DOMAIN
|
||||
from .services import async_setup_services
|
||||
from .websocket import BeoWebsocket
|
||||
|
||||
|
||||
@@ -41,6 +43,14 @@ PLATFORMS = [
|
||||
Platform.SENSOR,
|
||||
]
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the component."""
|
||||
async_setup_services(hass)
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: BeoConfigEntry) -> bool:
|
||||
"""Set up from a config entry."""
|
||||
|
||||
@@ -38,7 +38,6 @@ from mozart_api.models import (
|
||||
VolumeState,
|
||||
)
|
||||
from mozart_api.mozart_client import MozartClient, get_highest_resolution_artwork
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
from homeassistant.components.media_player import (
|
||||
@@ -56,17 +55,10 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_MODEL, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from . import BeoConfigEntry
|
||||
@@ -74,7 +66,6 @@ from .const import (
|
||||
BEO_REPEAT_FROM_HA,
|
||||
BEO_REPEAT_TO_HA,
|
||||
BEO_STATES,
|
||||
BEOLINK_JOIN_SOURCES,
|
||||
BEOLINK_JOIN_SOURCES_TO_UPPER,
|
||||
CONF_BEOLINK_JID,
|
||||
CONNECTION_STATUS,
|
||||
@@ -129,61 +120,6 @@ async def async_setup_entry(
        update_before_add=True,
    )

    # Register actions.
    platform = async_get_current_platform()

    jid_regex = vol.Match(
        r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$"
    )

    platform.async_register_entity_service(
        name="beolink_join",
        schema={
            vol.Optional("beolink_jid"): jid_regex,
            vol.Optional("source_id"): vol.In(BEOLINK_JOIN_SOURCES),
        },
        func="async_beolink_join",
    )

    platform.async_register_entity_service(
        name="beolink_expand",
        schema={
            vol.Exclusive("all_discovered", "devices", ""): cv.boolean,
            vol.Exclusive(
                "beolink_jids",
                "devices",
                "Define either specific Beolink JIDs or all discovered",
            ): vol.All(
                cv.ensure_list,
                [jid_regex],
            ),
        },
        func="async_beolink_expand",
    )

    platform.async_register_entity_service(
        name="beolink_unexpand",
        schema={
            vol.Required("beolink_jids"): vol.All(
                cv.ensure_list,
                [jid_regex],
            ),
        },
        func="async_beolink_unexpand",
    )

    platform.async_register_entity_service(
        name="beolink_leave",
        schema=None,
        func="async_beolink_leave",
    )

    platform.async_register_entity_service(
        name="beolink_allstandby",
        schema=None,
        func="async_beolink_allstandby",
    )


class BeoMediaPlayer(BeoEntity, MediaPlayerEntity):
    """Representation of a media player."""
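The `jid_regex` used by these schemas constrains Beolink JIDs to the shape `NNNN.NNNNNNN.NNNNNNNN@products.bang-olufsen.com`. A minimal sketch of how that voluptuous validator behaves, using a made-up JID purely for illustration:

import voluptuous as vol

jid_regex = vol.Match(
    r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$"
)
schema = vol.Schema({vol.Optional("beolink_jid"): jid_regex})

# A well-formed (made-up) JID passes through unchanged...
schema({"beolink_jid": "1111.1234567.12345678@products.bang-olufsen.com"})

# ...while a malformed value raises vol.Invalid.
try:
    schema({"beolink_jid": "not-a-jid"})
except vol.Invalid:
    pass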
homeassistant/components/bang_olufsen/services.py (new file, 83 lines)
@@ -0,0 +1,83 @@
"""Services for Bang & Olufsen integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, service
|
||||
|
||||
from .const import BEOLINK_JOIN_SOURCES, DOMAIN
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
"""Home Assistant services."""
|
||||
|
||||
jid_regex = vol.Match(
|
||||
r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$"
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"beolink_join",
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Optional("beolink_jid"): jid_regex,
|
||||
vol.Optional("source_id"): vol.In(BEOLINK_JOIN_SOURCES),
|
||||
},
|
||||
func="async_beolink_join",
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"beolink_expand",
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Exclusive("all_discovered", "devices", ""): cv.boolean,
|
||||
vol.Exclusive(
|
||||
"beolink_jids",
|
||||
"devices",
|
||||
"Define either specific Beolink JIDs or all discovered",
|
||||
): vol.All(
|
||||
cv.ensure_list,
|
||||
[jid_regex],
|
||||
),
|
||||
},
|
||||
func="async_beolink_expand",
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"beolink_unexpand",
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema={
|
||||
vol.Required("beolink_jids"): vol.All(
|
||||
cv.ensure_list,
|
||||
[jid_regex],
|
||||
),
|
||||
},
|
||||
func="async_beolink_unexpand",
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"beolink_leave",
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="async_beolink_leave",
|
||||
)
|
||||
|
||||
service.async_register_platform_entity_service(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"beolink_allstandby",
|
||||
entity_domain=MEDIA_PLAYER_DOMAIN,
|
||||
schema=None,
|
||||
func="async_beolink_allstandby",
|
||||
)
|
||||
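Once `async_setup_services` has run, these actions are reachable through the normal service registry. A hedged usage sketch; the entity id and JID below are placeholders, not values taken from the diff:

from homeassistant.core import HomeAssistant


async def join_living_room_speaker(hass: HomeAssistant) -> None:
    """Ask a (hypothetical) media_player entity to join a Beolink session."""
    await hass.services.async_call(
        "bang_olufsen",
        "beolink_join",
        {"beolink_jid": "1111.1234567.12345678@products.bang-olufsen.com"},
        target={"entity_id": "media_player.living_room"},
        blocking=True,
    )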
@@ -85,9 +85,9 @@
      }
    },
    "moving": {
      "default": "mdi:arrow-right",
      "default": "mdi:octagon",
      "state": {
        "on": "mdi:octagon"
        "on": "mdi:arrow-right"
      }
    },
    "occupancy": {
@@ -1,5 +1,7 @@
"""BleBox sensor entities."""

from datetime import datetime

import blebox_uniapi.sensor

from homeassistant.components.sensor import (
@@ -146,7 +148,7 @@ class BleBoxSensorEntity(BleBoxEntity[blebox_uniapi.sensor.BaseSensor], SensorEn
        return self._feature.native_value

    @property
    def last_reset(self):
    def last_reset(self) -> datetime | None:
        """Return the time when the sensor was last reset, if implemented."""
        native_implementation = getattr(self._feature, "last_reset", None)
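The hunk above shows only the start of the newly typed property. A minimal, self-contained sketch of the same getattr-fallback pattern; the wrapper class and feature object here are illustrative, not the BleBox API:

from __future__ import annotations

from datetime import datetime
from typing import Any


class FeatureBackedSensor:
    """Illustrative wrapper around a feature object that may expose last_reset."""

    def __init__(self, feature: Any) -> None:
        self._feature = feature

    @property
    def last_reset(self) -> datetime | None:
        """Return the feature's last reset time, or None when it has none."""
        # getattr with a default keeps feature objects without last_reset working
        return getattr(self._feature, "last_reset", None)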
@@ -64,6 +64,7 @@ def _ws_with_blueprint_domain(
    return with_domain_blueprints


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/list",
@@ -97,6 +98,7 @@ async def ws_list_blueprints(
    connection.send_result(msg["id"], results)


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/import",
@@ -150,6 +152,7 @@ async def ws_import_blueprint(
    )


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/save",
@@ -206,6 +209,7 @@ async def ws_save_blueprint(
    )


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/delete",
@@ -233,6 +237,7 @@ async def ws_delete_blueprint(
    )


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "blueprint/substitute",
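Each hunk above places `@websocket_api.require_admin` on top of an existing command, so only admin users can invoke it. A minimal sketch of that decorator stack on a hypothetical command; the `example/ping` type and payload are made up:

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "example/ping"})
@websocket_api.async_response
async def ws_ping(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None:
    """Reply for admin users; non-admin callers receive an unauthorized error."""
    connection.send_result(msg["id"], {"pong": True})

A command like this would be registered with websocket_api.async_register_command(hass, ws_ping) during setup.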
@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
  "requirements": ["hass-nabucasa==1.9.0"],
  "requirements": ["hass-nabucasa==1.11.0"],
  "single_config_entry": true
}
@@ -8,5 +8,5 @@
  "iot_class": "cloud_polling",
  "loggers": ["compit"],
  "quality_scale": "bronze",
  "requirements": ["compit-inext-api==0.3.4"]
  "requirements": ["compit-inext-api==0.4.2"]
}
@@ -49,11 +49,11 @@ def setup_platform(
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Concord232 alarm control panel platform."""
|
||||
name = config[CONF_NAME]
|
||||
code = config.get(CONF_CODE)
|
||||
mode = config[CONF_MODE]
|
||||
host = config[CONF_HOST]
|
||||
port = config[CONF_PORT]
|
||||
name: str = config[CONF_NAME]
|
||||
code: str | None = config.get(CONF_CODE)
|
||||
mode: str = config[CONF_MODE]
|
||||
host: str = config[CONF_HOST]
|
||||
port: int = config[CONF_PORT]
|
||||
|
||||
url = f"http://{host}:{port}"
|
||||
|
||||
@@ -72,7 +72,7 @@ class Concord232Alarm(AlarmControlPanelEntity):
|
||||
| AlarmControlPanelEntityFeature.ARM_AWAY
|
||||
)
|
||||
|
||||
def __init__(self, url, name, code, mode):
|
||||
def __init__(self, url: str, name: str, code: str | None, mode: str) -> None:
|
||||
"""Initialize the Concord232 alarm panel."""
|
||||
|
||||
self._attr_name = name
|
||||
@@ -125,7 +125,7 @@ class Concord232Alarm(AlarmControlPanelEntity):
|
||||
return
|
||||
self._alarm.arm("away")
|
||||
|
||||
def _validate_code(self, code, state):
|
||||
def _validate_code(self, code: str | None, state: AlarmControlPanelState) -> bool:
|
||||
"""Validate given code."""
|
||||
if self._code is None:
|
||||
return True
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from concord232 import client as concord232_client
|
||||
import requests
|
||||
@@ -29,8 +30,7 @@ CONF_ZONE_TYPES = "zone_types"
|
||||
|
||||
DEFAULT_HOST = "localhost"
|
||||
DEFAULT_NAME = "Alarm"
|
||||
DEFAULT_PORT = "5007"
|
||||
DEFAULT_SSL = False
|
||||
DEFAULT_PORT = 5007
|
||||
|
||||
SCAN_INTERVAL = datetime.timedelta(seconds=10)
|
||||
|
||||
@@ -56,10 +56,10 @@ def setup_platform(
|
||||
) -> None:
|
||||
"""Set up the Concord232 binary sensor platform."""
|
||||
|
||||
host = config[CONF_HOST]
|
||||
port = config[CONF_PORT]
|
||||
exclude = config[CONF_EXCLUDE_ZONES]
|
||||
zone_types = config[CONF_ZONE_TYPES]
|
||||
host: str = config[CONF_HOST]
|
||||
port: int = config[CONF_PORT]
|
||||
exclude: list[int] = config[CONF_EXCLUDE_ZONES]
|
||||
zone_types: dict[int, BinarySensorDeviceClass] = config[CONF_ZONE_TYPES]
|
||||
sensors = []
|
||||
|
||||
try:
|
||||
@@ -84,7 +84,6 @@ def setup_platform(
|
||||
if zone["number"] not in exclude:
|
||||
sensors.append(
|
||||
Concord232ZoneSensor(
|
||||
hass,
|
||||
client,
|
||||
zone,
|
||||
zone_types.get(zone["number"], get_opening_type(zone)),
|
||||
@@ -110,26 +109,25 @@ def get_opening_type(zone):
|
||||
class Concord232ZoneSensor(BinarySensorEntity):
|
||||
"""Representation of a Concord232 zone as a sensor."""
|
||||
|
||||
def __init__(self, hass, client, zone, zone_type):
|
||||
def __init__(
|
||||
self,
|
||||
client: concord232_client.Client,
|
||||
zone: dict[str, Any],
|
||||
zone_type: BinarySensorDeviceClass,
|
||||
) -> None:
|
||||
"""Initialize the Concord232 binary sensor."""
|
||||
self._hass = hass
|
||||
self._client = client
|
||||
self._zone = zone
|
||||
self._number = zone["number"]
|
||||
self._zone_type = zone_type
|
||||
self._attr_device_class = zone_type
|
||||
|
||||
@property
|
||||
def device_class(self) -> BinarySensorDeviceClass:
|
||||
"""Return the class of this sensor, from DEVICE_CLASSES."""
|
||||
return self._zone_type
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
def name(self) -> str:
|
||||
"""Return the name of the binary sensor."""
|
||||
return self._zone["name"]
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
# True means "faulted" or "open" or "abnormal state"
|
||||
return bool(self._zone["state"] != "Normal")
|
||||
@@ -145,5 +143,5 @@ class Concord232ZoneSensor(BinarySensorEntity):
|
||||
|
||||
if hasattr(self._client, "zones"):
|
||||
self._zone = next(
|
||||
(x for x in self._client.zones if x["number"] == self._number), None
|
||||
x for x in self._client.zones if x["number"] == self._number
|
||||
)
|
||||
|
||||
@@ -10,7 +10,7 @@ LOGGER = logging.getLogger(__package__)

DOMAIN = "deconz"

HASSIO_CONFIGURATION_URL = "homeassistant://hassio/ingress/core_deconz"
HASSIO_CONFIGURATION_URL = "homeassistant://app/core_deconz"

CONF_BRIDGE_ID = "bridgeid"
CONF_GROUP_ID_BASE = "group_id_base"
homeassistant/components/device_tracker/condition.py (new file, 17 lines)
@@ -0,0 +1,17 @@
"""Provides conditions for device trackers."""
|
||||
|
||||
from homeassistant.const import STATE_HOME, STATE_NOT_HOME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.condition import Condition, make_entity_state_condition
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
CONDITIONS: dict[str, type[Condition]] = {
|
||||
"is_home": make_entity_state_condition(DOMAIN, STATE_HOME),
|
||||
"is_not_home": make_entity_state_condition(DOMAIN, STATE_NOT_HOME),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
|
||||
"""Return the conditions for device trackers."""
|
||||
return CONDITIONS
|
||||
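The `is_home` and `is_not_home` conditions compare tracker states against `STATE_HOME` and `STATE_NOT_HOME`, with the `behavior` field (defined in the accompanying conditions.yaml below) choosing all-versus-any matching. A hedged helper showing that same comparison performed directly against states; the helper itself is illustrative, not part of the integration:

from homeassistant.const import STATE_HOME
from homeassistant.core import HomeAssistant


def trackers_home(
    hass: HomeAssistant, entity_ids: list[str], require_all: bool = True
) -> bool:
    """Return True when the targeted trackers report 'home' (all or any)."""
    results = [
        (state := hass.states.get(entity_id)) is not None
        and state.state == STATE_HOME
        for entity_id in entity_ids
    ]
    return all(results) if require_all else any(results)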
homeassistant/components/device_tracker/conditions.yaml (new file, 17 lines)
@@ -0,0 +1,17 @@
.condition_common: &condition_common
  target:
    entity:
      domain: device_tracker
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          translation_key: condition_behavior
          options:
            - all
            - any

is_home: *condition_common
is_not_home: *condition_common
@@ -1,4 +1,12 @@
{
  "conditions": {
    "is_home": {
      "condition": "mdi:account"
    },
    "is_not_home": {
      "condition": "mdi:account-arrow-right"
    }
  },
  "entity_component": {
    "_": {
      "default": "mdi:account",
@@ -1,8 +1,32 @@
|
||||
{
|
||||
"common": {
|
||||
"condition_behavior_description": "How the state should match on the targeted device trackers.",
|
||||
"condition_behavior_name": "Behavior",
|
||||
"trigger_behavior_description": "The behavior of the targeted device trackers to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"conditions": {
|
||||
"is_home": {
|
||||
"description": "Tests if one or more device trackers are home.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::device_tracker::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::device_tracker::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Device tracker is home"
|
||||
},
|
||||
"is_not_home": {
|
||||
"description": "Tests if one or more device trackers are not home.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::device_tracker::common::condition_behavior_description%]",
|
||||
"name": "[%key:component::device_tracker::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Device tracker is not home"
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
"condition_type": {
|
||||
"is_home": "{entity_name} is home",
|
||||
@@ -49,6 +73,12 @@
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"condition_behavior": {
|
||||
"options": {
|
||||
"all": "All",
|
||||
"any": "Any"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Support for Digital Ocean."""
|
||||
|
||||
from datetime import timedelta
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
import digitalocean
|
||||
@@ -12,27 +13,12 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
from .const import DATA_DIGITAL_OCEAN, DOMAIN, MIN_TIME_BETWEEN_UPDATES
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_CREATED_AT = "created_at"
|
||||
ATTR_DROPLET_ID = "droplet_id"
|
||||
ATTR_DROPLET_NAME = "droplet_name"
|
||||
ATTR_FEATURES = "features"
|
||||
ATTR_IPV4_ADDRESS = "ipv4_address"
|
||||
ATTR_IPV6_ADDRESS = "ipv6_address"
|
||||
ATTR_MEMORY = "memory"
|
||||
ATTR_REGION = "region"
|
||||
ATTR_VCPUS = "vcpus"
|
||||
|
||||
ATTRIBUTION = "Data provided by Digital Ocean"
|
||||
|
||||
CONF_DROPLETS = "droplets"
|
||||
|
||||
DATA_DIGITAL_OCEAN = "data_do"
|
||||
DIGITAL_OCEAN_PLATFORMS = [Platform.SWITCH, Platform.BINARY_SENSOR]
|
||||
DOMAIN = "digital_ocean"
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: vol.Schema({vol.Required(CONF_ACCESS_TOKEN): cv.string})},
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -16,7 +17,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import (
|
||||
from .const import (
|
||||
ATTR_CREATED_AT,
|
||||
ATTR_DROPLET_ID,
|
||||
ATTR_DROPLET_NAME,
|
||||
@@ -65,6 +66,7 @@ class DigitalOceanBinarySensor(BinarySensorEntity):
|
||||
"""Representation of a Digital Ocean droplet sensor."""
|
||||
|
||||
_attr_attribution = ATTRIBUTION
|
||||
_attr_device_class = BinarySensorDeviceClass.MOVING
|
||||
|
||||
def __init__(self, do, droplet_id):
|
||||
"""Initialize a new Digital Ocean sensor."""
|
||||
@@ -79,17 +81,12 @@ class DigitalOceanBinarySensor(BinarySensorEntity):
|
||||
return self.data.name
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self.data.status == "active"
|
||||
|
||||
@property
|
||||
def device_class(self) -> BinarySensorDeviceClass:
|
||||
"""Return the class of this sensor."""
|
||||
return BinarySensorDeviceClass.MOVING
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes of the Digital Ocean droplet."""
|
||||
return {
|
||||
ATTR_CREATED_AT: self.data.created_at,
|
||||
|
||||
homeassistant/components/digital_ocean/const.py (new file, 30 lines)
@@ -0,0 +1,30 @@
"""Support for Digital Ocean."""

from __future__ import annotations

from datetime import timedelta
from typing import TYPE_CHECKING

from homeassistant.util.hass_dict import HassKey

if TYPE_CHECKING:
    from . import DigitalOcean

ATTR_CREATED_AT = "created_at"
ATTR_DROPLET_ID = "droplet_id"
ATTR_DROPLET_NAME = "droplet_name"
ATTR_FEATURES = "features"
ATTR_IPV4_ADDRESS = "ipv4_address"
ATTR_IPV6_ADDRESS = "ipv6_address"
ATTR_MEMORY = "memory"
ATTR_REGION = "region"
ATTR_VCPUS = "vcpus"

ATTRIBUTION = "Data provided by Digital Ocean"

CONF_DROPLETS = "droplets"

DOMAIN = "digital_ocean"
DATA_DIGITAL_OCEAN: HassKey[DigitalOcean] = HassKey(DOMAIN)

MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
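DATA_DIGITAL_OCEAN is now a HassKey, so reads from hass.data come back fully typed. A minimal sketch of that pattern with a stand-in client class; the domain name and client here are illustrative, not the integration's own code:

from homeassistant.core import HomeAssistant
from homeassistant.util.hass_dict import HassKey


class ExampleClient:
    """Stand-in for the object an integration stores in hass.data."""

    def list_droplets(self) -> list[str]:
        return []


EXAMPLE_KEY: HassKey[ExampleClient] = HassKey("example_domain")


def use_client(hass: HomeAssistant) -> list[str]:
    """Type checkers infer ExampleClient for this lookup; no cast is needed."""
    # Stored earlier during setup via: hass.data[EXAMPLE_KEY] = ExampleClient()
    client = hass.data[EXAMPLE_KEY]
    return client.list_droplets()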
@@ -16,7 +16,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import (
|
||||
from .const import (
|
||||
ATTR_CREATED_AT,
|
||||
ATTR_DROPLET_ID,
|
||||
ATTR_DROPLET_NAME,
|
||||
@@ -80,12 +80,12 @@ class DigitalOceanSwitch(SwitchEntity):
|
||||
return self.data.name
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
def is_on(self) -> bool:
|
||||
"""Return true if switch is on."""
|
||||
return self.data.status == "active"
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes of the Digital Ocean droplet."""
|
||||
return {
|
||||
ATTR_CREATED_AT: self.data.created_at,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
"""Support for Ebusd daemon for communication with eBUS heating systems."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import ebusdpy
|
||||
import voluptuous as vol
|
||||
@@ -17,7 +18,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.discovery import load_platform
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN, SENSOR_TYPES
|
||||
from .const import DOMAIN, EBUSD_DATA, SENSOR_TYPES
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -28,9 +29,9 @@ CACHE_TTL = 900
|
||||
SERVICE_EBUSD_WRITE = "ebusd_write"
|
||||
|
||||
|
||||
def verify_ebusd_config(config):
|
||||
def verify_ebusd_config(config: ConfigType) -> ConfigType:
|
||||
"""Verify eBusd config."""
|
||||
circuit = config[CONF_CIRCUIT]
|
||||
circuit: str = config[CONF_CIRCUIT]
|
||||
for condition in config[CONF_MONITORED_CONDITIONS]:
|
||||
if condition not in SENSOR_TYPES[circuit]:
|
||||
raise vol.Invalid(f"Condition '{condition}' not in '{circuit}'.")
|
||||
@@ -59,17 +60,17 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the eBusd component."""
|
||||
_LOGGER.debug("Integration setup started")
|
||||
conf = config[DOMAIN]
|
||||
name = conf[CONF_NAME]
|
||||
circuit = conf[CONF_CIRCUIT]
|
||||
monitored_conditions = conf.get(CONF_MONITORED_CONDITIONS)
|
||||
server_address = (conf.get(CONF_HOST), conf.get(CONF_PORT))
|
||||
conf: ConfigType = config[DOMAIN]
|
||||
name: str = conf[CONF_NAME]
|
||||
circuit: str = conf[CONF_CIRCUIT]
|
||||
monitored_conditions: list[str] = conf[CONF_MONITORED_CONDITIONS]
|
||||
server_address: tuple[str, int] = (conf[CONF_HOST], conf[CONF_PORT])
|
||||
|
||||
try:
|
||||
ebusdpy.init(server_address)
|
||||
except (TimeoutError, OSError):
|
||||
return False
|
||||
hass.data[DOMAIN] = EbusdData(server_address, circuit)
|
||||
hass.data[EBUSD_DATA] = EbusdData(server_address, circuit)
|
||||
sensor_config = {
|
||||
CONF_MONITORED_CONDITIONS: monitored_conditions,
|
||||
"client_name": name,
|
||||
@@ -77,7 +78,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
}
|
||||
load_platform(hass, Platform.SENSOR, DOMAIN, sensor_config, config)
|
||||
|
||||
hass.services.register(DOMAIN, SERVICE_EBUSD_WRITE, hass.data[DOMAIN].write)
|
||||
hass.services.register(DOMAIN, SERVICE_EBUSD_WRITE, hass.data[EBUSD_DATA].write)
|
||||
|
||||
_LOGGER.debug("Ebusd integration setup completed")
|
||||
return True
|
||||
@@ -86,13 +87,13 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
class EbusdData:
|
||||
"""Get the latest data from Ebusd."""
|
||||
|
||||
def __init__(self, address, circuit):
|
||||
def __init__(self, address: tuple[str, int], circuit: str) -> None:
|
||||
"""Initialize the data object."""
|
||||
self._circuit = circuit
|
||||
self._address = address
|
||||
self.value = {}
|
||||
self.value: dict[str, Any] = {}
|
||||
|
||||
def update(self, name, stype):
|
||||
def update(self, name: str, stype: int) -> None:
|
||||
"""Call the Ebusd API to update the data."""
|
||||
try:
|
||||
_LOGGER.debug("Opening socket to ebusd %s", name)
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
"""Constants for ebus component."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.components.sensor import SensorDeviceClass
|
||||
from homeassistant.const import (
|
||||
PERCENTAGE,
|
||||
@@ -8,277 +12,283 @@ from homeassistant.const import (
|
||||
UnitOfTemperature,
|
||||
UnitOfTime,
|
||||
)
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import EbusdData
|
||||
|
||||
DOMAIN = "ebusd"
|
||||
EBUSD_DATA: HassKey[EbusdData] = HassKey(DOMAIN)
|
||||
|
||||
# SensorTypes from ebusdpy module :
|
||||
# 0='decimal', 1='time-schedule', 2='switch', 3='string', 4='value;status'
|
||||
|
||||
SENSOR_TYPES = {
|
||||
type SensorSpecs = tuple[str, str | None, str | None, int, SensorDeviceClass | None]
|
||||
SENSOR_TYPES: dict[str, dict[str, SensorSpecs]] = {
|
||||
"700": {
|
||||
"ActualFlowTemperatureDesired": [
|
||||
"ActualFlowTemperatureDesired": (
|
||||
"Hc1ActualFlowTempDesired",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"MaxFlowTemperatureDesired": [
|
||||
),
|
||||
"MaxFlowTemperatureDesired": (
|
||||
"Hc1MaxFlowTempDesired",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"MinFlowTemperatureDesired": [
|
||||
),
|
||||
"MinFlowTemperatureDesired": (
|
||||
"Hc1MinFlowTempDesired",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"PumpStatus": ["Hc1PumpStatus", None, "mdi:toggle-switch", 2, None],
|
||||
"HCSummerTemperatureLimit": [
|
||||
),
|
||||
"PumpStatus": ("Hc1PumpStatus", None, "mdi:toggle-switch", 2, None),
|
||||
"HCSummerTemperatureLimit": (
|
||||
"Hc1SummerTempLimit",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
"mdi:weather-sunny",
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"HolidayTemperature": [
|
||||
),
|
||||
"HolidayTemperature": (
|
||||
"HolidayTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"HWTemperatureDesired": [
|
||||
),
|
||||
"HWTemperatureDesired": (
|
||||
"HwcTempDesired",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"HWActualTemperature": [
|
||||
),
|
||||
"HWActualTemperature": (
|
||||
"HwcStorageTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"HWTimerMonday": ["hwcTimer.Monday", None, "mdi:timer-outline", 1, None],
|
||||
"HWTimerTuesday": ["hwcTimer.Tuesday", None, "mdi:timer-outline", 1, None],
|
||||
"HWTimerWednesday": ["hwcTimer.Wednesday", None, "mdi:timer-outline", 1, None],
|
||||
"HWTimerThursday": ["hwcTimer.Thursday", None, "mdi:timer-outline", 1, None],
|
||||
"HWTimerFriday": ["hwcTimer.Friday", None, "mdi:timer-outline", 1, None],
|
||||
"HWTimerSaturday": ["hwcTimer.Saturday", None, "mdi:timer-outline", 1, None],
|
||||
"HWTimerSunday": ["hwcTimer.Sunday", None, "mdi:timer-outline", 1, None],
|
||||
"HWOperativeMode": ["HwcOpMode", None, "mdi:math-compass", 3, None],
|
||||
"WaterPressure": [
|
||||
),
|
||||
"HWTimerMonday": ("hwcTimer.Monday", None, "mdi:timer-outline", 1, None),
|
||||
"HWTimerTuesday": ("hwcTimer.Tuesday", None, "mdi:timer-outline", 1, None),
|
||||
"HWTimerWednesday": ("hwcTimer.Wednesday", None, "mdi:timer-outline", 1, None),
|
||||
"HWTimerThursday": ("hwcTimer.Thursday", None, "mdi:timer-outline", 1, None),
|
||||
"HWTimerFriday": ("hwcTimer.Friday", None, "mdi:timer-outline", 1, None),
|
||||
"HWTimerSaturday": ("hwcTimer.Saturday", None, "mdi:timer-outline", 1, None),
|
||||
"HWTimerSunday": ("hwcTimer.Sunday", None, "mdi:timer-outline", 1, None),
|
||||
"HWOperativeMode": ("HwcOpMode", None, "mdi:math-compass", 3, None),
|
||||
"WaterPressure": (
|
||||
"WaterPressure",
|
||||
UnitOfPressure.BAR,
|
||||
"mdi:water-pump",
|
||||
0,
|
||||
SensorDeviceClass.PRESSURE,
|
||||
],
|
||||
"Zone1RoomZoneMapping": ["z1RoomZoneMapping", None, "mdi:label", 0, None],
|
||||
"Zone1NightTemperature": [
|
||||
),
|
||||
"Zone1RoomZoneMapping": ("z1RoomZoneMapping", None, "mdi:label", 0, None),
|
||||
"Zone1NightTemperature": (
|
||||
"z1NightTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
"mdi:weather-night",
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"Zone1DayTemperature": [
|
||||
),
|
||||
"Zone1DayTemperature": (
|
||||
"z1DayTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
"mdi:weather-sunny",
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"Zone1HolidayTemperature": [
|
||||
),
|
||||
"Zone1HolidayTemperature": (
|
||||
"z1HolidayTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"Zone1RoomTemperature": [
|
||||
),
|
||||
"Zone1RoomTemperature": (
|
||||
"z1RoomTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"Zone1ActualRoomTemperatureDesired": [
|
||||
),
|
||||
"Zone1ActualRoomTemperatureDesired": (
|
||||
"z1ActualRoomTempDesired",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"Zone1TimerMonday": ["z1Timer.Monday", None, "mdi:timer-outline", 1, None],
|
||||
"Zone1TimerTuesday": ["z1Timer.Tuesday", None, "mdi:timer-outline", 1, None],
|
||||
"Zone1TimerWednesday": [
|
||||
),
|
||||
"Zone1TimerMonday": ("z1Timer.Monday", None, "mdi:timer-outline", 1, None),
|
||||
"Zone1TimerTuesday": ("z1Timer.Tuesday", None, "mdi:timer-outline", 1, None),
|
||||
"Zone1TimerWednesday": (
|
||||
"z1Timer.Wednesday",
|
||||
None,
|
||||
"mdi:timer-outline",
|
||||
1,
|
||||
None,
|
||||
],
|
||||
"Zone1TimerThursday": ["z1Timer.Thursday", None, "mdi:timer-outline", 1, None],
|
||||
"Zone1TimerFriday": ["z1Timer.Friday", None, "mdi:timer-outline", 1, None],
|
||||
"Zone1TimerSaturday": ["z1Timer.Saturday", None, "mdi:timer-outline", 1, None],
|
||||
"Zone1TimerSunday": ["z1Timer.Sunday", None, "mdi:timer-outline", 1, None],
|
||||
"Zone1OperativeMode": ["z1OpMode", None, "mdi:math-compass", 3, None],
|
||||
"ContinuosHeating": [
|
||||
),
|
||||
"Zone1TimerThursday": ("z1Timer.Thursday", None, "mdi:timer-outline", 1, None),
|
||||
"Zone1TimerFriday": ("z1Timer.Friday", None, "mdi:timer-outline", 1, None),
|
||||
"Zone1TimerSaturday": ("z1Timer.Saturday", None, "mdi:timer-outline", 1, None),
|
||||
"Zone1TimerSunday": ("z1Timer.Sunday", None, "mdi:timer-outline", 1, None),
|
||||
"Zone1OperativeMode": ("z1OpMode", None, "mdi:math-compass", 3, None),
|
||||
"ContinuosHeating": (
|
||||
"ContinuosHeating",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
"mdi:weather-snowy",
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"PowerEnergyConsumptionLastMonth": [
|
||||
),
|
||||
"PowerEnergyConsumptionLastMonth": (
|
||||
"PrEnergySumHcLastMonth",
|
||||
UnitOfEnergy.KILO_WATT_HOUR,
|
||||
"mdi:flash",
|
||||
0,
|
||||
SensorDeviceClass.ENERGY,
|
||||
],
|
||||
"PowerEnergyConsumptionThisMonth": [
|
||||
),
|
||||
"PowerEnergyConsumptionThisMonth": (
|
||||
"PrEnergySumHcThisMonth",
|
||||
UnitOfEnergy.KILO_WATT_HOUR,
|
||||
"mdi:flash",
|
||||
0,
|
||||
SensorDeviceClass.ENERGY,
|
||||
],
|
||||
),
|
||||
},
|
||||
"ehp": {
|
||||
"HWTemperature": [
|
||||
"HWTemperature": (
|
||||
"HwcTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
4,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"OutsideTemp": [
|
||||
),
|
||||
"OutsideTemp": (
|
||||
"OutsideTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
4,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
),
|
||||
},
|
||||
"bai": {
|
||||
"HotWaterTemperature": [
|
||||
"HotWaterTemperature": (
|
||||
"HwcTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
4,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"StorageTemperature": [
|
||||
),
|
||||
"StorageTemperature": (
|
||||
"StorageTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
4,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"DesiredStorageTemperature": [
|
||||
),
|
||||
"DesiredStorageTemperature": (
|
||||
"StorageTempDesired",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"OutdoorsTemperature": [
|
||||
),
|
||||
"OutdoorsTemperature": (
|
||||
"OutdoorstempSensor",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
4,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"WaterPressure": [
|
||||
),
|
||||
"WaterPressure": (
|
||||
"WaterPressure",
|
||||
UnitOfPressure.BAR,
|
||||
"mdi:pipe",
|
||||
4,
|
||||
SensorDeviceClass.PRESSURE,
|
||||
],
|
||||
"AverageIgnitionTime": [
|
||||
),
|
||||
"AverageIgnitionTime": (
|
||||
"averageIgnitiontime",
|
||||
UnitOfTime.SECONDS,
|
||||
"mdi:av-timer",
|
||||
0,
|
||||
SensorDeviceClass.DURATION,
|
||||
],
|
||||
"MaximumIgnitionTime": [
|
||||
),
|
||||
"MaximumIgnitionTime": (
|
||||
"maxIgnitiontime",
|
||||
UnitOfTime.SECONDS,
|
||||
"mdi:av-timer",
|
||||
0,
|
||||
SensorDeviceClass.DURATION,
|
||||
],
|
||||
"MinimumIgnitionTime": [
|
||||
),
|
||||
"MinimumIgnitionTime": (
|
||||
"minIgnitiontime",
|
||||
UnitOfTime.SECONDS,
|
||||
"mdi:av-timer",
|
||||
0,
|
||||
SensorDeviceClass.DURATION,
|
||||
],
|
||||
"ReturnTemperature": [
|
||||
),
|
||||
"ReturnTemperature": (
|
||||
"ReturnTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
4,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"CentralHeatingPump": ["WP", None, "mdi:toggle-switch", 2, None],
|
||||
"HeatingSwitch": ["HeatingSwitch", None, "mdi:toggle-switch", 2, None],
|
||||
"DesiredFlowTemperature": [
|
||||
),
|
||||
"CentralHeatingPump": ("WP", None, "mdi:toggle-switch", 2, None),
|
||||
"HeatingSwitch": ("HeatingSwitch", None, "mdi:toggle-switch", 2, None),
|
||||
"DesiredFlowTemperature": (
|
||||
"FlowTempDesired",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
0,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"FlowTemperature": [
|
||||
),
|
||||
"FlowTemperature": (
|
||||
"FlowTemp",
|
||||
UnitOfTemperature.CELSIUS,
|
||||
None,
|
||||
4,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
],
|
||||
"Flame": ["Flame", None, "mdi:toggle-switch", 2, None],
|
||||
"PowerEnergyConsumptionHeatingCircuit": [
|
||||
),
|
||||
"Flame": ("Flame", None, "mdi:toggle-switch", 2, None),
|
||||
"PowerEnergyConsumptionHeatingCircuit": (
|
||||
"PrEnergySumHc1",
|
||||
UnitOfEnergy.KILO_WATT_HOUR,
|
||||
"mdi:flash",
|
||||
0,
|
||||
SensorDeviceClass.ENERGY,
|
||||
],
|
||||
"PowerEnergyConsumptionHotWaterCircuit": [
|
||||
),
|
||||
"PowerEnergyConsumptionHotWaterCircuit": (
|
||||
"PrEnergySumHwc1",
|
||||
UnitOfEnergy.KILO_WATT_HOUR,
|
||||
"mdi:flash",
|
||||
0,
|
||||
SensorDeviceClass.ENERGY,
|
||||
],
|
||||
"RoomThermostat": ["DCRoomthermostat", None, "mdi:toggle-switch", 2, None],
|
||||
"HeatingPartLoad": [
|
||||
),
|
||||
"RoomThermostat": ("DCRoomthermostat", None, "mdi:toggle-switch", 2, None),
|
||||
"HeatingPartLoad": (
|
||||
"PartloadHcKW",
|
||||
UnitOfEnergy.KILO_WATT_HOUR,
|
||||
"mdi:flash",
|
||||
0,
|
||||
SensorDeviceClass.ENERGY,
|
||||
],
|
||||
"StateNumber": ["StateNumber", None, "mdi:fire", 3, None],
|
||||
"ModulationPercentage": [
|
||||
),
|
||||
"StateNumber": ("StateNumber", None, "mdi:fire", 3, None),
|
||||
"ModulationPercentage": (
|
||||
"ModulationTempDesired",
|
||||
PERCENTAGE,
|
||||
"mdi:percent",
|
||||
0,
|
||||
None,
|
||||
],
|
||||
),
|
||||
},
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from homeassistant.components.sensor import SensorDeviceClass, SensorEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -11,7 +12,8 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import Throttle, dt as dt_util
|
||||
|
||||
from .const import DOMAIN
|
||||
from . import EbusdData
|
||||
from .const import EBUSD_DATA, SensorSpecs
|
||||
|
||||
TIME_FRAME1_BEGIN = "time_frame1_begin"
|
||||
TIME_FRAME1_END = "time_frame1_end"
|
||||
@@ -33,9 +35,9 @@ def setup_platform(
|
||||
"""Set up the Ebus sensor."""
|
||||
if not discovery_info:
|
||||
return
|
||||
ebusd_api = hass.data[DOMAIN]
|
||||
monitored_conditions = discovery_info["monitored_conditions"]
|
||||
name = discovery_info["client_name"]
|
||||
ebusd_api = hass.data[EBUSD_DATA]
|
||||
monitored_conditions: list[str] = discovery_info["monitored_conditions"]
|
||||
name: str = discovery_info["client_name"]
|
||||
|
||||
add_entities(
|
||||
(
|
||||
@@ -49,9 +51,8 @@ def setup_platform(
|
||||
class EbusdSensor(SensorEntity):
|
||||
"""Ebusd component sensor methods definition."""
|
||||
|
||||
def __init__(self, data, sensor, name):
|
||||
def __init__(self, data: EbusdData, sensor: SensorSpecs, name: str) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self._state = None
|
||||
self._client_name = name
|
||||
(
|
||||
self._name,
|
||||
@@ -63,20 +64,15 @@ class EbusdSensor(SensorEntity):
|
||||
self.data = data
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
def name(self) -> str:
|
||||
"""Return the name of the sensor."""
|
||||
return f"{self._client_name} {self._name}"
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
return self._state
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return the device state attributes."""
|
||||
if self._type == 1 and self._state is not None:
|
||||
schedule = {
|
||||
if self._type == 1 and (native_value := self.native_value) is not None:
|
||||
schedule: dict[str, str | None] = {
|
||||
TIME_FRAME1_BEGIN: None,
|
||||
TIME_FRAME1_END: None,
|
||||
TIME_FRAME2_BEGIN: None,
|
||||
@@ -84,7 +80,7 @@ class EbusdSensor(SensorEntity):
|
||||
TIME_FRAME3_BEGIN: None,
|
||||
TIME_FRAME3_END: None,
|
||||
}
|
||||
time_frame = self._state.split(";")
|
||||
time_frame = cast(str, native_value).split(";")
|
||||
for index, item in enumerate(sorted(schedule.items())):
|
||||
if index < len(time_frame):
|
||||
parsed = datetime.datetime.strptime(time_frame[index], "%H:%M")
|
||||
@@ -101,12 +97,12 @@ class EbusdSensor(SensorEntity):
|
||||
return self._device_class
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
def icon(self) -> str | None:
|
||||
"""Icon to use in the frontend, if any."""
|
||||
return self._icon
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self):
|
||||
def native_unit_of_measurement(self) -> str | None:
|
||||
"""Return the unit of measurement."""
|
||||
return self._unit_of_measurement
|
||||
|
||||
@@ -118,6 +114,6 @@ class EbusdSensor(SensorEntity):
|
||||
if self._name not in self.data.value:
|
||||
return
|
||||
|
||||
self._state = self.data.value[self._name]
|
||||
self._attr_native_value = self.data.value[self._name]
|
||||
except RuntimeError:
|
||||
_LOGGER.debug("EbusdData.update exception")
|
||||
|
||||
@@ -18,6 +18,7 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, discovery
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -35,7 +36,7 @@ DEFAULT_REPORT_SERVER_PORT = 52010
|
||||
DEFAULT_VERSION = "GATE-01"
|
||||
DOMAIN = "egardia"
|
||||
|
||||
EGARDIA_DEVICE = "egardiadevice"
|
||||
EGARDIA_DEVICE: HassKey[egardiadevice.EgardiaDevice] = HassKey(DOMAIN)
|
||||
EGARDIA_NAME = "egardianame"
|
||||
EGARDIA_REPORT_SERVER_CODES = "egardia_rs_codes"
|
||||
EGARDIA_REPORT_SERVER_ENABLED = "egardia_rs_enabled"
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from pythonegardia.egardiadevice import EgardiaDevice
|
||||
import requests
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
@@ -11,6 +12,7 @@ from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanelEntityFeature,
|
||||
AlarmControlPanelState,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
@@ -47,10 +49,10 @@ def setup_platform(
|
||||
if discovery_info is None:
|
||||
return
|
||||
device = EgardiaAlarm(
|
||||
discovery_info["name"],
|
||||
discovery_info[CONF_NAME],
|
||||
hass.data[EGARDIA_DEVICE],
|
||||
discovery_info[CONF_REPORT_SERVER_ENABLED],
|
||||
discovery_info.get(CONF_REPORT_SERVER_CODES),
|
||||
discovery_info[CONF_REPORT_SERVER_CODES],
|
||||
discovery_info[CONF_REPORT_SERVER_PORT],
|
||||
)
|
||||
|
||||
@@ -67,8 +69,13 @@ class EgardiaAlarm(AlarmControlPanelEntity):
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self, name, egardiasystem, rs_enabled=False, rs_codes=None, rs_port=52010
|
||||
):
|
||||
self,
|
||||
name: str,
|
||||
egardiasystem: EgardiaDevice,
|
||||
rs_enabled: bool,
|
||||
rs_codes: dict[str, list[str]],
|
||||
rs_port: int,
|
||||
) -> None:
|
||||
"""Initialize the Egardia alarm."""
|
||||
self._attr_name = name
|
||||
self._egardiasystem = egardiasystem
|
||||
@@ -85,9 +92,7 @@ class EgardiaAlarm(AlarmControlPanelEntity):
|
||||
@property
|
||||
def should_poll(self) -> bool:
|
||||
"""Poll if no report server is enabled."""
|
||||
if not self._rs_enabled:
|
||||
return True
|
||||
return False
|
||||
return not self._rs_enabled
|
||||
|
||||
def handle_status_event(self, event):
|
||||
"""Handle the Egardia system status event."""
|
||||
|
||||
@@ -2,11 +2,12 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pythonegardia.egardiadevice import EgardiaDevice
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
)
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
@@ -51,30 +52,20 @@ async def async_setup_platform(
|
||||
class EgardiaBinarySensor(BinarySensorEntity):
|
||||
"""Represents a sensor based on an Egardia sensor (IR, Door Contact)."""
|
||||
|
||||
def __init__(self, sensor_id, name, egardia_system, device_class):
|
||||
def __init__(
|
||||
self,
|
||||
sensor_id: str,
|
||||
name: str,
|
||||
egardia_system: EgardiaDevice,
|
||||
device_class: BinarySensorDeviceClass | None,
|
||||
) -> None:
|
||||
"""Initialize the sensor device."""
|
||||
self._id = sensor_id
|
||||
self._name = name
|
||||
self._state = None
|
||||
self._device_class = device_class
|
||||
self._attr_name = name
|
||||
self._attr_device_class = device_class
|
||||
self._egardia_system = egardia_system
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the status."""
|
||||
egardia_input = self._egardia_system.getsensorstate(self._id)
|
||||
self._state = STATE_ON if egardia_input else STATE_OFF
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the device."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Whether the device is switched on."""
|
||||
return self._state == STATE_ON
|
||||
|
||||
@property
|
||||
def device_class(self) -> BinarySensorDeviceClass | None:
|
||||
"""Return the device class."""
|
||||
return self._device_class
|
||||
self._attr_is_on = bool(egardia_input)
|
||||
|
||||
@@ -18,12 +18,13 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.discovery import async_load_platform
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = "envisalink"
|
||||
|
||||
DATA_EVL = "envisalink"
|
||||
DATA_EVL: HassKey[EnvisalinkAlarmPanel] = HassKey(DOMAIN)
|
||||
|
||||
CONF_EVL_KEEPALIVE = "keepalive_interval"
|
||||
CONF_EVL_PORT = "port"
|
||||
|
||||
@@ -3,7 +3,9 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyenvisalink import EnvisalinkAlarmPanel
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
@@ -22,6 +24,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from . import (
|
||||
CONF_PANIC,
|
||||
CONF_PARTITIONNAME,
|
||||
CONF_PARTITIONS,
|
||||
DATA_EVL,
|
||||
DOMAIN,
|
||||
PARTITION_SCHEMA,
|
||||
@@ -51,15 +54,14 @@ async def async_setup_platform(
|
||||
"""Perform the setup for Envisalink alarm panels."""
|
||||
if not discovery_info:
|
||||
return
|
||||
configured_partitions = discovery_info["partitions"]
|
||||
code = discovery_info[CONF_CODE]
|
||||
panic_type = discovery_info[CONF_PANIC]
|
||||
configured_partitions: dict[int, dict[str, Any]] = discovery_info[CONF_PARTITIONS]
|
||||
code: str | None = discovery_info[CONF_CODE]
|
||||
panic_type: str = discovery_info[CONF_PANIC]
|
||||
|
||||
entities = []
|
||||
for part_num in configured_partitions:
|
||||
entity_config_data = PARTITION_SCHEMA(configured_partitions[part_num])
|
||||
for part_num, part_config in configured_partitions.items():
|
||||
entity_config_data = PARTITION_SCHEMA(part_config)
|
||||
entity = EnvisalinkAlarm(
|
||||
hass,
|
||||
part_num,
|
||||
entity_config_data[CONF_PARTITIONNAME],
|
||||
code,
|
||||
@@ -103,8 +105,14 @@ class EnvisalinkAlarm(EnvisalinkEntity, AlarmControlPanelEntity):
|
||||
)
|
||||
|
||||
def __init__(
|
||||
self, hass, partition_number, alarm_name, code, panic_type, info, controller
|
||||
):
|
||||
self,
|
||||
partition_number: int,
|
||||
alarm_name: str,
|
||||
code: str | None,
|
||||
panic_type: str,
|
||||
info: dict[str, Any],
|
||||
controller: EnvisalinkAlarmPanel,
|
||||
) -> None:
|
||||
"""Initialize the alarm panel."""
|
||||
self._partition_number = partition_number
|
||||
self._panic_type = panic_type
|
||||
|
||||
@@ -4,6 +4,9 @@ from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyenvisalink import EnvisalinkAlarmPanel
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@@ -16,7 +19,14 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import CONF_ZONENAME, CONF_ZONETYPE, DATA_EVL, SIGNAL_ZONE_UPDATE, ZONE_SCHEMA
|
||||
from . import (
|
||||
CONF_ZONENAME,
|
||||
CONF_ZONES,
|
||||
CONF_ZONETYPE,
|
||||
DATA_EVL,
|
||||
SIGNAL_ZONE_UPDATE,
|
||||
ZONE_SCHEMA,
|
||||
)
|
||||
from .entity import EnvisalinkEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -31,13 +41,12 @@ async def async_setup_platform(
|
||||
"""Set up the Envisalink binary sensor entities."""
|
||||
if not discovery_info:
|
||||
return
|
||||
configured_zones = discovery_info["zones"]
|
||||
configured_zones: dict[int, dict[str, Any]] = discovery_info[CONF_ZONES]
|
||||
|
||||
entities = []
|
||||
for zone_num in configured_zones:
|
||||
entity_config_data = ZONE_SCHEMA(configured_zones[zone_num])
|
||||
for zone_num, zone_data in configured_zones.items():
|
||||
entity_config_data = ZONE_SCHEMA(zone_data)
|
||||
entity = EnvisalinkBinarySensor(
|
||||
hass,
|
||||
zone_num,
|
||||
entity_config_data[CONF_ZONENAME],
|
||||
entity_config_data[CONF_ZONETYPE],
|
||||
@@ -52,9 +61,16 @@ async def async_setup_platform(
|
||||
class EnvisalinkBinarySensor(EnvisalinkEntity, BinarySensorEntity):
|
||||
"""Representation of an Envisalink binary sensor."""
|
||||
|
||||
def __init__(self, hass, zone_number, zone_name, zone_type, info, controller):
|
||||
def __init__(
|
||||
self,
|
||||
zone_number: int,
|
||||
zone_name: str,
|
||||
zone_type: BinarySensorDeviceClass,
|
||||
info: dict[str, Any],
|
||||
controller: EnvisalinkAlarmPanel,
|
||||
) -> None:
|
||||
"""Initialize the binary_sensor."""
|
||||
self._zone_type = zone_type
|
||||
self._attr_device_class = zone_type
|
||||
self._zone_number = zone_number
|
||||
|
||||
_LOGGER.debug("Setting up zone: %s", zone_name)
|
||||
@@ -69,9 +85,9 @@ class EnvisalinkBinarySensor(EnvisalinkEntity, BinarySensorEntity):
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the state attributes."""
|
||||
attr = {}
|
||||
attr: dict[str, Any] = {}
|
||||
|
||||
# The Envisalink library returns a "last_fault" value that's the
|
||||
# number of seconds since the last fault, up to a maximum of 327680
|
||||
@@ -104,11 +120,6 @@ class EnvisalinkBinarySensor(EnvisalinkEntity, BinarySensorEntity):
|
||||
"""Return true if sensor is on."""
|
||||
return self._info["status"]["open"]
|
||||
|
||||
@property
|
||||
def device_class(self) -> BinarySensorDeviceClass:
|
||||
"""Return the class of this sensor, from DEVICE_CLASSES."""
|
||||
return self._zone_type
|
||||
|
||||
@callback
|
||||
def async_update_callback(self, zone):
|
||||
"""Update the zone's state, if needed."""
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
"""Support for Envisalink devices."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyenvisalink import EnvisalinkAlarmPanel
|
||||
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
|
||||
@@ -8,13 +12,10 @@ class EnvisalinkEntity(Entity):
|
||||
|
||||
_attr_should_poll = False
|
||||
|
||||
def __init__(self, name, info, controller):
|
||||
def __init__(
|
||||
self, name: str, info: dict[str, Any], controller: EnvisalinkAlarmPanel
|
||||
) -> None:
|
||||
"""Initialize the device."""
|
||||
self._controller = controller
|
||||
self._info = info
|
||||
self._name = name
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the device."""
|
||||
return self._name
|
||||
self._attr_name = name
|
||||
|
||||
@@ -3,6 +3,9 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyenvisalink import EnvisalinkAlarmPanel
|
||||
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -12,6 +15,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import (
|
||||
CONF_PARTITIONNAME,
|
||||
CONF_PARTITIONS,
|
||||
DATA_EVL,
|
||||
PARTITION_SCHEMA,
|
||||
SIGNAL_KEYPAD_UPDATE,
|
||||
@@ -31,13 +35,12 @@ async def async_setup_platform(
|
||||
"""Perform the setup for Envisalink sensor entities."""
|
||||
if not discovery_info:
|
||||
return
|
||||
configured_partitions = discovery_info["partitions"]
|
||||
configured_partitions: dict[int, dict[str, Any]] = discovery_info[CONF_PARTITIONS]
|
||||
|
||||
entities = []
|
||||
for part_num in configured_partitions:
|
||||
entity_config_data = PARTITION_SCHEMA(configured_partitions[part_num])
|
||||
for part_num, part_config in configured_partitions.items():
|
||||
entity_config_data = PARTITION_SCHEMA(part_config)
|
||||
entity = EnvisalinkSensor(
|
||||
hass,
|
||||
entity_config_data[CONF_PARTITIONNAME],
|
||||
part_num,
|
||||
hass.data[DATA_EVL].alarm_state["partition"][part_num],
|
||||
@@ -52,9 +55,16 @@ async def async_setup_platform(
|
||||
class EnvisalinkSensor(EnvisalinkEntity, SensorEntity):
|
||||
"""Representation of an Envisalink keypad."""
|
||||
|
||||
def __init__(self, hass, partition_name, partition_number, info, controller):
|
||||
_attr_icon = "mdi:alarm"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
partition_name: str,
|
||||
partition_number: int,
|
||||
info: dict[str, Any],
|
||||
controller: EnvisalinkAlarmPanel,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self._icon = "mdi:alarm"
|
||||
self._partition_number = partition_number
|
||||
|
||||
_LOGGER.debug("Setting up sensor for partition: %s", partition_name)
|
||||
@@ -73,11 +83,6 @@ class EnvisalinkSensor(EnvisalinkEntity, SensorEntity):
|
||||
)
|
||||
)
|
||||
|
||||
@property
|
||||
def icon(self):
|
||||
"""Return the icon if any."""
|
||||
return self._icon
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the overall state."""
|
||||
|
||||
@@ -5,13 +5,21 @@ from __future__ import annotations
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from pyenvisalink import EnvisalinkAlarmPanel
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import CONF_ZONENAME, DATA_EVL, SIGNAL_ZONE_BYPASS_UPDATE, ZONE_SCHEMA
|
||||
from . import (
|
||||
CONF_ZONENAME,
|
||||
CONF_ZONES,
|
||||
DATA_EVL,
|
||||
SIGNAL_ZONE_BYPASS_UPDATE,
|
||||
ZONE_SCHEMA,
|
||||
)
|
||||
from .entity import EnvisalinkEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -26,16 +34,15 @@ async def async_setup_platform(
|
||||
"""Set up the Envisalink switch entities."""
|
||||
if not discovery_info:
|
||||
return
|
||||
configured_zones = discovery_info["zones"]
|
||||
configured_zones: dict[int, dict[str, Any]] = discovery_info[CONF_ZONES]
|
||||
|
||||
entities = []
|
||||
for zone_num in configured_zones:
|
||||
entity_config_data = ZONE_SCHEMA(configured_zones[zone_num])
|
||||
for zone_num, zone_data in configured_zones.items():
|
||||
entity_config_data = ZONE_SCHEMA(zone_data)
|
||||
zone_name = f"{entity_config_data[CONF_ZONENAME]}_bypass"
|
||||
_LOGGER.debug("Setting up zone_bypass switch: %s", zone_name)
|
||||
|
||||
entity = EnvisalinkSwitch(
|
||||
hass,
|
||||
zone_num,
|
||||
zone_name,
|
||||
hass.data[DATA_EVL].alarm_state["zone"][zone_num],
|
||||
@@ -49,7 +56,13 @@ async def async_setup_platform(
|
||||
class EnvisalinkSwitch(EnvisalinkEntity, SwitchEntity):
|
||||
"""Representation of an Envisalink switch."""
|
||||
|
||||
def __init__(self, hass, zone_number, zone_name, info, controller):
|
||||
def __init__(
|
||||
self,
|
||||
zone_number: int,
|
||||
zone_name: str,
|
||||
info: dict[str, Any],
|
||||
controller: EnvisalinkAlarmPanel,
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
self._zone_number = zone_number
|
||||
|
||||
|
||||
@@ -1034,7 +1034,7 @@ def _async_setup_device_registry(
        and dashboard.data
        and dashboard.data.get(device_info.name)
    ):
        configuration_url = f"homeassistant://hassio/ingress/{dashboard.addon_slug}"
        configuration_url = f"homeassistant://app/{dashboard.addon_slug}"

    manufacturer = "espressif"
    if device_info.manufacturer:
@@ -7,7 +7,7 @@ from enum import StrEnum
from typing import Final

DOMAIN: Final = "essent"
UPDATE_INTERVAL: Final = timedelta(hours=12)
UPDATE_INTERVAL: Final = timedelta(hours=1)
ATTRIBUTION: Final = "Data provided by Essent"
@@ -14,7 +14,7 @@
|
||||
"name": "[%key:component::fan::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "If a fan is off"
|
||||
"name": "Fan is off"
|
||||
},
|
||||
"is_on": {
|
||||
"description": "Tests if one or more fans are on.",
|
||||
@@ -24,7 +24,7 @@
|
||||
"name": "[%key:component::fan::common::condition_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "If a fan is on"
|
||||
"name": "Fan is on"
|
||||
}
|
||||
},
|
||||
"device_automation": {
|
||||
|
||||
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["pyfirefly==0.1.11"]
  "requirements": ["pyfirefly==0.1.12"]
}
@@ -23,5 +23,5 @@
    "winter_mode": {}
  },
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20260107.1"]
  "requirements": ["home-assistant-frontend==20260107.2"]
}
@@ -7,20 +7,8 @@
|
||||
"benzene": {
|
||||
"default": "mdi:molecule"
|
||||
},
|
||||
"nitrogen_dioxide": {
|
||||
"default": "mdi:molecule"
|
||||
},
|
||||
"nitrogen_monoxide": {
|
||||
"default": "mdi:molecule"
|
||||
},
|
||||
"non_methane_hydrocarbons": {
|
||||
"default": "mdi:molecule"
|
||||
},
|
||||
"ozone": {
|
||||
"default": "mdi:molecule"
|
||||
},
|
||||
"sulphur_dioxide": {
|
||||
"default": "mdi:molecule"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["google_air_quality_api"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["google_air_quality_api==2.1.2"]
|
||||
"requirements": ["google_air_quality_api==3.0.0"]
|
||||
}
|
||||
|
||||
@@ -13,7 +13,11 @@ from homeassistant.components.sensor import (
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigSubentry
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
|
||||
from homeassistant.const import (
|
||||
CONCENTRATION_PARTS_PER_MILLION,
|
||||
CONF_LATITUDE,
|
||||
CONF_LONGITUDE,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
@@ -114,6 +118,7 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
|
||||
native_unit_of_measurement_fn=lambda x: x.pollutants.co.concentration.units,
|
||||
exists_fn=lambda x: "co" in {p.code for p in x.pollutants},
|
||||
value_fn=lambda x: x.pollutants.co.concentration.value,
|
||||
suggested_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
|
||||
),
|
||||
AirQualitySensorEntityDescription(
|
||||
key="nh3",
|
||||
@@ -133,24 +138,24 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
    ),
    AirQualitySensorEntityDescription(
        key="no",
        translation_key="nitrogen_monoxide",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.NITROGEN_MONOXIDE,
        native_unit_of_measurement_fn=lambda x: x.pollutants.no.concentration.units,
        value_fn=lambda x: x.pollutants.no.concentration.value,
        exists_fn=lambda x: "no" in {p.code for p in x.pollutants},
    ),
    AirQualitySensorEntityDescription(
        key="no2",
        translation_key="nitrogen_dioxide",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.NITROGEN_DIOXIDE,
        native_unit_of_measurement_fn=lambda x: x.pollutants.no2.concentration.units,
        exists_fn=lambda x: "no2" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.no2.concentration.value,
    ),
    AirQualitySensorEntityDescription(
        key="o3",
        translation_key="ozone",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.OZONE,
        native_unit_of_measurement_fn=lambda x: x.pollutants.o3.concentration.units,
        exists_fn=lambda x: "o3" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.o3.concentration.value,

@@ -173,8 +178,8 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
    ),
    AirQualitySensorEntityDescription(
        key="so2",
        translation_key="sulphur_dioxide",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.SULPHUR_DIOXIDE,
        native_unit_of_measurement_fn=lambda x: x.pollutants.so2.concentration.units,
        exists_fn=lambda x: "so2" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.so2.concentration.value,
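The pollutant entries above all follow the same declarative pattern: each `AirQualitySensorEntityDescription` carries `exists_fn`, `value_fn`, and `native_unit_of_measurement_fn` callables that read everything from the coordinator's API payload, so the platform can decide per pollutant whether a sensor should exist at all. The sketch below illustrates that pattern with simplified, hypothetical stand-in classes (`PollutantSensorDescription`, `AirQualityData`); it is not the integration's actual code.

```python
from collections.abc import Callable
from dataclasses import dataclass, field
from typing import Any


@dataclass(frozen=True, kw_only=True)
class PollutantSensorDescription:
    """Simplified stand-in for an entity description driven by lambdas."""

    key: str
    exists_fn: Callable[[Any], bool]
    value_fn: Callable[[Any], float]
    native_unit_of_measurement_fn: Callable[[Any], str]


@dataclass
class Concentration:
    value: float
    units: str


@dataclass
class AirQualityData:
    """Hypothetical coordinator payload keyed by pollutant code."""

    pollutants: dict[str, Concentration] = field(default_factory=dict)


DESCRIPTIONS = tuple(
    PollutantSensorDescription(
        key=code,
        exists_fn=lambda data, code=code: code in data.pollutants,
        value_fn=lambda data, code=code: data.pollutants[code].value,
        native_unit_of_measurement_fn=lambda data, code=code: data.pollutants[code].units,
    )
    for code in ("o3", "so2", "no2")
)


def build_states(data: AirQualityData) -> dict[str, tuple[float, str]]:
    """Create a state only for pollutants the API actually reported."""
    return {
        desc.key: (desc.value_fn(data), desc.native_unit_of_measurement_fn(data))
        for desc in DESCRIPTIONS
        if desc.exists_fn(data)
    }


if __name__ == "__main__":
    payload = AirQualityData(pollutants={"o3": Concentration(31.2, "PARTS_PER_BILLION")})
    print(build_states(payload))  # {'o3': (31.2, 'PARTS_PER_BILLION')}
```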
@@ -205,21 +205,9 @@
      "so2": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
    }
  },
  "nitrogen_dioxide": {
    "name": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]"
  },
  "nitrogen_monoxide": {
    "name": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]"
  },
  "non_methane_hydrocarbons": {
    "name": "Non-methane hydrocarbons"
  },
  "ozone": {
    "name": "[%key:component::sensor::entity_component::ozone::name%]"
  },
  "sulphur_dioxide": {
    "name": "[%key:component::sensor::entity_component::sulphur_dioxide::name%]"
  },
  "uaqi": {
    "name": "Universal Air Quality Index"
  },

@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/google_generative_ai_conversation",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["google-genai==1.56.0"]
+  "requirements": ["google-genai==1.59.0"]
 }
@@ -83,6 +83,9 @@
       "invalid_credentials": "Input is incomplete. You must provide either your login details or an API token",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
+    "initiate_flow": {
+      "user": "[%key:common::config_flow::initiate_flow::account%]"
+    },
     "step": {
       "advanced": {
         "data": {
@@ -16,7 +16,7 @@ from aiohasupervisor.models import GreenOptions, YellowOptions # noqa: F401
 import voluptuous as vol

 from homeassistant.auth.const import GROUP_ID_ADMIN
-from homeassistant.components import panel_custom
+from homeassistant.components import frontend, panel_custom
 from homeassistant.components.homeassistant import async_set_stop_handler
 from homeassistant.components.http import StaticPathConfig
 from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry

@@ -292,6 +292,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
         return False

     async_load_websocket_api(hass)
+    frontend.async_register_built_in_panel(hass, "app")

     host = os.environ["SUPERVISOR"]
     websession = async_get_clientsession(hass)

@@ -6,7 +6,7 @@ from typing import Any

 from aiohttp import web

-from homeassistant.components import frontend, panel_custom
+from homeassistant.components import frontend
 from homeassistant.components.http import HomeAssistantView
 from homeassistant.const import ATTR_ICON
 from homeassistant.core import HomeAssistant

@@ -33,7 +33,7 @@ async def async_setup_addon_panel(hass: HomeAssistant, hassio: HassIO) -> None:
     # _register_panel never suspends and is only
     # a coroutine because it would be a breaking change
     # to make it a normal function
-    await _register_panel(hass, addon, data)
+    _register_panel(hass, addon, data)


 class HassIOAddonPanel(HomeAssistantView):

@@ -58,7 +58,7 @@ class HassIOAddonPanel(HomeAssistantView):
         data = panels[addon]

         # Register panel
-        await _register_panel(self.hass, addon, data)
+        _register_panel(self.hass, addon, data)
         return web.Response()

     async def delete(self, request: web.Request, addon: str) -> web.Response:
@@ -76,18 +76,14 @@ class HassIOAddonPanel(HomeAssistantView):
         return {}


-async def _register_panel(
-    hass: HomeAssistant, addon: str, data: dict[str, Any]
-) -> None:
+def _register_panel(hass: HomeAssistant, addon: str, data: dict[str, Any]):
     """Init coroutine to register the panel."""
-    await panel_custom.async_register_panel(
+    frontend.async_register_built_in_panel(
         hass,
+        "app",
         frontend_url_path=addon,
-        webcomponent_name="hassio-main",
         sidebar_title=data[ATTR_TITLE],
         sidebar_icon=data[ATTR_ICON],
-        js_url="/api/hassio/app/entrypoint.js",
-        embed_iframe=True,
         require_admin=data[ATTR_ADMIN],
-        config={"ingress": addon},
+        config={"addon": addon},
     )
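The rewritten helper above drops the `panel_custom` web component and registers each add-on on the built-in "app" panel instead, which is also why the call sites earlier in the diff no longer `await` it. A minimal caller-side sketch, assuming the call shape shown in the hunk; the `data` dict and its string keys are placeholders for the add-on panel metadata, not the integration's real constants:

```python
from typing import Any

from homeassistant.components import frontend
from homeassistant.core import HomeAssistant, callback


@callback
def register_addon_panel(hass: HomeAssistant, addon: str, data: dict[str, Any]) -> None:
    """Sketch: register an add-on sidebar entry on the built-in app panel."""
    # The registration is synchronous now, so callers invoke it directly
    # instead of awaiting a coroutine.
    frontend.async_register_built_in_panel(
        hass,
        "app",
        frontend_url_path=addon,
        sidebar_title=data["title"],   # placeholder keys; the real code uses
        sidebar_icon=data["icon"],     # ATTR_TITLE / ATTR_ICON / ATTR_ADMIN
        require_admin=data["admin"],
        config={"addon": addon},
    )
```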
@@ -19,6 +19,8 @@ from .const import DOMAIN
 from .coordinator import HDFuryConfigEntry
 from .entity import HDFuryEntity

+PARALLEL_UPDATES = 1
+

 @dataclass(kw_only=True, frozen=True)
 class HDFuryButtonEntityDescription(ButtonEntityDescription):

@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/hdfury",
   "integration_type": "device",
   "iot_class": "local_polling",
-  "quality_scale": "bronze",
-  "requirements": ["hdfury==1.3.1"]
+  "quality_scale": "silver",
+  "requirements": ["hdfury==1.4.2"]
 }

@@ -35,11 +35,11 @@ rules:
   entity-unavailable: done
   integration-owner: done
   log-when-unavailable: done
-  parallel-updates: todo
+  parallel-updates: done
   reauthentication-flow:
     status: exempt
     comment: Integration has no authentication flow.
-  test-coverage: todo
+  test-coverage: done

   # Gold
   devices: done
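The hdfury platforms now declare a module-level `PARALLEL_UPDATES` constant, which is what flips the `parallel-updates` quality-scale rule above to `done`. Home Assistant reads this constant per platform to cap how many entity updates or actions run against the device at the same time; roughly speaking it behaves like a semaphore, as the small illustration below shows (an analogy for the semantics, not Home Assistant's internal implementation).

```python
import asyncio

PARALLEL_UPDATES = 1  # at most one in-flight update per platform


async def update_entity(limit: asyncio.Semaphore, name: str) -> None:
    async with limit:  # only PARALLEL_UPDATES entities enter this block at once
        print(f"polling {name}")
        await asyncio.sleep(0.1)  # stand-in for one request to the device


async def main() -> None:
    limit = asyncio.Semaphore(PARALLEL_UPDATES)
    await asyncio.gather(*(update_entity(limit, f"select_{i}") for i in range(4)))


asyncio.run(main())
```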
@@ -20,6 +20,8 @@ from .const import DOMAIN
 from .coordinator import HDFuryConfigEntry, HDFuryCoordinator
 from .entity import HDFuryEntity

+PARALLEL_UPDATES = 1
+

 @dataclass(kw_only=True, frozen=True)
 class HDFurySelectEntityDescription(SelectEntityDescription):

@@ -77,13 +79,11 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

-    entities: list[HDFuryEntity] = []
-
-    for description in SELECT_PORTS:
-        if description.key not in coordinator.data.info:
-            continue
-
-        entities.append(HDFurySelect(coordinator, description))
+    entities: list[HDFuryEntity] = [
+        HDFurySelect(coordinator, description)
+        for description in SELECT_PORTS
+        if description.key in coordinator.data.info
+    ]

     # Add OPMODE select if present
     if "opmode" in coordinator.data.info:
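The select-platform change above is a pure refactor: the append loop with an early `continue` becomes a filtered list comprehension, so only ports present in `coordinator.data.info` produce entities, exactly as before. A tiny self-contained sketch of the equivalence, using placeholder port keys:

```python
# Placeholder data standing in for coordinator.data.info and SELECT_PORTS.
available = {"porttx0": "0", "opmode": "1"}
ports = ("porttx0", "porttx1")

# Loop form (the removed code path).
entities_loop = []
for key in ports:
    if key not in available:
        continue
    entities_loop.append(key.upper())

# Comprehension form (the new code path) builds the identical list.
entities_comp = [key.upper() for key in ports if key in available]

assert entities_loop == entities_comp == ["PORTTX0"]
print(entities_comp)
```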
@@ -8,6 +8,8 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from .coordinator import HDFuryConfigEntry
 from .entity import HDFuryEntity

+PARALLEL_UPDATES = 0
+
 SENSORS: tuple[SensorEntityDescription, ...] = (
     SensorEntityDescription(
         key="RX0",
Some files were not shown because too many files have changed in this diff.