Mirror of https://github.com/home-assistant/core.git (synced 2026-03-06 22:14:58 +01:00)

Compare commits: frenck-202...gha-builde

191 Commits
| SHA1 |
|---|
| 36cb3e21fe |
| f645b232f9 |
| e8454d9b2c |
| 5fe2ab93ff |
| 0e4698eb99 |
| 698c5eca00 |
| c7776057b7 |
| 02ae9b2f71 |
| e87c677cc4 |
| c3858a0841 |
| 42bc5c3a5f |
| 76bc58da2c |
| fc8719ce35 |
| 60a4a97d9c |
| 284721e1df |
| bfa707d79e |
| 633e2e7469 |
| ad1c6846e7 |
| f75140b626 |
| f83757da7c |
| ca338c98f3 |
| 18a8afb017 |
| f6f7390063 |
| bfa1fd7f1b |
| 0136e9c7eb |
| d88c736016 |
| 780dc178a1 |
| b7ba945dfc |
| 01de7052af |
| 3fe6a31ee9 |
| 95570643ec |
| e3210b0ab9 |
| 2edabf903a |
| 0e4e703b64 |
| 88624f5179 |
| 4a5fdfc0ec |
| c6e91afae4 |
| db5e7e4521 |
| 25489c224b |
| c4f64598a0 |
| 59e579cf5a |
| 831c28cf2c |
| be1affc6ba |
| 94a25b5688 |
| 382940d661 |
| b8e1c0cf2c |
| 0d23d8dc09 |
| b750de1e3e |
| 7d7e8e0bde |
| d6f355355f |
| 5dad64e54c |
| c311ff0464 |
| c45675a01f |
| 9d92141812 |
| 501b973a98 |
| fd4d8137da |
| 33881c1912 |
| 9bdb03dbe8 |
| d2178ba458 |
| 06cdf3c5d2 |
| 84c994ab80 |
| 1d5913d7a5 |
| 05acba37c7 |
| 7496406156 |
| 543f2b1396 |
| 3df2bbda80 |
| b661d37a86 |
| 2102babc6d |
| f3a1cab582 |
| 03c9ce25c8 |
| 8fcabcec16 |
| 2a33096074 |
| 14a9eada09 |
| 4a00f78e90 |
| abef46864e |
| 73b28f1ee2 |
| 7379d41393 |
| 89acb02519 |
| e343e90da2 |
| e9a576494b |
| 4e047b56d8 |
| a1e95c483d |
| 9cb6e02c5f |
| 2c75e3289a |
| 348012a6b8 |
| e0db00e089 |
| b2280198d9 |
| 9cc4a3e427 |
| f94a075641 |
| f1856e6ef6 |
| ed35bafa6c |
| 66e16d728b |
| a806efa7e2 |
| ad4b4bd221 |
| c9c9a149b6 |
| 0f9fdfe2de |
| a76b63912d |
| bc03e13d38 |
| 450aa9757d |
| 158389a4f2 |
| 95e89d5ef1 |
| e107b8e5cd |
| f875b43ede |
| 6242ef78c4 |
| 3c342c0768 |
| 5dba5fc79d |
| 713b7cf36d |
| cb016b014b |
| afb4523f63 |
| 05ad4986ac |
| 42dbd5f98f |
| f58a514ce7 |
| 8fb384a5e1 |
| c24302b5ce |
| 999ad9b642 |
| 36d6b4dafe |
| 06870a2e25 |
| 85eba2bb15 |
| 5dd6dcc215 |
| 8bf894a514 |
| d3c67f2ae1 |
| b60a282b60 |
| 0da1d40a19 |
| aa3be915a0 |
| 0d97bfbc59 |
| fe830337c9 |
| 5210b7d847 |
| 2f7ed4040b |
| 6376ba93a7 |
| fd3a1cc9f4 |
| 208013ab76 |
| 770b3f910e |
| 5dce4a8eda |
| 6fcc9da948 |
| bf93580ff9 |
| 0c2fe045d5 |
| e14a3a6b0e |
| e032740e90 |
| 78ad1e102d |
| 4f97cc7b68 |
| df8f135532 |
| 0066801b0f |
| 0aa66ed6cb |
| 6903463f14 |
| a473010fee |
| ddf7a783a8 |
| 513e4d52fe |
| 17bb14e260 |
| cd1258464b |
| d3f5e0e6d7 |
| e124829364 |
| 87b83dcc1b |
| be9b47539d |
| be6ddc314c |
| c6f8a7b7e4 |
| 53da5612e9 |
| 6cc56b76f9 |
| 03cb65d555 |
| 73dd024933 |
| 1c8c92bf8f |
| 7e041a6759 |
| ee05f14530 |
| f0ba5178b7 |
| df51ac932b |
| e96b5f2eb1 |
| 4e59c89327 |
| 15676021a9 |
| d3197a0d1e |
| 35692b335c |
| cc5c810501 |
| f2681f2dc8 |
| fe0a22c790 |
| 186ab50458 |
| b524c40176 |
| 642864959a |
| 7ef6c34149 |
| 5b32e42b8c |
| 1be8b8e525 |
| 3fae15c430 |
| c7e78568d0 |
| 492b542136 |
| 0f4852d8c2 |
| 737c0c1823 |
| 5fadcb01e9 |
| 2b4f46a739 |
| 44fe37da1f |
| abd4e89577 |
| 033798835a |
| 83c77957c1 |
| b1bc1dc102 |
| 40b8a2c380 |
46 .claude/skills/github-pr-reviewer/SKILL.md (Normal file)

@@ -0,0 +1,46 @@
---
name: github-pr-reviewer
description: Review a GitHub pull request and provide feedback comments. Use when the user says "review the current PR" or asks to review a specific PR.
---

# Review GitHub Pull Request

## Preparation:

- Check if the local commit matches the last one in the PR. If not, check out the PR locally using 'gh pr checkout'.
- CRITICAL: If 'gh pr checkout' fails for ANY reason, you MUST immediately STOP.
  - Do NOT attempt any workarounds.
  - Do NOT proceed with the review.
  - ALERT about the failure and WAIT for instructions.
  - This is a hard requirement - no exceptions.

## Follow these steps:

1. Use 'gh pr view' to get the PR details and description.
2. Use 'gh pr diff' to see all the changes in the PR.
3. Analyze the code changes for:
   - Code quality and style consistency
   - Potential bugs or issues
   - Performance implications
   - Security concerns
   - Test coverage
   - Documentation updates if needed
4. Ensure any existing review comments have been addressed.
5. Generate constructive review comments in the CONSOLE. DO NOT POST TO GITHUB YOURSELF.

## IMPORTANT:

- Just review. DO NOT make any changes.
- Be constructive and specific in your comments.
- Suggest improvements where appropriate.
- Only provide review feedback in the CONSOLE. DO NOT ACT ON GITHUB.
- No need to run tests or linters, just review the code changes.
- No need to highlight things that are already good.

## Output format:

- List specific comments for each file/line that needs attention.
- At the end, summarize with an overall assessment (approve, request changes, or comment) and a bullet-point list of suggested changes, if any.
- Example output:
```
Overall assessment: request changes.
- [CRITICAL] Memory leak in homeassistant/components/sensor/my_sensor.py:143
- [PROBLEM] Inefficient algorithm in homeassistant/helpers/data_processing.py:87
- [SUGGESTION] Improve variable naming in homeassistant/helpers/config_validation.py:45
```
862 .github/copilot-instructions.md (vendored)

@@ -331,864 +331,6 @@ class MyCoordinator(DataUpdateCoordinator[MyData]):

# Skill: Home Assistant Integration knowledge
# Skills

### File Locations
- **Integration code**: `./homeassistant/components/<integration_domain>/`
- **Integration tests**: `./tests/components/<integration_domain>/`

## Integration Templates

### Standard Integration Structure
```
homeassistant/components/my_integration/
├── __init__.py           # Entry point with async_setup_entry
├── manifest.json         # Integration metadata and dependencies
├── const.py              # Domain and constants
├── config_flow.py        # UI configuration flow
├── coordinator.py        # Data update coordinator (if needed)
├── entity.py             # Base entity class (if shared patterns)
├── sensor.py             # Sensor platform
├── strings.json          # User-facing text and translations
├── services.yaml         # Service definitions (if applicable)
└── quality_scale.yaml    # Quality scale rule status
```

An integration can have platforms as needed (e.g., `sensor.py`, `switch.py`, etc.). The following platforms have extra guidelines:
- **Diagnostics**: [`platform-diagnostics.md`](platform-diagnostics.md) for diagnostic data collection

<REFERENCE platform-diagnostics.md>
# Integration Diagnostics

Platform exists as `homeassistant/components/<domain>/diagnostics.py`.

- **Required**: Implement diagnostic data collection
- **Implementation**:
  ```python
  TO_REDACT = [CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE]

  async def async_get_config_entry_diagnostics(
      hass: HomeAssistant, entry: MyConfigEntry
  ) -> dict[str, Any]:
      """Return diagnostics for a config entry."""
      return {
          "entry_data": async_redact_data(entry.data, TO_REDACT),
          "data": entry.runtime_data.data,
      }
  ```
- **Security**: Never expose passwords, tokens, or sensitive coordinates
<END REFERENCE platform-diagnostics.md>

- **Repairs**: [`platform-repairs.md`](platform-repairs.md) for user-actionable repair issues

<REFERENCE platform-repairs.md>
# Repairs platform

Platform exists as `homeassistant/components/<domain>/repairs.py`.

- **Actionable Issues Required**: All repair issues must be actionable for end users
- **Issue Content Requirements**:
  - Clearly explain what is happening
  - Provide specific steps users need to take to resolve the issue
  - Use friendly, helpful language
  - Include relevant context (device names, error details, etc.)
- **Implementation**:
  ```python
  ir.async_create_issue(
      hass,
      DOMAIN,
      "outdated_version",
      is_fixable=False,
      issue_domain=DOMAIN,
      severity=ir.IssueSeverity.ERROR,
      translation_key="outdated_version",
  )
  ```
- **Translation Strings Requirements**: Must contain user-actionable text in `strings.json`:
  ```json
  {
    "issues": {
      "outdated_version": {
        "title": "Device firmware is outdated",
        "description": "Your device firmware version {current_version} is below the minimum required version {min_version}. To fix this issue: 1) Open the manufacturer's mobile app, 2) Navigate to device settings, 3) Select 'Update Firmware', 4) Wait for the update to complete, then 5) Restart Home Assistant."
      }
    }
  }
  ```
- **String Content Must Include**:
  - What the problem is
  - Why it matters
  - Exact steps to resolve (numbered list when multiple steps)
  - What to expect after following the steps
- **Avoid Vague Instructions**: Don't just say "update firmware" - provide specific steps
- **Severity Guidelines**:
  - `CRITICAL`: Reserved for extreme scenarios only
  - `ERROR`: Requires immediate user attention
  - `WARNING`: Indicates future potential breakage
- **Additional Attributes**:
  ```python
  ir.async_create_issue(
      hass, DOMAIN, "issue_id",
      breaks_in_ha_version="2024.1.0",
      is_fixable=True,
      is_persistent=True,
      severity=ir.IssueSeverity.ERROR,
      translation_key="issue_description",
  )
  ```
- Only create issues for problems users can potentially resolve
<END REFERENCE platform-repairs.md>

### Minimal Integration Checklist
- [ ] `manifest.json` with required fields (domain, name, codeowners, etc.)
- [ ] `__init__.py` with `async_setup_entry` and `async_unload_entry`
- [ ] `config_flow.py` with UI configuration support
- [ ] `const.py` with `DOMAIN` constant
- [ ] `strings.json` with at least config flow text
- [ ] Platform files (`sensor.py`, etc.) as needed
- [ ] `quality_scale.yaml` with rule status tracking

## Integration Quality Scale

Home Assistant uses an Integration Quality Scale to ensure code quality and consistency. The quality level determines which rules apply:

### Quality Scale Levels
- **Bronze**: Basic requirements (ALL Bronze rules are mandatory)
- **Silver**: Enhanced functionality
- **Gold**: Advanced features
- **Platinum**: Highest quality standards

### Quality Scale Progression
- **Bronze → Silver**: Add entity unavailability, parallel updates, auth flows
- **Silver → Gold**: Add device management, diagnostics, translations
- **Gold → Platinum**: Add strict typing, async dependencies, websession injection

### How Rules Apply
1. **Check `manifest.json`**: Look for `"quality_scale"` key to determine integration level
2. **Bronze Rules**: Always required for any integration with quality scale
3. **Higher Tier Rules**: Only apply if integration targets that tier or higher
4. **Rule Status**: Check `quality_scale.yaml` in integration folder for:
   - `done`: Rule implemented
   - `exempt`: Rule doesn't apply (with reason in comment)
   - `todo`: Rule needs implementation

### Example `quality_scale.yaml` Structure
```yaml
rules:
  # Bronze (mandatory)
  config-flow: done
  entity-unique-id: done
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.

  # Silver (if targeting Silver+)
  entity-unavailable: done
  parallel-updates: done

  # Gold (if targeting Gold+)
  devices: done
  diagnostics: done

  # Platinum (if targeting Platinum)
  strict-typing: done
```

**When Reviewing/Creating Code**: Always check the integration's quality scale level and exemption status before applying rules.
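To make that check concrete, here is a minimal sketch of how the declared tier and rule status could be read programmatically. The file paths follow the layout above; the use of PyYAML (`yaml.safe_load`) and the helper name `quality_scale_summary` are assumptions for illustration, not Home Assistant APIs.

```python
# Hedged sketch: read an integration's declared tier and unfinished rules.
import json
from pathlib import Path

import yaml  # PyYAML, assumed available


def quality_scale_summary(domain: str, root: Path = Path(".")) -> dict:
    """Return the declared tier and any rules still marked 'todo'."""
    integration = root / "homeassistant" / "components" / domain
    manifest = json.loads((integration / "manifest.json").read_text())
    tier = manifest.get("quality_scale", "none")

    todo_rules: list[str] = []
    rules_file = integration / "quality_scale.yaml"
    if rules_file.exists():
        for name, status in yaml.safe_load(rules_file.read_text())["rules"].items():
            # Entries are either a bare status string or a mapping with
            # 'status' (plus an optional 'comment' for exemptions).
            value = status if isinstance(status, str) else status.get("status")
            if value == "todo":
                todo_rules.append(name)
    return {"tier": tier, "todo": todo_rules}
```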
## Code Organization

### Core Locations
- Shared constants: `homeassistant/const.py` (use these instead of hardcoding)
- Integration structure:
  - `homeassistant/components/{domain}/const.py` - Constants
  - `homeassistant/components/{domain}/models.py` - Data models
  - `homeassistant/components/{domain}/coordinator.py` - Update coordinator
  - `homeassistant/components/{domain}/config_flow.py` - Configuration flow
  - `homeassistant/components/{domain}/{platform}.py` - Platform implementations

### Common Modules
- **coordinator.py**: Centralize data fetching logic
  ```python
  class MyCoordinator(DataUpdateCoordinator[MyData]):
      def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
          super().__init__(
              hass,
              logger=LOGGER,
              name=DOMAIN,
              update_interval=timedelta(minutes=1),
              config_entry=config_entry,  # ✅ Pass config_entry - it's accepted and recommended
          )
  ```
- **entity.py**: Base entity definitions to reduce duplication
  ```python
  class MyEntity(CoordinatorEntity[MyCoordinator]):
      _attr_has_entity_name = True
  ```

### Runtime Data Storage
- **Use ConfigEntry.runtime_data**: Store non-persistent runtime data
  ```python
  type MyIntegrationConfigEntry = ConfigEntry[MyClient]

  async def async_setup_entry(hass: HomeAssistant, entry: MyIntegrationConfigEntry) -> bool:
      client = MyClient(entry.data[CONF_HOST])
      entry.runtime_data = client
  ```

### Manifest Requirements
- **Required Fields**: `domain`, `name`, `codeowners`, `integration_type`, `documentation`, `requirements`
- **Integration Types**: `device`, `hub`, `service`, `system`, `helper`
- **IoT Class**: Always specify connectivity method (e.g., `cloud_polling`, `local_polling`, `local_push`)
- **Discovery Methods**: Add when applicable: `zeroconf`, `dhcp`, `bluetooth`, `ssdp`, `usb`
- **Dependencies**: Include platform dependencies (e.g., `application_credentials`, `bluetooth_adapters`)

### Config Flow Patterns
- **Version Control**: Always set `VERSION = 1` and `MINOR_VERSION = 1`
- **Unique ID Management**:
  ```python
  await self.async_set_unique_id(device_unique_id)
  self._abort_if_unique_id_configured()
  ```
- **Error Handling**: Define errors in `strings.json` under `config.error`
- **Step Methods**: Use standard naming (`async_step_user`, `async_step_discovery`, etc.)

### Integration Ownership
- **manifest.json**: Add GitHub usernames to `codeowners`:
  ```json
  {
    "domain": "my_integration",
    "name": "My Integration",
    "codeowners": ["@me"]
  }
  ```

### Async Dependencies (Platinum)
- **Requirement**: All dependencies must use asyncio
- Ensures efficient task handling without thread context switching
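As an illustration of the shape this implies, here is a hedged sketch of an asyncio-native dependency client; the `MyClient` name and endpoint are hypothetical, and aiohttp is just one common choice:

```python
# Hedged sketch of an asyncio-native dependency (names are hypothetical).
import aiohttp


class MyClient:
    """Client that does all I/O on the event loop, without worker threads."""

    def __init__(self, host: str, session: aiohttp.ClientSession) -> None:
        self._host = host
        self._session = session

    async def fetch_data(self) -> dict:
        # A single awaited HTTP call; nothing blocks the event loop.
        async with self._session.get(f"http://{self._host}/api/data") as resp:
            resp.raise_for_status()
            return await resp.json()
```

Accepting the session in the constructor also lines up with the websession injection described next.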
### WebSession Injection (Platinum)
- **Pass WebSession**: Support passing web sessions to dependencies
  ```python
  async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
      """Set up integration from config entry."""
      client = MyClient(entry.data[CONF_HOST], async_get_clientsession(hass))
  ```
- For cookies: Use `async_create_clientsession` (aiohttp) or `create_async_httpx_client` (httpx)

### Data Update Coordinator
- **Standard Pattern**: Use for efficient data management
  ```python
  class MyCoordinator(DataUpdateCoordinator):
      def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
          super().__init__(
              hass,
              logger=LOGGER,
              name=DOMAIN,
              update_interval=timedelta(minutes=5),
              config_entry=config_entry,  # ✅ Pass config_entry - it's accepted and recommended
          )
          self.client = client

      async def _async_update_data(self):
          try:
              return await self.client.fetch_data()
          except ApiError as err:
              raise UpdateFailed(f"API communication error: {err}")
  ```
- **Error Types**: Use `UpdateFailed` for API errors, `ConfigEntryAuthFailed` for auth issues
- **Config Entry**: Always pass `config_entry` parameter to coordinator - it's accepted and recommended
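A short sketch of how those two error types might be separated in `_async_update_data`; the `AuthError`/`ApiError` names stand in for whatever the client library actually raises:

```python
# Hedged sketch: map client exceptions onto the coordinator error types.
async def _async_update_data(self) -> MyData:
    try:
        return await self.client.fetch_data()
    except AuthError as err:
        # Starts the reauthentication flow for this config entry.
        raise ConfigEntryAuthFailed("Credentials expired") from err
    except ApiError as err:
        # Marks dependent entities unavailable until the next good refresh.
        raise UpdateFailed(f"API communication error: {err}") from err
```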
## Integration Guidelines

### Configuration Flow
- **UI Setup Required**: All integrations must support configuration via UI
- **Manifest**: Set `"config_flow": true` in `manifest.json`
- **Data Storage**:
  - Connection-critical config: Store in `ConfigEntry.data`
  - Non-critical settings: Store in `ConfigEntry.options`
- **Validation**: Always validate user input before creating entries
- **Config Entry Naming**:
  - ❌ Do NOT allow users to set config entry names in config flows
  - Names are automatically generated or can be customized later in UI
  - ✅ Exception: Helper integrations MAY allow custom names in config flow
- **Connection Testing**: Test device/service connection during config flow:
  ```python
  try:
      await client.get_data()
  except MyException:
      errors["base"] = "cannot_connect"
  ```
- **Duplicate Prevention**: Prevent duplicate configurations:
  ```python
  # Using unique ID
  await self.async_set_unique_id(identifier)
  self._abort_if_unique_id_configured()

  # Using unique data
  self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
  ```

### Reauthentication Support
- **Required Method**: Implement `async_step_reauth` in config flow
- **Credential Updates**: Allow users to update credentials without re-adding
- **Validation**: Verify account matches existing unique ID:
  ```python
  await self.async_set_unique_id(user_id)
  self._abort_if_unique_id_mismatch(reason="wrong_account")
  return self.async_update_reload_and_abort(
      self._get_reauth_entry(),
      data_updates={CONF_API_TOKEN: user_input[CONF_API_TOKEN]}
  )
  ```

### Reconfiguration Flow
- **Purpose**: Allow configuration updates without removing device
- **Implementation**: Add `async_step_reconfigure` method (see the sketch below)
- **Validation**: Prevent changing underlying account with `_abort_if_unique_id_mismatch`
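A minimal sketch of such a step, modeled on the reauthentication example above; the form schema, the `CONF_HOST` field, and the `_async_probe_device` helper are illustrative, not prescribed:

```python
# Hedged sketch of a reconfigure step (fields and helpers are illustrative).
async def async_step_reconfigure(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Let the user change connection settings without removing the entry."""
    if user_input is not None:
        # Probe the new host (hypothetical helper), then make sure it is
        # still the same device/account before persisting the new data.
        device_unique_id = await _async_probe_device(user_input[CONF_HOST])
        await self.async_set_unique_id(device_unique_id)
        self._abort_if_unique_id_mismatch(reason="wrong_account")
        return self.async_update_reload_and_abort(
            self._get_reconfigure_entry(),
            data_updates={CONF_HOST: user_input[CONF_HOST]},
        )
    return self.async_show_form(
        step_id="reconfigure",
        data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
    )
```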
### Device Discovery
- **Manifest Configuration**: Add discovery method (zeroconf, dhcp, etc.)
  ```json
  {
    "zeroconf": ["_mydevice._tcp.local."]
  }
  ```
- **Discovery Handler**: Implement appropriate `async_step_*` method:
  ```python
  async def async_step_zeroconf(self, discovery_info):
      """Handle zeroconf discovery."""
      await self.async_set_unique_id(discovery_info.properties["serialno"])
      self._abort_if_unique_id_configured(updates={CONF_HOST: discovery_info.host})
  ```
- **Network Updates**: Use discovery to update dynamic IP addresses

### Network Discovery Implementation
- **Zeroconf/mDNS**: Use async instances
  ```python
  aiozc = await zeroconf.async_get_async_instance(hass)
  ```
- **SSDP Discovery**: Register callbacks with cleanup
  ```python
  entry.async_on_unload(
      ssdp.async_register_callback(
          hass, _async_discovered_device,
          {"st": "urn:schemas-upnp-org:device:ZonePlayer:1"}
      )
  )
  ```

### Bluetooth Integration
- **Manifest Dependencies**: Add `bluetooth_adapters` to dependencies
- **Connectable**: Set `"connectable": true` for connection-required devices
- **Scanner Usage**: Always use shared scanner instance
  ```python
  scanner = bluetooth.async_get_scanner()
  entry.async_on_unload(
      bluetooth.async_register_callback(
          hass, _async_discovered_device,
          {"service_uuid": "example_uuid"},
          bluetooth.BluetoothScanningMode.ACTIVE
      )
  )
  ```
- **Connection Handling**: Never reuse `BleakClient` instances, use 10+ second timeouts

### Setup Validation
- **Test Before Setup**: Verify integration can be set up in `async_setup_entry`
- **Exception Handling** (see the sketch below):
  - `ConfigEntryNotReady`: Device offline or temporary failure
  - `ConfigEntryAuthFailed`: Authentication issues
  - `ConfigEntryError`: Unresolvable setup problems
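Putting those exceptions together, a hedged sketch of the probe-then-setup pattern; the `MyClient`, `MyAuthError`, and `MyConnectError` names are placeholders for the real client library:

```python
# Hedged sketch: translate client errors into the setup exceptions above.
async def async_setup_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
    client = MyClient(entry.data[CONF_HOST], async_get_clientsession(hass))
    try:
        await client.get_data()  # Probe the device before finishing setup
    except MyAuthError as err:
        raise ConfigEntryAuthFailed("Invalid credentials") from err
    except MyConnectError as err:
        # Temporary failure: Home Assistant retries setup with backoff.
        raise ConfigEntryNotReady(f"Device unreachable: {err}") from err
    entry.runtime_data = client
    return True
```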
### Config Entry Unloading
- **Required**: Implement `async_unload_entry` for runtime removal/reload
- **Platform Unloading**: Use `hass.config_entries.async_unload_platforms`
- **Cleanup**: Register callbacks with `entry.async_on_unload`:
  ```python
  async def async_unload_entry(hass: HomeAssistant, entry: MyConfigEntry) -> bool:
      """Unload a config entry."""
      if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
          entry.runtime_data.listener()  # Clean up resources
      return unload_ok
  ```

### Service Actions
- **Registration**: Register all service actions in `async_setup`, NOT in `async_setup_entry`
- **Validation**: Check config entry existence and loaded state:
  ```python
  async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
      async def service_action(call: ServiceCall) -> ServiceResponse:
          if not (entry := hass.config_entries.async_get_entry(call.data[ATTR_CONFIG_ENTRY_ID])):
              raise ServiceValidationError("Entry not found")
          if entry.state is not ConfigEntryState.LOADED:
              raise ServiceValidationError("Entry not loaded")
  ```
- **Exception Handling**: Raise appropriate exceptions:
  ```python
  # For invalid input
  if end_date < start_date:
      raise ServiceValidationError("End date must be after start date")

  # For service errors
  try:
      await client.set_schedule(start_date, end_date)
  except MyConnectionError as err:
      raise HomeAssistantError("Could not connect to the schedule") from err
  ```

### Service Registration Patterns
- **Entity Services**: Register on platform setup
  ```python
  platform.async_register_entity_service(
      "my_entity_service",
      {vol.Required("parameter"): cv.string},
      "handle_service_method"
  )
  ```
- **Service Schema**: Always validate input
  ```python
  SERVICE_SCHEMA = vol.Schema({
      vol.Required("entity_id"): cv.entity_ids,
      vol.Required("parameter"): cv.string,
      vol.Optional("timeout", default=30): cv.positive_int,
  })
  ```
- **Services File**: Create `services.yaml` with descriptions and field definitions

### Polling
- Use update coordinator pattern when possible
- **Polling intervals are NOT user-configurable**: Never add scan_interval, update_interval, or polling frequency options to config flows or config entries
- **Integration determines intervals**: Set `update_interval` programmatically based on integration logic, not user input
- **Minimum Intervals**:
  - Local network: 5 seconds
  - Cloud services: 60 seconds
- **Parallel Updates**: Specify number of concurrent updates:
  ```python
  PARALLEL_UPDATES = 1  # Serialize updates to prevent overwhelming device
  # OR
  PARALLEL_UPDATES = 0  # Unlimited (for coordinator-based or read-only)
  ```

## Entity Development

### Unique IDs
- **Required**: Every entity must have a unique ID for registry tracking
- Must be unique per platform (not per integration)
- Don't include integration domain or platform in ID
- **Implementation**:
  ```python
  class MySensor(SensorEntity):
      def __init__(self, device_id: str) -> None:
          self._attr_unique_id = f"{device_id}_temperature"
  ```

**Acceptable ID Sources**:
- Device serial numbers
- MAC addresses (formatted using `format_mac` from device registry; see the sketch below)
- Physical identifiers (printed/EEPROM)
- Config entry ID as last resort: `f"{entry.entry_id}-battery"`

**Never Use**:
- IP addresses, hostnames, URLs
- Device names
- Email addresses, usernames
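For the MAC address case specifically, a small sketch of the normalization step (where the MAC comes from is up to the integration):

```python
# Hedged sketch: normalize a MAC address before building a unique ID.
from homeassistant.helpers.device_registry import format_mac


class MySensor(SensorEntity):
    def __init__(self, mac: str) -> None:
        # format_mac normalizes case and separators (e.g. "AA-BB-CC-DD-EE-FF"
        # becomes "aa:bb:cc:dd:ee:ff"), so the same adapter always produces
        # the same unique ID regardless of how the MAC was reported.
        self._attr_unique_id = f"{format_mac(mac)}_temperature"
```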
### Entity Descriptions
- **Lambda/Anonymous Functions**: Often used in EntityDescription for value transformation
- **Multiline Lambdas**: When lambdas exceed line length, wrap in parentheses for readability
- **Bad pattern**:
  ```python
  SensorEntityDescription(
      key="temperature",
      name="Temperature",
      value_fn=lambda data: round(data["temp_value"] * 1.8 + 32, 1) if data.get("temp_value") is not None else None,  # ❌ Too long
  )
  ```
- **Good pattern**:
  ```python
  SensorEntityDescription(
      key="temperature",
      name="Temperature",
      value_fn=lambda data: (  # ✅ Parenthesis on same line as lambda
          round(data["temp_value"] * 1.8 + 32, 1)
          if data.get("temp_value") is not None
          else None
      ),
  )
  ```

### Entity Naming
- **Use has_entity_name**: Set `_attr_has_entity_name = True`
- **For specific fields**:
  ```python
  class MySensor(SensorEntity):
      _attr_has_entity_name = True

      def __init__(self, device: Device, field: str) -> None:
          self._attr_device_info = DeviceInfo(
              identifiers={(DOMAIN, device.id)},
              name=device.name,
          )
          self._attr_name = field  # e.g., "temperature", "humidity"
  ```
- **For device itself**: Set `_attr_name = None`

### Event Lifecycle Management
- **Subscribe in `async_added_to_hass`**:
  ```python
  async def async_added_to_hass(self) -> None:
      """Subscribe to events."""
      self.async_on_remove(
          self.client.events.subscribe("my_event", self._handle_event)
      )
  ```
- **Unsubscribe in `async_will_remove_from_hass`** if not using `async_on_remove`
- Never subscribe in `__init__` or other methods

### State Handling
- Unknown values: Use `None` (not "unknown" or "unavailable")
- Availability: Implement `available()` property instead of using "unavailable" state

### Entity Availability
- **Mark Unavailable**: When data cannot be fetched from device/service
- **Coordinator Pattern**:
  ```python
  @property
  def available(self) -> bool:
      """Return if entity is available."""
      return super().available and self.identifier in self.coordinator.data
  ```
- **Direct Update Pattern**:
  ```python
  async def async_update(self) -> None:
      """Update entity."""
      try:
          data = await self.client.get_data()
      except MyException:
          self._attr_available = False
      else:
          self._attr_available = True
          self._attr_native_value = data.value
  ```

### Extra State Attributes
- All attribute keys must always be present
- Unknown values: Use `None`
- Provide descriptive attributes
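A hedged sketch of what that looks like in practice; the attribute names and the coordinator-backed data shape are illustrative:

```python
# Hedged sketch: stable attribute keys, with None for unknown values.
@property
def extra_state_attributes(self) -> dict[str, Any]:
    data = self.coordinator.data.get(self.identifier)
    return {
        # Keys are always present; values fall back to None when unknown.
        "signal_strength": data.rssi if data else None,
        "last_report": data.timestamp if data else None,
    }
```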
## Device Management

### Device Registry
- **Create Devices**: Group related entities under devices
- **Device Info**: Provide comprehensive metadata:
  ```python
  _attr_device_info = DeviceInfo(
      connections={(CONNECTION_NETWORK_MAC, device.mac)},
      identifiers={(DOMAIN, device.id)},
      name=device.name,
      manufacturer="My Company",
      model="My Sensor",
      sw_version=device.version,
  )
  ```
- For services: Add `entry_type=DeviceEntryType.SERVICE`

### Dynamic Device Addition
- **Auto-detect New Devices**: After initial setup
- **Implementation Pattern**:
  ```python
  def _check_device() -> None:
      current_devices = set(coordinator.data)
      new_devices = current_devices - known_devices
      if new_devices:
          known_devices.update(new_devices)
          async_add_entities([MySensor(coordinator, device_id) for device_id in new_devices])

  entry.async_on_unload(coordinator.async_add_listener(_check_device))
  ```

### Stale Device Removal
- **Auto-remove**: When devices disappear from hub/account
- **Device Registry Update**:
  ```python
  device_registry.async_update_device(
      device_id=device.id,
      remove_config_entry_id=self.config_entry.entry_id,
  )
  ```
- **Manual Deletion**: Implement `async_remove_config_entry_device` when needed (see the sketch below)
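A hedged sketch of that hook; the signature follows the documented callback, while the coordinator-based membership test is illustrative:

```python
# Hedged sketch: only allow removal of devices the backend no longer reports.
async def async_remove_config_entry_device(
    hass: HomeAssistant,
    config_entry: MyConfigEntry,
    device_entry: dr.DeviceEntry,
) -> bool:
    """Return True if the user may delete the device from the UI."""
    coordinator = config_entry.runtime_data
    return not any(
        identifier
        for identifier in device_entry.identifiers
        if identifier[0] == DOMAIN and identifier[1] in coordinator.data
    )
```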
### Entity Categories
- **Required**: Assign appropriate category to entities
- **Implementation**: Set `_attr_entity_category`
  ```python
  class MySensor(SensorEntity):
      _attr_entity_category = EntityCategory.DIAGNOSTIC
  ```
- Categories include: `DIAGNOSTIC` for system/technical information

### Device Classes
- **Use When Available**: Set appropriate device class for entity type
  ```python
  class MyTemperatureSensor(SensorEntity):
      _attr_device_class = SensorDeviceClass.TEMPERATURE
  ```
- Provides context for: unit conversion, voice control, UI representation

### Disabled by Default
- **Disable Noisy/Less Popular Entities**: Reduce resource usage
  ```python
  class MySignalStrengthSensor(SensorEntity):
      _attr_entity_registry_enabled_default = False
  ```
- Target: frequently changing states, technical diagnostics

### Entity Translations
- **Required with has_entity_name**: Support international users
- **Implementation**:
  ```python
  class MySensor(SensorEntity):
      _attr_has_entity_name = True
      _attr_translation_key = "phase_voltage"
  ```
- Create `strings.json` with translations:
  ```json
  {
    "entity": {
      "sensor": {
        "phase_voltage": {
          "name": "Phase voltage"
        }
      }
    }
  }
  ```

### Exception Translations (Gold)
- **Translatable Errors**: Use translation keys for user-facing exceptions
- **Implementation**:
  ```python
  raise ServiceValidationError(
      translation_domain=DOMAIN,
      translation_key="end_date_before_start_date",
  )
  ```
- Add to `strings.json`:
  ```json
  {
    "exceptions": {
      "end_date_before_start_date": {
        "message": "The end date cannot be before the start date."
      }
    }
  }
  ```

### Icon Translations (Gold)
- **Dynamic Icons**: Support state and range-based icon selection
- **State-based Icons**:
  ```json
  {
    "entity": {
      "sensor": {
        "tree_pollen": {
          "default": "mdi:tree",
          "state": {
            "high": "mdi:tree-outline"
          }
        }
      }
    }
  }
  ```
- **Range-based Icons** (for numeric values):
  ```json
  {
    "entity": {
      "sensor": {
        "battery_level": {
          "default": "mdi:battery-unknown",
          "range": {
            "0": "mdi:battery-outline",
            "90": "mdi:battery-90",
            "100": "mdi:battery"
          }
        }
      }
    }
  }
  ```

## Testing Requirements

- **Location**: `tests/components/{domain}/`
- **Coverage Requirement**: Above 95% test coverage for all modules
- **Best Practices**:
  - Use pytest fixtures from `tests.common`
  - Mock all external dependencies
  - Use snapshots for complex data structures
  - Follow existing test patterns

### Config Flow Testing
- **100% Coverage Required**: All config flow paths must be tested
- **Test Scenarios**:
  - All flow initiation methods (user, discovery, import)
  - Successful configuration paths
  - Error recovery scenarios
  - Prevention of duplicate entries
  - Flow completion after errors

### Testing
- **Integration-specific tests** (recommended):
  ```bash
  pytest ./tests/components/<integration_domain> \
    --cov=homeassistant.components.<integration_domain> \
    --cov-report term-missing \
    --durations-min=1 \
    --durations=0 \
    --numprocesses=auto
  ```

### Testing Best Practices
- **Never access `hass.data` directly** - Use fixtures and proper integration setup instead
- **Use snapshot testing** - For verifying entity states and attributes
- **Test through integration setup** - Don't test entities in isolation
- **Mock external APIs** - Use fixtures with realistic JSON data
- **Verify registries** - Ensure entities are properly registered with devices

### Config Flow Testing Template
```python
async def test_user_flow_success(hass, mock_api):
    """Test successful user flow."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == FlowResultType.FORM
    assert result["step_id"] == "user"

    # Test form submission
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=TEST_USER_INPUT
    )
    assert result["type"] == FlowResultType.CREATE_ENTRY
    assert result["title"] == "My Device"
    assert result["data"] == TEST_USER_INPUT


async def test_flow_connection_error(hass, mock_api_error):
    """Test connection error handling."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=TEST_USER_INPUT
    )
    assert result["type"] == FlowResultType.FORM
    assert result["errors"] == {"base": "cannot_connect"}
```

### Entity Testing Patterns
```python
@pytest.fixture
def platforms() -> list[Platform]:
    """Overridden fixture to specify platforms to test."""
    return [Platform.SENSOR]  # Or another specific platform as needed.


@pytest.mark.usefixtures("entity_registry_enabled_by_default", "init_integration")
async def test_entities(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
    device_registry: dr.DeviceRegistry,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test the sensor entities."""
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)

    # Ensure entities are correctly assigned to device
    device_entry = device_registry.async_get_device(
        identifiers={(DOMAIN, "device_unique_id")}
    )
    assert device_entry
    entity_entries = er.async_entries_for_config_entry(
        entity_registry, mock_config_entry.entry_id
    )
    for entity_entry in entity_entries:
        assert entity_entry.device_id == device_entry.id
```

### Mock Patterns
```python
# Modern integration fixture setup
@pytest.fixture
def mock_config_entry() -> MockConfigEntry:
    """Return the default mocked config entry."""
    return MockConfigEntry(
        title="My Integration",
        domain=DOMAIN,
        data={CONF_HOST: "127.0.0.1", CONF_API_KEY: "test_key"},
        unique_id="device_unique_id",
    )


@pytest.fixture
def mock_device_api() -> Generator[MagicMock]:
    """Return a mocked device API."""
    with patch("homeassistant.components.my_integration.MyDeviceAPI", autospec=True) as api_mock:
        api = api_mock.return_value
        api.get_data.return_value = MyDeviceData.from_json(
            load_fixture("device_data.json", DOMAIN)
        )
        yield api


@pytest.fixture
def platforms() -> list[Platform]:
    """Fixture to specify platforms to test."""
    return PLATFORMS


@pytest.fixture
async def init_integration(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_device_api: MagicMock,
    platforms: list[Platform],
) -> MockConfigEntry:
    """Set up the integration for testing."""
    mock_config_entry.add_to_hass(hass)

    with patch("homeassistant.components.my_integration.PLATFORMS", platforms):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()

    return mock_config_entry
```

## Debugging & Troubleshooting

### Common Issues & Solutions
- **Integration won't load**: Check `manifest.json` syntax and required fields
- **Entities not appearing**: Verify `unique_id` and `has_entity_name` implementation
- **Config flow errors**: Check `strings.json` entries and error handling
- **Discovery not working**: Verify manifest discovery configuration and callbacks
- **Tests failing**: Check mock setup and async context

### Debug Logging Setup
```python
# Enable debug logging in tests
caplog.set_level(logging.DEBUG, logger="my_integration")

# In integration code - use proper logging
_LOGGER = logging.getLogger(__name__)
_LOGGER.debug("Processing data: %s", data)  # Use lazy logging
```

### Validation Commands
```bash
# Check specific integration
python -m script.hassfest --integration-path homeassistant/components/my_integration

# Validate quality scale
# Check quality_scale.yaml against current rules

# Run integration tests with coverage
pytest ./tests/components/my_integration \
  --cov=homeassistant.components.my_integration \
  --cov-report term-missing
```

- Home Assistant Integration knowledge: .claude/skills/integrations/SKILL.md
243 .github/workflows/builder.yml (vendored)
@@ -10,7 +10,6 @@ on:

env:
  BUILD_TYPE: core
  DEFAULT_PYTHON: "3.14.2"
  PIP_TIMEOUT: 60
  UV_HTTP_TIMEOUT: 60
  UV_SYSTEM_PYTHON: "true"

@@ -36,16 +35,17 @@ jobs:
      channel: ${{ steps.version.outputs.channel }}
      publish: ${{ steps.version.outputs.publish }}
      architectures: ${{ env.ARCHITECTURES }}
      base_image_version: ${{ env.BASE_IMAGE_VERSION }}
    steps:
      - name: Checkout the repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
      - name: Set up Python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          python-version-file: ".python-version"

      - name: Get information
        id: info

@@ -75,44 +75,9 @@ jobs:
        env:
          LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}

      - name: Archive translations
        shell: bash
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        with:
          name: translations
          path: translations.tar.gz
          if-no-files-found: error

  build_base:
    name: Build ${{ matrix.arch }} base core image
    if: github.repository_owner == 'home-assistant'
    needs: init
    runs-on: ${{ matrix.os }}
    permissions:
      contents: read # To check out the repository
      packages: write # To push to GHCR
      id-token: write # For cosign signing
    strategy:
      fail-fast: false
      matrix:
        arch: ${{ fromJson(needs.init.outputs.architectures) }}
        include:
          - arch: amd64
            os: ubuntu-latest
          - arch: aarch64
            os: ubuntu-24.04-arm
    steps:
      - name: Checkout the repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false

      - name: Download nightly wheels of frontend
        if: needs.init.outputs.channel == 'dev'
        uses: dawidd6/action-download-artifact@5c98f0b039f36ef966fdb7dfa9779262785ecb05 # v14
        if: steps.version.outputs.channel == 'dev'
        uses: dawidd6/action-download-artifact@2536c51d3d126276eb39f74d6bc9c72ac6ef30d3 # v16
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: home-assistant/frontend

@@ -122,8 +87,8 @@ jobs:
          name: wheels

      - name: Download nightly wheels of intents
        if: needs.init.outputs.channel == 'dev'
        uses: dawidd6/action-download-artifact@5c98f0b039f36ef966fdb7dfa9779262785ecb05 # v14
        if: steps.version.outputs.channel == 'dev'
        uses: dawidd6/action-download-artifact@2536c51d3d126276eb39f74d6bc9c72ac6ef30d3 # v16
        with:
          github_token: ${{secrets.GITHUB_TOKEN}}
          repo: OHF-Voice/intents-package

@@ -132,18 +97,12 @@ jobs:
          workflow_conclusion: success
          name: package

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        if: needs.init.outputs.channel == 'dev'
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}

      - name: Adjust nightly version
        if: needs.init.outputs.channel == 'dev'
        if: steps.version.outputs.channel == 'dev'
        shell: bash
        env:
          UV_PRERELEASE: allow
          VERSION: ${{ needs.init.outputs.version }}
          VERSION: ${{ steps.version.outputs.version }}
        run: |
          python3 -m pip install "$(grep '^uv' < requirements.txt)"
          uv pip install packaging tomli

@@ -181,92 +140,72 @@ jobs:
            sed -i "s|home-assistant-intents==.*||" requirements_all.txt requirements.txt
          fi

      - name: Download translations
        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
        with:
          name: translations

      - name: Extract translations
        run: |
          tar xvf translations.tar.gz
          rm translations.tar.gz

      - name: Write meta info file
        shell: bash
        run: |
          echo "${GITHUB_SHA};${GITHUB_REF};${GITHUB_EVENT_NAME};${GITHUB_ACTOR}" > rootfs/OFFICIAL_IMAGE

      - name: Login to GitHub Container Registry
        uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
      - name: Upload build context overlay
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
          name: build-context
          if-no-files-found: ignore
          path: |
            homeassistant/components/*/translations/
            rootfs/OFFICIAL_IMAGE
            home_assistant_frontend-*.whl
            home_assistant_intents-*.whl
            homeassistant/const.py
            homeassistant/components/frontend/manifest.json
            homeassistant/components/conversation/manifest.json
            homeassistant/package_constraints.txt
            requirements_all.txt
            requirements.txt
            pyproject.toml

      - name: Install Cosign
        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
  build_base:
    name: Build ${{ matrix.arch }} base core image
    if: github.repository_owner == 'home-assistant'
    needs: init
    runs-on: ${{ matrix.os }}
    permissions:
      contents: read # To check out the repository
      packages: write # To push to GHCR
      id-token: write # For cosign signing
    strategy:
      fail-fast: false
      matrix:
        include:
          - arch: amd64
            os: ubuntu-24.04
          - arch: aarch64
            os: ubuntu-24.04-arm
    steps:
      - name: Checkout the repository
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          cosign-release: "v2.5.3"
          persist-credentials: false

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0

      - name: Build variables
        id: vars
        shell: bash
        env:
          ARCH: ${{ matrix.arch }}
        run: |
          echo "base_image=ghcr.io/home-assistant/${ARCH}-homeassistant-base:${BASE_IMAGE_VERSION}" >> "$GITHUB_OUTPUT"
          echo "cache_image=ghcr.io/home-assistant/${ARCH}-homeassistant:latest" >> "$GITHUB_OUTPUT"
          echo "created=$(date --rfc-3339=seconds --utc)" >> "$GITHUB_OUTPUT"

      - name: Verify base image signature
        env:
          BASE_IMAGE: ${{ steps.vars.outputs.base_image }}
        run: |
          cosign verify \
            --certificate-oidc-issuer https://token.actions.githubusercontent.com \
            --certificate-identity-regexp "https://github.com/home-assistant/docker/.*" \
            "${BASE_IMAGE}"

      - name: Verify cache image signature
        id: cache
        continue-on-error: true
        env:
          CACHE_IMAGE: ${{ steps.vars.outputs.cache_image }}
        run: |
          cosign verify \
            --certificate-oidc-issuer https://token.actions.githubusercontent.com \
            --certificate-identity-regexp "https://github.com/home-assistant/core/.*" \
            "${CACHE_IMAGE}"
      - name: Download build context overlay
        uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
        with:
          name: build-context

      - name: Build base image
        id: build
        uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6.19.2
        uses: home-assistant/builder/actions/build-image@gha-builder # zizmor: ignore[unpinned-uses]
        with:
          context: .
          file: ./Dockerfile
          platforms: ${{ steps.vars.outputs.platform }}
          push: true
          cache-from: ${{ steps.cache.outcome == 'success' && steps.vars.outputs.cache_image || '' }}
          arch: ${{ matrix.arch }}
          build-args: |
            BUILD_FROM=${{ steps.vars.outputs.base_image }}
          tags: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
          outputs: type=image,push=true,compression=zstd,compression-level=9,force-compression=true,oci-mediatypes=true
          labels: |
            io.hass.arch=${{ matrix.arch }}
            io.hass.version=${{ needs.init.outputs.version }}
            org.opencontainers.image.created=${{ steps.vars.outputs.created }}
            org.opencontainers.image.version=${{ needs.init.outputs.version }}

      - name: Sign image
        env:
          ARCH: ${{ matrix.arch }}
          VERSION: ${{ needs.init.outputs.version }}
          DIGEST: ${{ steps.build.outputs.digest }}
        run: |
          cosign sign --yes "ghcr.io/home-assistant/${ARCH}-homeassistant:${VERSION}@${DIGEST}"
            BUILD_FROM=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ needs.init.outputs.base_image_version }}
          cache-gha: false
          container-registry-password: ${{ secrets.GITHUB_TOKEN }}
          context: .
          cosign-base-identity: "https://github.com/home-assistant/docker/.*"
          cosign-base-verify: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ needs.init.outputs.base_image_version }}
          image: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant
          image-tags: ${{ needs.init.outputs.version }}
          push: true
          version: ${{ needs.init.outputs.version }}

  build_machine:
    name: Build ${{ matrix.machine }} machine core image

@@ -315,35 +254,38 @@ jobs:
        with:
          persist-credentials: false

      - name: Set build additional args
      - name: Compute extra tags
        id: tags
        shell: bash
        env:
          VERSION: ${{ needs.init.outputs.version }}
        run: |
          # Create general tags
          if [[ "${VERSION}" =~ d ]]; then
            echo "BUILD_ARGS=--additional-tag dev" >> $GITHUB_ENV
            echo "extra_tags=dev" >> "$GITHUB_OUTPUT"
          elif [[ "${VERSION}" =~ b ]]; then
            echo "BUILD_ARGS=--additional-tag beta" >> $GITHUB_ENV
            echo "extra_tags=beta" >> "$GITHUB_OUTPUT"
          else
            echo "BUILD_ARGS=--additional-tag stable" >> $GITHUB_ENV
            echo "extra_tags=stable" >> "$GITHUB_OUTPUT"
          fi

      - name: Login to GitHub Container Registry
        uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
      - name: Build machine image
        uses: home-assistant/builder/actions/build-image@gha-builder # zizmor: ignore[unpinned-uses]
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build base image
        uses: home-assistant/builder@6cb4fd3d1338b6e22d0958a4bcb53e0965ea63b4 # 2026.02.1
        with:
          image: ${{ matrix.arch }}
          args: |
            $BUILD_ARGS \
            --target /data/machine \
            --cosign \
            --machine "${{ needs.init.outputs.version }}=${{ matrix.machine }}"
          arch: ${{ matrix.arch }}
          build-args: |
            BUILD_FROM=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
          cache-gha: false
          container-registry-password: ${{ secrets.GITHUB_TOKEN }}
          context: machine/
          cosign-base-identity: "https://github.com/home-assistant/core/.*"
          cosign-base-verify: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
          file: machine/${{ matrix.machine }}
          image: ghcr.io/home-assistant/${{ matrix.machine }}-homeassistant
          image-tags: |
            ${{ needs.init.outputs.version }}
            ${{ steps.tags.outputs.extra_tags }}
          push: true
          version: ${{ needs.init.outputs.version }}

  publish_ha:
    name: Publish version files

@@ -538,20 +480,15 @@ jobs:
        with:
          persist-credentials: false

      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
      - name: Set up Python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          python-version-file: ".python-version"

      - name: Download translations
        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
      - name: Download build context overlay
        uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
        with:
          name: translations

      - name: Extract translations
        run: |
          tar xvf translations.tar.gz
          rm translations.tar.gz
          name: build-context

      - name: Build package
        shell: bash
92 .github/workflows/ci.yaml (vendored)
@@ -41,8 +41,7 @@ env:
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
  HA_SHORT_VERSION: "2026.4"
  DEFAULT_PYTHON: "3.14.2"
  ALL_PYTHON_VERSIONS: "['3.14.2']"
  ADDITIONAL_PYTHON_VERSIONS: "[]"
  # 10.3 is the oldest supported version
  # - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
  # 10.6 is the current long-term-support

@@ -166,6 +165,11 @@ jobs:
          tests_glob=""
          lint_only=""
          skip_coverage=""
          default_python=$(cat .python-version)
          all_python_versions=$(jq -cn \
            --arg default_python "${default_python}" \
            --argjson additional_python_versions "${ADDITIONAL_PYTHON_VERSIONS}" \
            '[$default_python] + $additional_python_versions')

          if [[ "${INTEGRATION_CHANGES}" != "[]" ]];
          then

@@ -235,8 +239,8 @@ jobs:
          echo "mariadb_groups=${mariadb_groups}" >> $GITHUB_OUTPUT
          echo "postgresql_groups: ${postgresql_groups}"
          echo "postgresql_groups=${postgresql_groups}" >> $GITHUB_OUTPUT
          echo "python_versions: ${ALL_PYTHON_VERSIONS}"
          echo "python_versions=${ALL_PYTHON_VERSIONS}" >> $GITHUB_OUTPUT
          echo "python_versions: ${all_python_versions}"
          echo "python_versions=${all_python_versions}" >> $GITHUB_OUTPUT
          echo "test_full_suite: ${test_full_suite}"
          echo "test_full_suite=${test_full_suite}" >> $GITHUB_OUTPUT
          echo "integrations_glob: ${integrations_glob}"

@@ -452,7 +456,7 @@ jobs:
          python --version
          uv pip freeze >> pip_freeze.txt
      - name: Upload pip_freeze artifact
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: pip-freeze-${{ matrix.python-version }}
          path: pip_freeze.txt

@@ -503,13 +507,13 @@ jobs:
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
      - name: Set up Python
        id: python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          python-version-file: ".python-version"
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
        with:

@@ -540,13 +544,13 @@ jobs:
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
      - name: Set up Python
        id: python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          python-version-file: ".python-version"
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
        with:

@@ -576,11 +580,11 @@ jobs:
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
      - name: Set up Python
        id: python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          python-version-file: ".python-version"
          check-latest: true
      - name: Run gen_copilot_instructions.py
        run: |

@@ -605,7 +609,7 @@ jobs:
        with:
          persist-credentials: false
      - name: Dependency review
        uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
        uses: actions/dependency-review-action@05fe4576374b728f0c523d6a13d64c25081e0803 # v4.8.3
        with:
          license-check: false # We use our own license audit checks

@@ -653,7 +657,7 @@ jobs:
          . venv/bin/activate
          python -m script.licenses extract --output-file=licenses-${PYTHON_VERSION}.json
      - name: Upload licenses
        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
          path: licenses-${{ matrix.python-version }}.json

@@ -682,13 +686,13 @@ jobs:
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
      - name: Set up Python
        id: python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          python-version-file: ".python-version"
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
        with:

@@ -735,13 +739,13 @@ jobs:
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
      - name: Set up Python
        id: python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          python-version-file: ".python-version"
          check-latest: true
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
      - name: Restore full Python virtual environment
        id: cache-venv
        uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
        with:

@@ -786,11 +790,11 @@ jobs:
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
        with:
          persist-credentials: false
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
      - name: Set up Python
        id: python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          python-version-file: ".python-version"
          check-latest: true
      - name: Generate partial mypy restore key
        id: generate-mypy-key

@@ -798,7 +802,7 @@ jobs:
          mypy_version=$(cat requirements_test.txt | grep 'mypy.*=' | cut -d '=' -f 3)
          echo "version=${mypy_version}" >> $GITHUB_OUTPUT
          echo "key=mypy-${MYPY_CACHE_VERSION}-${mypy_version}-${HA_SHORT_VERSION}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
- name: Restore full Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
with:
|
||||
@@ -879,13 +883,13 @@ jobs:
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Set up Python
|
||||
id: python
|
||||
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
python-version-file: ".python-version"
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
- name: Restore full Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
|
||||
with:
|
||||
@@ -901,7 +905,7 @@ jobs:
|
||||
. venv/bin/activate
|
||||
python -m script.split_tests ${TEST_GROUP_COUNT} tests
|
||||
- name: Upload pytest_buckets
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: pytest_buckets
|
||||
path: pytest_buckets.txt
|
||||
@@ -978,7 +982,7 @@ jobs:
|
||||
run: |
|
||||
echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
|
||||
- name: Download pytest_buckets
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
with:
|
||||
name: pytest_buckets
|
||||
- name: Compile English translations
|
||||
@@ -1020,14 +1024,14 @@ jobs:
|
||||
2>&1 | tee pytest-${PYTHON_VERSION}-${TEST_GROUP}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-full.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
@@ -1040,7 +1044,7 @@ jobs:
|
||||
mv "junit.xml-tmp" "junit.xml"
|
||||
- name: Upload test results artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: test-results-full-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: junit.xml
|
||||
@@ -1177,7 +1181,7 @@ jobs:
|
||||
2>&1 | tee pytest-${PYTHON_VERSION}-${mariadb}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
@@ -1185,7 +1189,7 @@ jobs:
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
@@ -1199,7 +1203,7 @@ jobs:
|
||||
mv "junit.xml-tmp" "junit.xml"
|
||||
- name: Upload test results artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: test-results-mariadb-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.mariadb }}
|
||||
@@ -1338,7 +1342,7 @@ jobs:
|
||||
2>&1 | tee pytest-${PYTHON_VERSION}-${postgresql}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
@@ -1346,7 +1350,7 @@ jobs:
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
@@ -1360,7 +1364,7 @@ jobs:
|
||||
mv "junit.xml-tmp" "junit.xml"
|
||||
- name: Upload test results artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: test-results-postgres-${{ matrix.python-version }}-${{
|
||||
steps.pytest-partial.outputs.postgresql }}
|
||||
@@ -1387,7 +1391,7 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
@@ -1514,14 +1518,14 @@ jobs:
|
||||
2>&1 | tee pytest-${PYTHON_VERSION}-${TEST_GROUP}.txt
|
||||
- name: Upload pytest output
|
||||
if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: pytest-*.txt
|
||||
overwrite: true
|
||||
- name: Upload coverage artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true'
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: coverage.xml
|
||||
@@ -1534,7 +1538,7 @@ jobs:
|
||||
mv "junit.xml-tmp" "junit.xml"
|
||||
- name: Upload test results artifact
|
||||
if: needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
|
||||
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
|
||||
with:
|
||||
name: test-results-partial-${{ matrix.python-version }}-${{ matrix.group }}
|
||||
path: junit.xml
|
||||
@@ -1558,7 +1562,7 @@ jobs:
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
with:
|
||||
pattern: coverage-*
|
||||
- name: Upload coverage to Codecov
|
||||
@@ -1587,7 +1591,7 @@ jobs:
|
||||
&& needs.info.outputs.skip_coverage != 'true' && !cancelled()
|
||||
steps:
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
|
||||
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
|
||||
with:
|
||||
pattern: test-results-*
|
||||
- name: Upload test results to Codecov
|
||||
|
||||
4 .github/workflows/codeql.yml vendored
@@ -28,11 +28,11 @@ jobs:
persist-credentials: false

- name: Initialize CodeQL
uses: github/codeql-action/init@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
uses: github/codeql-action/init@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@9e907b5e64f6b83e7804b09294d44122997950d6 # v4.32.3
uses: github/codeql-action/analyze@89a39a4e59826350b863aa6b6252a07ad50cf83e # v4.32.4
with:
category: "/language:python"

@@ -236,7 +236,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
uses: actions/ai-inference@e09e65981758de8b2fdab13c2bfb7c7d5493b0b6 # v2.0.7
with:
model: openai/gpt-4o
system-prompt: |

@@ -62,7 +62,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
uses: actions/ai-inference@e09e65981758de8b2fdab13c2bfb7c7d5493b0b6 # v2.0.7
with:
model: openai/gpt-4o-mini
system-prompt: |
7 .github/workflows/translations.yml vendored
@@ -15,9 +15,6 @@ concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true

env:
DEFAULT_PYTHON: "3.14.2"

jobs:
upload:
name: Upload
@@ -29,10 +26,10 @@ jobs:
with:
persist-credentials: false

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
python-version-file: ".python-version"

- name: Upload Translations
env:
25 .github/workflows/wheels.yml vendored
@@ -16,9 +16,6 @@ on:
- "requirements.txt"
- "script/gen_requirements_all.py"

env:
DEFAULT_PYTHON: "3.14.2"

permissions: {}

concurrency:
@@ -36,11 +33,11 @@ jobs:
with:
persist-credentials: false

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
- name: Set up Python
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
python-version-file: ".python-version"
check-latest: true

- name: Create Python virtual environment
@@ -77,7 +74,7 @@ jobs:
) > .env_file

- name: Upload env_file
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: env_file
path: ./.env_file
@@ -85,7 +82,7 @@ jobs:
overwrite: true

- name: Upload requirements_diff
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: requirements_diff
path: ./requirements_diff.txt
@@ -97,7 +94,7 @@ jobs:
python -m script.gen_requirements_all ci

- name: Upload requirements_all_wheels
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: requirements_all_wheels
path: ./requirements_all_wheels_*.txt
@@ -124,12 +121,12 @@ jobs:
persist-credentials: false

- name: Download env_file
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
name: env_file

- name: Download requirements_diff
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
name: requirements_diff

@@ -175,17 +172,17 @@ jobs:
persist-credentials: false

- name: Download env_file
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
name: env_file

- name: Download requirements_diff
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
name: requirements_diff

- name: Download requirements_all_wheels
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
name: requirements_all_wheels

@@ -209,4 +206,4 @@ jobs:
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txt"
requirements: "requirements_all_wheels_${{ matrix.arch }}.txt"
@@ -1 +1 @@
3.14
3.14.2

@@ -545,6 +545,7 @@ homeassistant.components.tcp.*
homeassistant.components.technove.*
homeassistant.components.tedee.*
homeassistant.components.telegram_bot.*
homeassistant.components.teslemetry.*
homeassistant.components.text.*
homeassistant.components.thethingsnetwork.*
homeassistant.components.threshold.*
2 CODEOWNERS generated
@@ -401,8 +401,6 @@ build.json @home-assistant/supervisor
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/homeassistant/components/duckdns/ @tr4nt0r
/tests/components/duckdns/ @tr4nt0r
/homeassistant/components/duke_energy/ @hunterjm
/tests/components/duke_energy/ @hunterjm
/homeassistant/components/duotecno/ @cereal2nd
/tests/components/duotecno/ @cereal2nd
/homeassistant/components/dwd_weather_warnings/ @runningman84 @stephan192
31 Dockerfile generated
@@ -1,19 +1,9 @@
# Automatically generated by hassfest.
#
# To update, run python3 -m script.hassfest -p docker
ARG BUILD_FROM
ARG BUILD_FROM=ghcr.io/home-assistant/amd64-homeassistant-base:latest
FROM ${BUILD_FROM}

LABEL \
io.hass.type="core" \
org.opencontainers.image.authors="The Home Assistant Authors" \
org.opencontainers.image.description="Open-source home automation platform running on Python 3" \
org.opencontainers.image.documentation="https://www.home-assistant.io/docs/" \
org.opencontainers.image.licenses="Apache-2.0" \
org.opencontainers.image.source="https://github.com/home-assistant/core" \
org.opencontainers.image.title="Home Assistant" \
org.opencontainers.image.url="https://www.home-assistant.io/"

# Synchronize with homeassistant/core.py:async_stop
ENV \
S6_SERVICES_GRACETIME=240000 \
@@ -60,3 +50,22 @@ RUN \
homeassistant/homeassistant

WORKDIR /config

ARG BUILD_ARCH=amd64
ARG BUILD_DATE="1970-01-01 00:00:00+00:00"
ARG BUILD_REPOSITORY
ARG BUILD_VERSION=0.0.0-local

LABEL \
io.hass.type="core" \
io.hass.arch="${BUILD_ARCH}" \
io.hass.version="${BUILD_VERSION}" \
org.opencontainers.image.created="${BUILD_DATE}" \
org.opencontainers.image.version="${BUILD_VERSION}" \
org.opencontainers.image.source="${BUILD_REPOSITORY}" \
org.opencontainers.image.authors="The Home Assistant Authors" \
org.opencontainers.image.description="Open-source home automation platform running on Python 3" \
org.opencontainers.image.documentation="https://www.home-assistant.io/docs/" \
org.opencontainers.image.licenses="Apache-2.0" \
org.opencontainers.image.title="Home Assistant" \
org.opencontainers.image.url="https://www.home-assistant.io/"

@@ -70,7 +70,7 @@ from .const import (
SIGNAL_BOOTSTRAP_INTEGRATIONS,
)
from .core_config import async_process_ha_core_config
from .exceptions import HomeAssistantError
from .exceptions import HomeAssistantError, UnsupportedStorageVersionError
from .helpers import (
area_registry,
category_registry,
@@ -239,6 +239,8 @@ DEFAULT_INTEGRATIONS = {
}
DEFAULT_INTEGRATIONS_RECOVERY_MODE = {
# These integrations are set up if recovery mode is activated.
"backup",
"cloud",
"frontend",
}
DEFAULT_INTEGRATIONS_SUPERVISOR = {
@@ -433,32 +435,56 @@ def _init_blocking_io_modules_in_executor() -> None:
is_docker_env()


async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
"""Load the registries and modules that will do blocking I/O."""
async def async_load_base_functionality(hass: core.HomeAssistant) -> bool:
"""Load the registries and modules that will do blocking I/O.

Return whether loading succeeded.
"""
if DATA_REGISTRIES_LOADED in hass.data:
return
return True

hass.data[DATA_REGISTRIES_LOADED] = None
entity.async_setup(hass)
frame.async_setup(hass)
template.async_setup(hass)
translation.async_setup(hass)
await asyncio.gather(
create_eager_task(get_internal_store_manager(hass).async_initialize()),
create_eager_task(area_registry.async_load(hass)),
create_eager_task(category_registry.async_load(hass)),
create_eager_task(device_registry.async_load(hass)),
create_eager_task(entity_registry.async_load(hass)),
create_eager_task(floor_registry.async_load(hass)),
create_eager_task(issue_registry.async_load(hass)),
create_eager_task(label_registry.async_load(hass)),
hass.async_add_executor_job(_init_blocking_io_modules_in_executor),
create_eager_task(template.async_load_custom_templates(hass)),
create_eager_task(restore_state.async_load(hass)),
create_eager_task(hass.config_entries.async_initialize()),
create_eager_task(async_get_system_info(hass)),
create_eager_task(condition.async_setup(hass)),
create_eager_task(trigger.async_setup(hass)),
)

recovery = hass.config.recovery_mode
try:
await asyncio.gather(
create_eager_task(get_internal_store_manager(hass).async_initialize()),
create_eager_task(area_registry.async_load(hass, load_empty=recovery)),
create_eager_task(category_registry.async_load(hass, load_empty=recovery)),
create_eager_task(device_registry.async_load(hass, load_empty=recovery)),
create_eager_task(entity_registry.async_load(hass, load_empty=recovery)),
create_eager_task(floor_registry.async_load(hass, load_empty=recovery)),
create_eager_task(issue_registry.async_load(hass, load_empty=recovery)),
create_eager_task(label_registry.async_load(hass, load_empty=recovery)),
hass.async_add_executor_job(_init_blocking_io_modules_in_executor),
create_eager_task(template.async_load_custom_templates(hass)),
create_eager_task(restore_state.async_load(hass, load_empty=recovery)),
create_eager_task(hass.config_entries.async_initialize()),
create_eager_task(async_get_system_info(hass)),
create_eager_task(condition.async_setup(hass)),
create_eager_task(trigger.async_setup(hass)),
)
except UnsupportedStorageVersionError as err:
# If we're already in recovery mode, we don't want to handle the exception
# and activate recovery mode again, as that would lead to an infinite loop.
if recovery:
raise

_LOGGER.error(
"Storage file %s was created by a newer version of Home Assistant"
" (storage version %s > %s); activating recovery mode; on-disk data"
" is preserved; upgrade Home Assistant or restore from a backup",
err.storage_key,
err.found_version,
err.max_supported_version,
)
return False

return True


async def async_from_config_dict(
@@ -475,7 +501,9 @@ async def async_from_config_dict(
# Prime custom component cache early so we know if registry entries are tied
# to a custom integration
await loader.async_get_custom_components(hass)
await async_load_base_functionality(hass)

if not await async_load_base_functionality(hass):
return None

# Set up core.
_LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)
5 homeassistant/brands/ubisys.json Normal file
@@ -0,0 +1,5 @@
{
"domain": "ubisys",
"name": "Ubisys",
"iot_standards": ["zigbee"]
}
@@ -191,7 +191,7 @@ class AccuWeatherEntity(
{
ATTR_FORECAST_TIME: utc_from_timestamp(item["EpochDate"]).isoformat(),
ATTR_FORECAST_CLOUD_COVERAGE: item["CloudCoverDay"],
ATTR_FORECAST_HUMIDITY: item["RelativeHumidityDay"]["Average"],
ATTR_FORECAST_HUMIDITY: item["RelativeHumidityDay"].get("Average"),
ATTR_FORECAST_NATIVE_TEMP: item["TemperatureMax"][ATTR_VALUE],
ATTR_FORECAST_NATIVE_TEMP_LOW: item["TemperatureMin"][ATTR_VALUE],
ATTR_FORECAST_NATIVE_APPARENT_TEMP: item["RealFeelTemperatureMax"][

@@ -93,7 +93,6 @@ class AirobotNumber(AirobotEntity, NumberEntity):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="set_value_failed",
translation_placeholders={"error": str(err)},
) from err
else:
await self.coordinator.async_request_refresh()

@@ -112,7 +112,7 @@
"message": "Failed to set temperature to {temperature}."
},
"set_value_failed": {
"message": "Failed to set value: {error}"
"message": "Failed to set value."
},
"switch_turn_off_failed": {
"message": "Failed to turn off {switch}."

@@ -44,7 +44,7 @@ def make_entity_state_trigger_required_features(
class CustomTrigger(EntityStateTriggerRequiredFeatures):
"""Trigger for entity state changes."""

_domain = domain
_domains = {domain}
_to_states = {to_state}
_required_features = required_features

@@ -1,6 +1,6 @@
"""Defines a base Alexa Devices entity."""

from aioamazondevices.const.devices import SPEAKER_GROUP_MODEL
from aioamazondevices.const.devices import SPEAKER_GROUP_DEVICE_TYPE
from aioamazondevices.structures import AmazonDevice

from homeassistant.helpers.device_registry import DeviceInfo
@@ -25,19 +25,20 @@ class AmazonEntity(CoordinatorEntity[AmazonDevicesCoordinator]):
"""Initialize the entity."""
super().__init__(coordinator)
self._serial_num = serial_num
model_details = coordinator.api.get_model_details(self.device) or {}
model = model_details.get("model")
model = self.device.model
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, serial_num)},
name=self.device.account_name,
model=model,
model_id=self.device.device_type,
manufacturer=model_details.get("manufacturer", "Amazon"),
hw_version=model_details.get("hw_version"),
manufacturer=self.device.manufacturer or "Amazon",
hw_version=self.device.hardware_version,
sw_version=(
self.device.software_version if model != SPEAKER_GROUP_MODEL else None
self.device.software_version
if model != SPEAKER_GROUP_DEVICE_TYPE
else None
),
serial_number=serial_num if model != SPEAKER_GROUP_MODEL else None,
serial_number=serial_num if model != SPEAKER_GROUP_DEVICE_TYPE else None,
)
self.entity_description = description
self._attr_unique_id = f"{serial_num}-{description.key}"

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==12.0.0"]
"requirements": ["aioamazondevices==13.0.0"]
}

@@ -400,8 +400,8 @@ def _convert_content(
# If there is only one text block, simplify the content to a string
messages[-1]["content"] = messages[-1]["content"][0]["text"]
else:
# Note: We don't pass SystemContent here as its passed to the API as the prompt
raise TypeError(f"Unexpected content type: {type(content)}")
# Note: We don't pass SystemContent here as it's passed to the API as the prompt
raise HomeAssistantError("Unexpected content type in chat log")

return messages, container_id

@@ -442,8 +442,8 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have

Each message could contain multiple blocks of the same type.
"""
if stream is None:
raise TypeError("Expected a stream of messages")
if stream is None or not hasattr(stream, "__aiter__"):
raise HomeAssistantError("Expected a stream of messages")

current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = None
current_tool_args: str
@@ -456,8 +456,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
LOGGER.debug("Received response: %s", response)

if isinstance(response, RawMessageStartEvent):
if response.message.role != "assistant":
raise ValueError("Unexpected message role")
input_usage = response.message.usage
first_block = True
elif isinstance(response, RawContentBlockStartEvent):
@@ -666,7 +664,7 @@ class AnthropicBaseLLMEntity(Entity):

system = chat_log.content[0]
if not isinstance(system, conversation.SystemContent):
raise TypeError("First message must be a system message")
raise HomeAssistantError("First message must be a system message")

# System prompt with caching enabled
system_prompt: list[TextBlockParam] = [

@@ -31,10 +31,7 @@ rules:
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: todo
comment: |
Reevaluate exceptions for entity services.
action-exceptions: done
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done

@@ -117,6 +117,7 @@ class SharpAquosTVDevice(MediaPlayerEntity):
| MediaPlayerEntityFeature.VOLUME_SET
| MediaPlayerEntityFeature.PLAY
)
_attr_volume_step = 2 / 60

def __init__(
self, name: str, remote: sharp_aquos_rc.TV, power_on_enabled: bool = False
@@ -161,22 +162,6 @@ class SharpAquosTVDevice(MediaPlayerEntity):
"""Turn off tvplayer."""
self._remote.power(0)

@_retry
def volume_up(self) -> None:
"""Volume up the media player."""
if self.volume_level is None:
_LOGGER.debug("Unknown volume in volume_up")
return
self._remote.volume(int(self.volume_level * 60) + 2)

@_retry
def volume_down(self) -> None:
"""Volume down media player."""
if self.volume_level is None:
_LOGGER.debug("Unknown volume in volume_down")
return
self._remote.volume(int(self.volume_level * 60) - 2)

@_retry
def set_volume_level(self, volume: float) -> None:
"""Set Volume media player."""

@@ -30,5 +30,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.4"]
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.7"]
}

@@ -61,7 +61,13 @@ class AuroraAbbDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float]]):
frequency = self.client.measure(4)
i_leak_dcdc = self.client.measure(6)
i_leak_inverter = self.client.measure(7)
power_in_1 = self.client.measure(8)
power_in_2 = self.client.measure(9)
temperature_c = self.client.measure(21)
voltage_in_1 = self.client.measure(23)
current_in_1 = self.client.measure(25)
voltage_in_2 = self.client.measure(26)
current_in_2 = self.client.measure(27)
r_iso = self.client.measure(30)
energy_wh = self.client.cumulated_energy(5)
[alarm, *_] = self.client.alarms()
@@ -87,7 +93,13 @@ class AuroraAbbDataUpdateCoordinator(DataUpdateCoordinator[dict[str, float]]):
data["grid_frequency"] = round(frequency, 1)
data["i_leak_dcdc"] = i_leak_dcdc
data["i_leak_inverter"] = i_leak_inverter
data["power_in_1"] = round(power_in_1, 1)
data["power_in_2"] = round(power_in_2, 1)
data["temp"] = round(temperature_c, 1)
data["voltage_in_1"] = round(voltage_in_1, 1)
data["current_in_1"] = round(current_in_1, 1)
data["voltage_in_2"] = round(voltage_in_2, 1)
data["current_in_2"] = round(current_in_2, 1)
data["r_iso"] = r_iso
data["totalenergy"] = round(energy_wh / 1000, 2)
data["alarm"] = alarm

@@ -68,6 +68,7 @@ SENSOR_TYPES = [
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfFrequency.HERTZ,
state_class=SensorStateClass.MEASUREMENT,
translation_key="grid_frequency",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
@@ -88,6 +89,60 @@ SENSOR_TYPES = [
translation_key="i_leak_inverter",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_in_1",
device_class=SensorDeviceClass.POWER,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfPower.WATT,
state_class=SensorStateClass.MEASUREMENT,
translation_key="power_in_1",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="power_in_2",
device_class=SensorDeviceClass.POWER,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfPower.WATT,
state_class=SensorStateClass.MEASUREMENT,
translation_key="power_in_2",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="voltage_in_1",
device_class=SensorDeviceClass.VOLTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
state_class=SensorStateClass.MEASUREMENT,
translation_key="voltage_in_1",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="current_in_1",
device_class=SensorDeviceClass.CURRENT,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="current_in_1",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="voltage_in_2",
device_class=SensorDeviceClass.VOLTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
state_class=SensorStateClass.MEASUREMENT,
translation_key="voltage_in_2",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="current_in_2",
device_class=SensorDeviceClass.CURRENT,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="current_in_2",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="alarm",
device_class=SensorDeviceClass.ENUM,

@@ -24,9 +24,18 @@
"alarm": {
"name": "Alarm status"
},
"current_in_1": {
"name": "String 1 current"
},
"current_in_2": {
"name": "String 2 current"
},
"grid_current": {
"name": "Grid current"
},
"grid_frequency": {
"name": "Grid frequency"
},
"grid_voltage": {
"name": "Grid voltage"
},
@@ -36,6 +45,12 @@
"i_leak_inverter": {
"name": "Inverter leak current"
},
"power_in_1": {
"name": "String 1 power"
},
"power_in_2": {
"name": "String 2 power"
},
"power_output": {
"name": "Power output"
},
@@ -44,6 +59,12 @@
},
"total_energy": {
"name": "Total energy"
},
"voltage_in_1": {
"name": "String 1 voltage"
},
"voltage_in_2": {
"name": "String 2 voltage"
}
}
}

@@ -149,6 +149,7 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
"lock",
"media_player",
"person",
"remote",
"scene",
"siren",
"switch",

@@ -29,12 +29,17 @@ class StoredBackupData(TypedDict):
class _BackupStore(Store[StoredBackupData]):
"""Class to help storing backup data."""

# Maximum version we support reading for forward compatibility.
# This allows reading data written by a newer HA version after downgrade.
_MAX_READABLE_VERSION = 2

def __init__(self, hass: HomeAssistant) -> None:
"""Initialize storage class."""
super().__init__(
hass,
STORAGE_VERSION,
STORAGE_KEY,
max_readable_version=self._MAX_READABLE_VERSION,
minor_version=STORAGE_VERSION_MINOR,
)

@@ -86,8 +91,8 @@ class _BackupStore(Store[StoredBackupData]):
# data["config"]["schedule"]["state"] will be removed. The bump to 2 is
# planned to happen after a 6 month quiet period with no minor version
# changes.
# Reject if major version is higher than 2.
if old_major_version > 2:
# Reject if major version is higher than _MAX_READABLE_VERSION.
if old_major_version > self._MAX_READABLE_VERSION:
raise NotImplementedError
return data

@@ -24,7 +24,7 @@ class BinarySensorOnOffTrigger(EntityTargetStateTriggerBase):
"""Class for binary sensor on/off triggers."""

_device_class: BinarySensorDeviceClass | None
_domain: str = DOMAIN
_domains = {DOMAIN}

def entity_filter(self, entities: set[str]) -> set[str]:
"""Filter entities of this domain."""

@@ -190,7 +190,7 @@ class BitcoinSensor(SensorEntity):
elif sensor_type == "miners_revenue_usd":
self._attr_native_value = f"{stats.miners_revenue_usd:.0f}"
elif sensor_type == "btc_mined":
self._attr_native_value = str(stats.btc_mined * 0.00000001)
self._attr_native_value = str(stats.btc_mined * 1e-8)
elif sensor_type == "trade_volume_usd":
self._attr_native_value = f"{stats.trade_volume_usd:.1f}"
elif sensor_type == "difficulty":
@@ -208,13 +208,13 @@ class BitcoinSensor(SensorEntity):
elif sensor_type == "blocks_size":
self._attr_native_value = f"{stats.blocks_size:.1f}"
elif sensor_type == "total_fees_btc":
self._attr_native_value = f"{stats.total_fees_btc * 0.00000001:.2f}"
self._attr_native_value = f"{stats.total_fees_btc * 1e-8:.2f}"
elif sensor_type == "total_btc_sent":
self._attr_native_value = f"{stats.total_btc_sent * 0.00000001:.2f}"
self._attr_native_value = f"{stats.total_btc_sent * 1e-8:.2f}"
elif sensor_type == "estimated_btc_sent":
self._attr_native_value = f"{stats.estimated_btc_sent * 0.00000001:.2f}"
self._attr_native_value = f"{stats.estimated_btc_sent * 1e-8:.2f}"
elif sensor_type == "total_btc":
self._attr_native_value = f"{stats.total_btc * 0.00000001:.2f}"
self._attr_native_value = f"{stats.total_btc * 1e-8:.2f}"
elif sensor_type == "total_blocks":
self._attr_native_value = f"{stats.total_blocks:.0f}"
elif sensor_type == "next_retarget":
@@ -222,7 +222,7 @@ class BitcoinSensor(SensorEntity):
elif sensor_type == "estimated_transaction_volume_usd":
self._attr_native_value = f"{stats.estimated_transaction_volume_usd:.2f}"
elif sensor_type == "miners_revenue_btc":
self._attr_native_value = f"{stats.miners_revenue_btc * 0.00000001:.1f}"
self._attr_native_value = f"{stats.miners_revenue_btc * 1e-8:.1f}"
elif sensor_type == "market_price_usd":
self._attr_native_value = f"{stats.market_price_usd:.2f}"

@@ -85,6 +85,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
_attr_media_content_type = MediaType.MUSIC
_attr_has_entity_name = True
_attr_name = None
_attr_volume_step = 0.01

def __init__(
self,
@@ -688,24 +689,6 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity

await self._player.play_url(url)

async def async_volume_up(self) -> None:
"""Volume up the media player."""
if self.volume_level is None:
return

new_volume = self.volume_level + 0.01
new_volume = min(1, new_volume)
await self.async_set_volume_level(new_volume)

async def async_volume_down(self) -> None:
"""Volume down the media player."""
if self.volume_level is None:
return

new_volume = self.volume_level - 0.01
new_volume = max(0, new_volume)
await self.async_set_volume_level(new_volume)

async def async_set_volume_level(self, volume: float) -> None:
"""Send volume_up command to media player."""
volume = int(round(volume * 100))

@@ -8,7 +8,7 @@
"iot_class": "local_polling",
"loggers": ["bsblan"],
"quality_scale": "silver",
"requirements": ["python-bsblan==5.1.0"],
"requirements": ["python-bsblan==5.1.1"],
"zeroconf": [
{
"name": "bsb-lan*",

@@ -14,7 +14,7 @@ from . import DOMAIN
class ButtonPressedTrigger(EntityTriggerBase):
"""Trigger for button entity presses."""

_domain = DOMAIN
_domains = {DOMAIN}
_schema = ENTITY_STATE_TRIGGER_SCHEMA

def is_valid_transition(self, from_state: State, to_state: State) -> bool:

@@ -29,6 +29,12 @@
"early_update": {
"default": "mdi:update"
},
"equalizer": {
"default": "mdi:equalizer",
"state": {
"off": "mdi:equalizer-outline"
}
},
"pre_amp": {
"default": "mdi:volume-high",
"state": {

@@ -65,6 +65,9 @@
"early_update": {
"name": "Early update"
},
"equalizer": {
"name": "Equalizer"
},
"pre_amp": {
"name": "Pre-Amp"
},

@@ -33,6 +33,13 @@ def room_correction_enabled(client: StreamMagicClient) -> bool:
return client.audio.tilt_eq.enabled


def equalizer_enabled(client: StreamMagicClient) -> bool:
"""Check if equalizer is enabled."""
if TYPE_CHECKING:
assert client.audio.user_eq is not None
return client.audio.user_eq.enabled


CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] = (
CambridgeAudioSwitchEntityDescription(
key="pre_amp",
@@ -56,6 +63,14 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSwitchEntityDescription, ...] = (
value_fn=room_correction_enabled,
set_value_fn=lambda client, value: client.set_room_correction_mode(value),
),
CambridgeAudioSwitchEntityDescription(
key="equalizer",
translation_key="equalizer",
entity_category=EntityCategory.CONFIG,
load_fn=lambda client: client.audio.user_eq is not None,
value_fn=equalizer_enabled,
set_value_fn=lambda client, value: client.set_equalizer_mode(value),
),
)

@@ -804,9 +804,24 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
@property
def state(self) -> MediaPlayerState | None:
"""Return the state of the player."""
# The lovelace app loops media to prevent timing out, don't show that
if (chromecast := self._chromecast) is None or (
cast_status := self.cast_status
) is None:
# Not connected to any chromecast, or not yet got any status
return None

if (
chromecast.cast_type == pychromecast.const.CAST_TYPE_CHROMECAST
and not chromecast.ignore_cec
and cast_status.is_active_input is False
):
# The display interface for the device has been turned off or switched away
return MediaPlayerState.OFF

if self.app_id == CAST_APP_ID_HOMEASSISTANT_LOVELACE:
# The lovelace app loops media to prevent timing out, don't show that
return MediaPlayerState.PLAYING

if (media_status := self._media_status()[0]) is not None:
if media_status.player_state == MEDIA_PLAYER_STATE_PLAYING:
return MediaPlayerState.PLAYING
@@ -817,20 +832,16 @@ class CastMediaPlayerEntity(CastDevice, MediaPlayerEntity):
if media_status.player_is_idle:
return MediaPlayerState.IDLE

if self._chromecast is not None and self._chromecast.is_idle:
# If library consider us idle, that is our off state
# it takes HDMI status into account for cast devices.
return MediaPlayerState.OFF

if self.app_id in APP_IDS_UNRELIABLE_MEDIA_INFO:
# Some apps don't report media status, show the player as playing
return MediaPlayerState.PLAYING

if self.app_id is not None:
# We have an active app
return MediaPlayerState.IDLE
if self.app_id in (pychromecast.IDLE_APP_ID, None):
# We have no active app or the home screen app. This is
# same app as APP_BACKDROP.
return MediaPlayerState.OFF

return None
return MediaPlayerState.IDLE

@property
def media_content_id(self) -> str | None:

@@ -43,7 +43,7 @@ HVAC_MODE_CHANGED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend
class HVACModeChangedTrigger(EntityTargetStateTriggerBase):
"""Trigger for entity state changes."""

_domain = DOMAIN
_domains = {DOMAIN}
_schema = HVAC_MODE_CHANGED_TRIGGER_SCHEMA

def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:

@@ -48,6 +48,8 @@ def async_setup(hass: HomeAssistant) -> None:
vol.Optional("conversation_id"): vol.Any(str, None),
vol.Optional("language"): str,
vol.Optional("agent_id"): agent_id_validator,
vol.Optional("device_id"): vol.Any(str, None),
vol.Optional("satellite_id"): vol.Any(str, None),
}
)
@websocket_api.async_response
@@ -64,6 +66,8 @@ async def websocket_process(
context=connection.context(msg),
language=msg.get("language"),
agent_id=msg.get("agent_id"),
device_id=msg.get("device_id"),
satellite_id=msg.get("satellite_id"),
)
connection.send_result(msg["id"], result.as_dict())

@@ -248,6 +252,8 @@ class ConversationProcessView(http.HomeAssistantView):
vol.Optional("conversation_id"): str,
vol.Optional("language"): str,
vol.Optional("agent_id"): agent_id_validator,
vol.Optional("device_id"): vol.Any(str, None),
vol.Optional("satellite_id"): vol.Any(str, None),
}
)
)
@@ -262,6 +268,8 @@ class ConversationProcessView(http.HomeAssistantView):
context=self.context(request),
language=data.get("language"),
agent_id=data.get("agent_id"),
device_id=data.get("device_id"),
satellite_id=data.get("satellite_id"),
)

return self.json(result.as_dict())

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.2.13"]
"requirements": ["hassil==3.5.0", "home-assistant-intents==2026.3.3"]
}

@@ -91,6 +91,7 @@ class CoverEntityFeature(IntFlag):

ATTR_CURRENT_POSITION = "current_position"
ATTR_CURRENT_TILT_POSITION = "current_tilt_position"
ATTR_IS_CLOSED = "is_closed"
ATTR_POSITION = "position"
ATTR_TILT_POSITION = "tilt_position"

@@ -267,7 +268,9 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
@property
def state_attributes(self) -> dict[str, Any]:
"""Return the state attributes."""
data = {}
data: dict[str, Any] = {}

data[ATTR_IS_CLOSED] = self.is_closed

if (current := self.current_cover_position) is not None:
data[ATTR_CURRENT_POSITION] = current

@@ -112,11 +112,12 @@ def _zone_is_configured(zone: DaikinZone) -> bool:

def _zone_temperature_lists(device: Appliance) -> tuple[list[str], list[str]]:
"""Return the decoded zone temperature lists."""
try:
heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
except (AttributeError, KeyError):
values = device.values
if DAIKIN_ZONE_TEMP_HEAT not in values or DAIKIN_ZONE_TEMP_COOL not in values:
return ([], [])

heating = device.represent(DAIKIN_ZONE_TEMP_HEAT)[1]
cooling = device.represent(DAIKIN_ZONE_TEMP_COOL)[1]
return (list(heating or []), list(cooling or []))

@@ -139,18 +139,6 @@ class AbstractDemoPlayer(MediaPlayerEntity):
self._attr_is_volume_muted = mute
self.schedule_update_ha_state()

def volume_up(self) -> None:
"""Increase volume."""
assert self.volume_level is not None
self._attr_volume_level = min(1.0, self.volume_level + 0.1)
self.schedule_update_ha_state()

def volume_down(self) -> None:
"""Decrease volume."""
assert self.volume_level is not None
self._attr_volume_level = max(0.0, self.volume_level - 0.1)
self.schedule_update_ha_state()

def set_volume_level(self, volume: float) -> None:
"""Set the volume level, range 0..1."""
self._attr_volume_level = volume

@@ -13,7 +13,7 @@
},
"user": {
"data": {
"host": "Host"
"host": "[%key:common::config_flow::data::host%]"
},
"description": "Please enter the host name or IP address of the Devialet device."
}

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["dsmr_parser"],
"requirements": ["dsmr-parser==1.4.3"]
"requirements": ["dsmr-parser==1.5.0"]
}

@@ -1,22 +0,0 @@
"""The Duke Energy integration."""

from __future__ import annotations

from homeassistant.core import HomeAssistant

from .coordinator import DukeEnergyConfigEntry, DukeEnergyCoordinator


async def async_setup_entry(hass: HomeAssistant, entry: DukeEnergyConfigEntry) -> bool:
"""Set up Duke Energy from a config entry."""

coordinator = DukeEnergyCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator

return True


async def async_unload_entry(hass: HomeAssistant, entry: DukeEnergyConfigEntry) -> bool:
"""Unload a config entry."""
return True
@@ -1,67 +0,0 @@
"""Config flow for Duke Energy integration."""

from __future__ import annotations

import logging
from typing import Any

from aiodukeenergy import DukeEnergy
from aiohttp import ClientError, ClientResponseError
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
}
)


class DukeEnergyConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Duke Energy."""

VERSION = 1

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
session = async_get_clientsession(self.hass)
api = DukeEnergy(
user_input[CONF_USERNAME], user_input[CONF_PASSWORD], session
)
try:
auth = await api.authenticate()
except ClientResponseError as e:
errors["base"] = "invalid_auth" if e.status == 404 else "cannot_connect"
except (ClientError, TimeoutError):
errors["base"] = "cannot_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
username = auth["internalUserID"].lower()
|
||||
await self.async_set_unique_id(username)
|
||||
self._abort_if_unique_id_configured()
|
||||
email = auth["loginEmailAddress"].lower()
|
||||
data = {
|
||||
CONF_EMAIL: email,
|
||||
CONF_USERNAME: username,
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
}
|
||||
self._async_abort_entries_match(data)
|
||||
return self.async_create_entry(title=email, data=data)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
@@ -1,3 +0,0 @@
|
||||
"""Constants for the Duke Energy integration."""
|
||||
|
||||
DOMAIN = "duke_energy"
|
||||
@@ -1,222 +0,0 @@
"""Coordinator to handle Duke Energy connections."""

from datetime import datetime, timedelta
import logging
from typing import Any, cast

from aiodukeenergy import DukeEnergy
from aiohttp import ClientError

from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.models import (
    StatisticData,
    StatisticMeanType,
    StatisticMetaData,
)
from homeassistant.components.recorder.statistics import (
    async_add_external_statistics,
    get_last_statistics,
    statistics_during_period,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, UnitOfEnergy, UnitOfVolume
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import EnergyConverter

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

_SUPPORTED_METER_TYPES = ("ELECTRIC",)

type DukeEnergyConfigEntry = ConfigEntry[DukeEnergyCoordinator]


class DukeEnergyCoordinator(DataUpdateCoordinator[None]):
    """Handle inserting statistics."""

    config_entry: DukeEnergyConfigEntry

    def __init__(
        self, hass: HomeAssistant, config_entry: DukeEnergyConfigEntry
    ) -> None:
        """Initialize the data handler."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name="Duke Energy",
            # Data is updated daily on Duke Energy.
            # Refresh every 12h to be at most 12h behind.
            update_interval=timedelta(hours=12),
        )
        self.api = DukeEnergy(
            config_entry.data[CONF_USERNAME],
            config_entry.data[CONF_PASSWORD],
            async_get_clientsession(hass),
        )
        self._statistic_ids: set = set()

        @callback
        def _dummy_listener() -> None:
            pass

        # Force the coordinator to periodically update by registering at least one listener.
        # Duke Energy does not provide forecast data, so all information is historical.
        # This makes _async_update_data get periodically called so we can insert statistics.
        self.async_add_listener(_dummy_listener)

        self.config_entry.async_on_unload(self._clear_statistics)

    def _clear_statistics(self) -> None:
        """Clear statistics."""
        get_instance(self.hass).async_clear_statistics(list(self._statistic_ids))

    async def _async_update_data(self) -> None:
        """Insert Duke Energy statistics."""
        meters: dict[str, dict[str, Any]] = await self.api.get_meters()
        for serial_number, meter in meters.items():
            if (
                not isinstance(meter["serviceType"], str)
                or meter["serviceType"] not in _SUPPORTED_METER_TYPES
            ):
                _LOGGER.debug(
                    "Skipping unsupported meter type %s", meter["serviceType"]
                )
                continue

            id_prefix = f"{meter['serviceType'].lower()}_{serial_number}"
            consumption_statistic_id = f"{DOMAIN}:{id_prefix}_energy_consumption"
            self._statistic_ids.add(consumption_statistic_id)
            _LOGGER.debug(
                "Updating Statistics for %s",
                consumption_statistic_id,
            )

            last_stat = await get_instance(self.hass).async_add_executor_job(
                get_last_statistics, self.hass, 1, consumption_statistic_id, True, set()
            )
            if not last_stat:
                _LOGGER.debug("Updating statistic for the first time")
                usage = await self._async_get_energy_usage(meter)
                consumption_sum = 0.0
                last_stats_time = None
            else:
                usage = await self._async_get_energy_usage(
                    meter,
                    last_stat[consumption_statistic_id][0]["start"],
                )
                if not usage:
                    _LOGGER.debug("No recent usage data. Skipping update")
                    continue
                stats = await get_instance(self.hass).async_add_executor_job(
                    statistics_during_period,
                    self.hass,
                    min(usage.keys()),
                    None,
                    {consumption_statistic_id},
                    "hour",
                    None,
                    {"sum"},
                )
                consumption_sum = cast(float, stats[consumption_statistic_id][0]["sum"])
                last_stats_time = stats[consumption_statistic_id][0]["start"]

            consumption_statistics = []

            for start, data in usage.items():
                if last_stats_time is not None and start.timestamp() <= last_stats_time:
                    continue
                consumption_sum += data["energy"]

                consumption_statistics.append(
                    StatisticData(
                        start=start, state=data["energy"], sum=consumption_sum
                    )
                )

            name_prefix = (
                f"Duke Energy {meter['serviceType'].capitalize()} {serial_number}"
            )
            consumption_metadata = StatisticMetaData(
                mean_type=StatisticMeanType.NONE,
                has_sum=True,
                name=f"{name_prefix} Consumption",
                source=DOMAIN,
                statistic_id=consumption_statistic_id,
                unit_class=EnergyConverter.UNIT_CLASS,
                unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR
                if meter["serviceType"] == "ELECTRIC"
                else UnitOfVolume.CENTUM_CUBIC_FEET,
            )

            _LOGGER.debug(
                "Adding %s statistics for %s",
                len(consumption_statistics),
                consumption_statistic_id,
            )
            async_add_external_statistics(
                self.hass, consumption_metadata, consumption_statistics
            )

    async def _async_get_energy_usage(
        self, meter: dict[str, Any], start_time: float | None = None
    ) -> dict[datetime, dict[str, float | int]]:
        """Get energy usage.

        If start_time is None, get usage since account activation (or as far back as possible),
        otherwise since start_time - 30 days to allow corrections in data.

        Duke Energy provides hourly data going back roughly 3 years.
        """

        # All Duke Energy service areas are currently in the America/New_York timezone.
        # May need to re-think this if that ever changes and determine the timezone
        # based on the service address somehow.
        tz = await dt_util.async_get_time_zone("America/New_York")
        lookback = timedelta(days=30)
        one = timedelta(days=1)
        if start_time is None:
            # Max 3 years of data
            start = dt_util.now(tz) - timedelta(days=3 * 365)
        else:
            start = datetime.fromtimestamp(start_time, tz=tz) - lookback
        agreement_date = dt_util.parse_datetime(meter["agreementActiveDate"])
        if agreement_date is not None:
            start = max(agreement_date.replace(tzinfo=tz), start)

        start = start.replace(hour=0, minute=0, second=0, microsecond=0)
        end = dt_util.now(tz).replace(hour=0, minute=0, second=0, microsecond=0) - one
        _LOGGER.debug("Data lookup range: %s - %s", start, end)

        start_step = max(end - lookback, start)
        end_step = end
        usage: dict[datetime, dict[str, float | int]] = {}
        while True:
            _LOGGER.debug("Getting hourly usage: %s - %s", start_step, end_step)
            try:
                # Get data
                results = await self.api.get_energy_usage(
                    meter["serialNum"], "HOURLY", "DAY", start_step, end_step
                )
                usage = {**results["data"], **usage}

                for missing in results["missing"]:
                    _LOGGER.debug("Missing data: %s", missing)

                # Set next range
                end_step = start_step - one
                start_step = max(start_step - lookback, start)

                # Make sure we don't go back too far
                if end_step < start:
                    break
            except (TimeoutError, ClientError):
                # ClientError is raised when there is no more data for the range
                break

        _LOGGER.debug("Got %s meter usage reads", len(usage))
        return usage
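The windowed backfill in _async_get_energy_usage above is the heart of the coordinator: it pages backwards through history in 30-day chunks until it reaches the computed start or the API signals that no more data exists. A standalone sketch of just the window arithmetic (the function name and the printed example are illustrative, not part of the integration):

from datetime import datetime, timedelta

def backfill_windows(start: datetime, end: datetime, lookback: timedelta = timedelta(days=30)):
    """Yield (start_step, end_step) windows walking backwards from end towards start."""
    one = timedelta(days=1)
    start_step = max(end - lookback, start)
    end_step = end
    while True:
        yield start_step, end_step
        # Slide the window one lookback further into the past.
        end_step = start_step - one
        start_step = max(start_step - lookback, start)
        if end_step < start:
            break

end = datetime(2026, 3, 1)
start = end - timedelta(days=75)
for lo, hi in backfill_windows(start, end):
    print(lo.date(), "->", hi.date())  # three windows of at most 30 days each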
@@ -1,11 +0,0 @@
{
  "domain": "duke_energy",
  "name": "Duke Energy",
  "codeowners": ["@hunterjm"],
  "config_flow": true,
  "dependencies": ["recorder"],
  "documentation": "https://www.home-assistant.io/integrations/duke_energy",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "requirements": ["aiodukeenergy==0.3.0"]
}
@@ -1,20 +0,0 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "user": {
        "data": {
          "password": "[%key:common::config_flow::data::password%]",
          "username": "[%key:common::config_flow::data::username%]"
        }
      }
    }
  }
}
@@ -2,14 +2,39 @@

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr

from .const import DOMAIN
from .coordinator import EafmConfigEntry, EafmCoordinator

PLATFORMS = [Platform.SENSOR]


def _fix_device_registry_identifiers(
    hass: HomeAssistant, entry: EafmConfigEntry
) -> None:
    """Fix invalid identifiers in device registry.

    Added in 2026.4, can be removed in 2026.10 or later.
    """
    device_registry = dr.async_get(hass)
    for device_entry in dr.async_entries_for_config_entry(
        device_registry, entry.entry_id
    ):
        old_identifier = (DOMAIN, "measure-id", entry.data["station"])
        if old_identifier not in device_entry.identifiers:  # type: ignore[comparison-overlap]
            continue
        new_identifiers = device_entry.identifiers.copy()
        new_identifiers.discard(old_identifier)  # type: ignore[arg-type]
        new_identifiers.add((DOMAIN, entry.data["station"]))
        device_registry.async_update_device(
            device_entry.id, new_identifiers=new_identifiers
        )


async def async_setup_entry(hass: HomeAssistant, entry: EafmConfigEntry) -> bool:
    """Set up flood monitoring sensors for this config entry."""
    _fix_device_registry_identifiers(hass, entry)
    coordinator = EafmCoordinator(hass, entry=entry)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
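For context on the repair above: device registry identifiers are declared as 2-tuples of (domain, id), so the old (DOMAIN, "measure-id", station) 3-tuple never matched the declared type, which is why the migration needs the type: ignore comments while it swaps in the valid shape. A minimal illustration (the station id is made up):

# Old, invalid shape: a 3-tuple stored in a set typed as set[tuple[str, str]].
old_identifier = ("eafm", "measure-id", "1029TH")

# New, valid shape: (domain, unique id).
new_identifier = ("eafm", "1029TH")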
@@ -94,11 +94,11 @@ class Measurement(CoordinatorEntity, SensorEntity):
        return self.coordinator.data["measures"][self.key]["parameterName"]

    @property
    def device_info(self):
    def device_info(self) -> DeviceInfo:
        """Return the device info."""
        return DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={(DOMAIN, "measure-id", self.station_id)},
            identifiers={(DOMAIN, self.station_id)},
            manufacturer="https://environment.data.gov.uk/",
            model=self.parameter_name,
            name=f"{self.station_name} {self.parameter_name} {self.qualifier}",
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["env_canada"],
  "requirements": ["env-canada==0.12.4"]
  "requirements": ["env-canada==0.13.2"]
}
@@ -524,14 +524,10 @@ class EsphomeAssistSatellite(
        self._active_pipeline_index = 0

        maybe_pipeline_index = 0
        while True:
            if not (ww_entity_id := self.get_wake_word_entity(maybe_pipeline_index)):
                break

            if not (ww_state := self.hass.states.get(ww_entity_id)):
                continue

            if ww_state.state == wake_word_phrase:
        while ww_entity_id := self.get_wake_word_entity(maybe_pipeline_index):
            if (
                ww_state := self.hass.states.get(ww_entity_id)
            ) and ww_state.state == wake_word_phrase:
                # First match
                self._active_pipeline_index = maybe_pipeline_index
                break
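The rewrite above collapses a break/continue ladder into a single walrus-driven while loop: iteration stops as soon as the entity lookup returns a falsy id, and the match test only runs when both lookups succeed. A sketch of the same shape outside Home Assistant (all names are illustrative, and the index increment is assumed to live elsewhere in the real method):

def first_match(get_item, predicate):
    """Return the index of the first item matching predicate, else None."""
    index = 0
    while item := get_item(index):
        if predicate(item):
            return index
        index += 1
    return None

items = ["okay_nabu", "hey_jarvis"]
get = lambda i: items[i] if i < len(items) else None
print(first_match(get, lambda w: w == "hey_jarvis"))  # 1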
@@ -189,6 +189,7 @@ async def platform_async_setup_entry(
    info_type: type[_InfoT],
    entity_type: type[_EntityT],
    state_type: type[_StateT],
    info_filter: Callable[[_InfoT], bool] | None = None,
) -> None:
    """Set up an esphome platform.
@@ -208,10 +209,22 @@ async def platform_async_setup_entry(
        entity_type,
        state_type,
    )

    if info_filter is not None:

        def on_filtered_update(infos: list[EntityInfo]) -> None:
            on_static_info_update(
                [info for info in infos if info_filter(cast(_InfoT, info))]
            )

        info_callback = on_filtered_update
    else:
        info_callback = on_static_info_update

    entry_data.cleanup_callbacks.append(
        entry_data.async_register_static_info_callback(
            info_type,
            on_static_info_update,
            info_callback,
        )
    )
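The info_filter hook added above wraps the static-info callback so a platform only ever sees the entity infos it can handle, while unfiltered platforms keep the original callback with no extra overhead. The same wrapping pattern in isolation, as a minimal sketch (names illustrative):

from collections.abc import Callable

def make_filtered_callback(
    callback: Callable[[list[int]], None],
    predicate: Callable[[int], bool] | None,
) -> Callable[[list[int]], None]:
    """Return the callback unchanged, or a wrapper that filters its input first."""
    if predicate is None:
        return callback

    def filtered(items: list[int]) -> None:
        callback([item for item in items if predicate(item)])

    return filtered

cb = make_filtered_callback(print, lambda n: n % 2 == 0)
cb([1, 2, 3, 4])  # prints [2, 4]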
@@ -29,6 +29,7 @@ from aioesphomeapi import (
    Event,
    EventInfo,
    FanInfo,
    InfraredInfo,
    LightInfo,
    LockInfo,
    MediaPlayerInfo,
@@ -85,6 +86,7 @@ INFO_TYPE_TO_PLATFORM: dict[type[EntityInfo], Platform] = {
    DateTimeInfo: Platform.DATETIME,
    EventInfo: Platform.EVENT,
    FanInfo: Platform.FAN,
    InfraredInfo: Platform.INFRARED,
    LightInfo: Platform.LIGHT,
    LockInfo: Platform.LOCK,
    MediaPlayerInfo: Platform.MEDIA_PLAYER,
homeassistant/components/esphome/infrared.py (new file, 59 lines)
@@ -0,0 +1,59 @@
"""Infrared platform for ESPHome."""

from __future__ import annotations

from functools import partial
import logging

from aioesphomeapi import EntityState, InfraredCapability, InfraredInfo

from homeassistant.components.infrared import InfraredCommand, InfraredEntity
from homeassistant.core import callback

from .entity import (
    EsphomeEntity,
    convert_api_error_ha_error,
    platform_async_setup_entry,
)

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0


class EsphomeInfraredEntity(EsphomeEntity[InfraredInfo, EntityState], InfraredEntity):
    """ESPHome infrared entity using native API."""

    @callback
    def _on_device_update(self) -> None:
        """Call when device updates or entry data changes."""
        super()._on_device_update()
        if self._entry_data.available:
            # Infrared entities should go available as soon as the device comes online
            self.async_write_ha_state()

    @convert_api_error_ha_error
    async def async_send_command(self, command: InfraredCommand) -> None:
        """Send an IR command."""
        timings = [
            interval
            for timing in command.get_raw_timings()
            for interval in (timing.high_us, -timing.low_us)
        ]
        _LOGGER.debug("Sending command: %s", timings)

        self._client.infrared_rf_transmit_raw_timings(
            self._static_info.key,
            carrier_frequency=command.modulation,
            timings=timings,
            device_id=self._static_info.device_id,
        )


async_setup_entry = partial(
    platform_async_setup_entry,
    info_type=InfraredInfo,
    entity_type=EsphomeInfraredEntity,
    state_type=EntityState,
    info_filter=lambda info: bool(info.capabilities & InfraredCapability.TRANSMITTER),
)
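The list comprehension in async_send_command flattens each raw timing pair into the signed convention many IR stacks use: positive microseconds while the carrier is on, negative while it is off. A small illustration with a stand-in timing type (the Timing dataclass and its values are made up, not the aioesphomeapi type):

from dataclasses import dataclass

@dataclass
class Timing:  # illustrative stand-in for a raw IR timing pair
    high_us: int
    low_us: int

raw = [Timing(560, 560), Timing(560, 1690)]
timings = [
    interval
    for timing in raw
    for interval in (timing.high_us, -timing.low_us)
]
print(timings)  # [560, -560, 560, -1690]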
@@ -241,7 +241,7 @@ class EsphomeLight(EsphomeEntity[LightInfo, LightState], LightEntity):

        if (color_temp_k := kwargs.get(ATTR_COLOR_TEMP_KELVIN)) is not None:
            # Do not use kelvin_to_mired here to prevent precision loss
            data["color_temperature"] = 1000000.0 / color_temp_k
            data["color_temperature"] = 1_000_000.0 / color_temp_k
        if color_temp_modes := _filter_color_modes(
            color_modes, LightColorCapability.COLOR_TEMPERATURE
        ):
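The comment in that hunk is about precision: mireds are defined as 1,000,000 / kelvin, and kelvin_to_mired-style helpers typically round to a whole mired, so converting through them and back drifts by a fraction of a percent. A worked example of the drift the inline float division avoids:

color_temp_k = 6493
mired = 1_000_000.0 / color_temp_k  # ~154.01, kept as a float
rounded = round(mired)              # 154, what an integer-mired helper would return
print(1_000_000.0 / rounded)        # ~6493.5 K after the round trip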
@@ -275,8 +275,11 @@ class FibaroController:
            # otherwise add the first visible device in the group
            # which is a hack, but solves a problem with FGT having
            # hidden compatibility devices before the real device
            if last_climate_parent != device.parent_fibaro_id or (
                device.has_endpoint_id and last_endpoint != device.endpoint_id
            # Second hack is for quickapps which have parent id 0 and no children
            if (
                last_climate_parent != device.parent_fibaro_id
                or (device.has_endpoint_id and last_endpoint != device.endpoint_id)
                or device.parent_fibaro_id == 0
            ):
                _LOGGER.debug("Handle separately")
                self.fibaro_devices[platform].append(device)
@@ -154,7 +154,7 @@
    },
    "issues": {
      "deprecated_fireplace_switch": {
        "description": "The fireplace mode switch entity `{entity_id}` is deprecated and will be removed in a future version.\n\nFireplace mode has been moved to a climate preset on the climate entity to better match the device interface.\n\nPlease update your automations to use the `climate.set_preset_mode` action with preset mode `fireplace` instead of using the switch entity.\n\nAfter updating your automations, you can safely disable this switch entity.",
        "description": "The fireplace mode switch entity `{entity_id}` is deprecated and will be removed in Home Assistant 2026.9.\n\nFireplace mode has been moved to a climate preset on the climate entity to better match the device interface.\n\nPlease update your automations to use the `climate.set_preset_mode` action with preset mode `fireplace` instead of using the switch entity.\n\nAfter updating your automations, you can safely disable this switch entity.",
        "title": "Fireplace mode switch is deprecated"
      }
    }
@@ -91,6 +91,7 @@ async def async_setup_entry(
                hass,
                DOMAIN,
                f"deprecated_switch_{fireplace_switch_unique_id}",
                breaks_in_ha_version="2026.9.0",
                is_fixable=False,
                issue_domain=DOMAIN,
                severity=IssueSeverity.WARNING,
@@ -102,7 +103,7 @@ async def async_setup_entry(
            entities.append(FlexitSwitch(coordinator, description))
        else:
            entities.append(FlexitSwitch(coordinator, description))
        async_add_entities(entities)
    async_add_entities(entities)


PARALLEL_UPDATES = 1
@@ -21,5 +21,5 @@
  "integration_type": "system",
  "preview_features": { "winter_mode": {} },
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20260226.0"]
  "requirements": ["home-assistant-frontend==20260304.0"]
}
@@ -14,5 +14,5 @@
  "iot_class": "local_polling",
  "mqtt": ["fully/deviceInfo/+"],
  "quality_scale": "bronze",
  "requirements": ["python-fullykiosk==0.0.14"]
  "requirements": ["python-fullykiosk==0.0.15"]
}
@@ -78,6 +78,12 @@ query ($owner: String!, $repository: String!) {
        number
      }
    }
    merged_pull_request: pullRequests(
      first: 1
      states: MERGED
    ) {
      total: totalCount
    }
    release: latestRelease {
      name
      url
@@ -28,6 +28,9 @@
    "latest_tag": {
      "default": "mdi:tag"
    },
    "merged_pulls_count": {
      "default": "mdi:source-merge"
    },
    "pulls_count": {
      "default": "mdi:source-pull"
    },
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["aiogithubapi"],
  "requirements": ["aiogithubapi==24.6.0"]
  "requirements": ["aiogithubapi==26.0.0"]
}
@@ -75,6 +75,13 @@ SENSOR_DESCRIPTIONS: tuple[GitHubSensorEntityDescription, ...] = (
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda data: data["pull_request"]["total"],
    ),
    GitHubSensorEntityDescription(
        key="merged_pulls_count",
        translation_key="merged_pulls_count",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.TOTAL,
        value_fn=lambda data: data["merged_pull_request"]["total"],
    ),
    GitHubSensorEntityDescription(
        key="latest_commit",
        translation_key="latest_commit",
@@ -48,6 +48,10 @@
    "latest_tag": {
      "name": "Latest tag"
    },
    "merged_pulls_count": {
      "name": "Merged pull requests",
      "unit_of_measurement": "pull requests"
    },
    "pulls_count": {
      "name": "Pull requests",
      "unit_of_measurement": "pull requests"
@@ -54,6 +54,10 @@
    {
      "connectable": false,
      "local_name": "GVH5110*"
    },
    {
      "connectable": false,
      "local_name": "GV5140*"
    },
    {
      "connectable": false,
      "manufacturer_id": 1,
@@ -140,5 +144,5 @@
  "documentation": "https://www.home-assistant.io/integrations/govee_ble",
  "integration_type": "device",
  "iot_class": "local_push",
  "requirements": ["govee-ble==0.44.0"]
  "requirements": ["govee-ble==1.2.0"]
}
@@ -21,6 +21,7 @@ from homeassistant.components.sensor import (
)
from homeassistant.const import (
    CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
    CONCENTRATION_PARTS_PER_MILLION,
    PERCENTAGE,
    SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
    UnitOfTemperature,
@@ -72,6 +73,12 @@ SENSOR_DESCRIPTIONS = {
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        state_class=SensorStateClass.MEASUREMENT,
    ),
    (DeviceClass.CO2, Units.CONCENTRATION_PARTS_PER_MILLION): SensorEntityDescription(
        key=f"{DeviceClass.CO2}_{Units.CONCENTRATION_PARTS_PER_MILLION}",
        device_class=SensorDeviceClass.CO2,
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
        state_class=SensorStateClass.MEASUREMENT,
    ),
}
@@ -10,5 +10,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "loggers": ["apyhiveapi"],
  "requirements": ["pyhive-integration==1.0.7"]
  "requirements": ["pyhive-integration==1.0.8"]
}
@@ -88,6 +88,17 @@ class HomematicipHeatingGroup(HomematicipGenericEntity, ClimateEntity):
        if device.actualTemperature is None:
            self._simple_heating = self._first_radiator_thermostat

    @property
    def available(self) -> bool:
        """Heating group available.

        A heating group must be available, and should not be affected by the
        individual availability of group members.
        This allows controlling the temperature even when individual group
        members are not available.
        """
        return True

    @property
    def device_info(self) -> DeviceInfo:
        """Return device specific attributes."""
@@ -312,6 +312,17 @@ class HomematicipCoverShutterGroup(HomematicipGenericEntity, CoverEntity):
        device.modelType = f"HmIP-{post}"
        super().__init__(hap, device, post, is_multi_channel=False)

    @property
    def available(self) -> bool:
        """Cover shutter group available.

        A cover shutter group must be available, and should not be affected by
        the individual availability of group members.
        This allows controlling the shutters even when individual group
        members are not available.
        """
        return True

    @property
    def current_cover_position(self) -> int | None:
        """Return current position of cover."""
@@ -610,6 +610,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = (
        key="active_liter_lpm",
        translation_key="active_liter_lpm",
        native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE,
        device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
        state_class=SensorStateClass.MEASUREMENT,
        has_fn=lambda data: data.measurement.active_liter_lpm is not None,
        value_fn=lambda data: data.measurement.active_liter_lpm,
@@ -43,6 +43,7 @@ from homeassistant.const import (
    STATE_UNKNOWN,
)
from homeassistant.core import Event, HomeAssistant, State, callback
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv, state as state_helper
from homeassistant.helpers.entity_values import EntityValues
@@ -61,6 +62,7 @@ from .const import (
    CLIENT_ERROR_V2,
    CODE_INVALID_INPUTS,
    COMPONENT_CONFIG_SCHEMA_CONNECTION,
    COMPONENT_CONFIG_SCHEMA_CONNECTION_VALIDATORS,
    CONF_API_VERSION,
    CONF_BUCKET,
    CONF_COMPONENT_CONFIG,
@@ -79,7 +81,6 @@ from .const import (
    CONF_TAGS_ATTRIBUTES,
    CONNECTION_ERROR,
    DEFAULT_API_VERSION,
    DEFAULT_HOST,
    DEFAULT_HOST_V2,
    DEFAULT_MEASUREMENT_ATTR,
    DEFAULT_SSL_V2,
@@ -104,6 +105,7 @@ from .const import (
    WRITE_ERROR,
    WROTE_MESSAGE,
)
from .issue import async_create_deprecated_yaml_issue

_LOGGER = logging.getLogger(__name__)

@@ -137,7 +139,7 @@ def create_influx_url(conf: dict) -> dict:

def validate_version_specific_config(conf: dict) -> dict:
    """Ensure correct config fields are provided based on API version used."""
    if conf[CONF_API_VERSION] == API_VERSION_2:
    if conf.get(CONF_API_VERSION, DEFAULT_API_VERSION) == API_VERSION_2:
        if CONF_TOKEN not in conf:
            raise vol.Invalid(
                f"{CONF_TOKEN} and {CONF_BUCKET} are required when"
@@ -193,32 +195,13 @@ _INFLUX_BASE_SCHEMA = INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
    }
)

INFLUX_SCHEMA = vol.All(
    _INFLUX_BASE_SCHEMA.extend(COMPONENT_CONFIG_SCHEMA_CONNECTION),
    validate_version_specific_config,
    create_influx_url,
INFLUX_SCHEMA = _INFLUX_BASE_SCHEMA.extend(
    COMPONENT_CONFIG_SCHEMA_CONNECTION_VALIDATORS
)


CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.All(
            cv.deprecated(CONF_API_VERSION),
            cv.deprecated(CONF_HOST),
            cv.deprecated(CONF_PATH),
            cv.deprecated(CONF_PORT),
            cv.deprecated(CONF_SSL),
            cv.deprecated(CONF_VERIFY_SSL),
            cv.deprecated(CONF_SSL_CA_CERT),
            cv.deprecated(CONF_USERNAME),
            cv.deprecated(CONF_PASSWORD),
            cv.deprecated(CONF_DB_NAME),
            cv.deprecated(CONF_TOKEN),
            cv.deprecated(CONF_ORG),
            cv.deprecated(CONF_BUCKET),
            INFLUX_SCHEMA,
        )
    },
    {DOMAIN: vol.All(INFLUX_SCHEMA, validate_version_specific_config)},
    extra=vol.ALLOW_EXTRA,
)

@@ -499,23 +482,35 @@ def get_influx_connection(  # noqa: C901

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the InfluxDB component."""
    conf = config.get(DOMAIN)
    if DOMAIN not in config:
        return True

    if conf is not None:
        if CONF_HOST not in conf and conf[CONF_API_VERSION] == DEFAULT_API_VERSION:
            conf[CONF_HOST] = DEFAULT_HOST

        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN,
                context={"source": SOURCE_IMPORT},
                data=conf,
            )
        )
    hass.async_create_task(_async_setup(hass, config[DOMAIN]))

    return True


async def _async_setup(hass: HomeAssistant, config: dict[str, Any]) -> None:
    """Import YAML configuration into a config entry."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_IMPORT},
        data=config,
    )
    if (
        result.get("type") is FlowResultType.ABORT
        and (reason := result["reason"]) != "single_instance_allowed"
    ):
        async_create_deprecated_yaml_issue(hass, error=reason)
        return

    # If we are here, the entry already exists (single instance allowed)
    if config.keys() & (
        {k.schema for k in COMPONENT_CONFIG_SCHEMA_CONNECTION} - {CONF_PRECISION}
    ):
        async_create_deprecated_yaml_issue(hass)


async def async_setup_entry(hass: HomeAssistant, entry: InfluxDBConfigEntry) -> bool:
    """Set up InfluxDB from a config entry."""
    data = entry.data
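async_setup above defers the import to a background task so a slow or failing import cannot block startup; _async_setup then maps the flow result onto one of two repair issues. A condensed sketch of that branching, with the flow-result shape simplified to a plain dict:

def classify_import(result: dict, has_explicit_connection_keys: bool) -> str | None:
    """Return the repair issue id a YAML import should raise, if any."""
    if result["type"] == "abort" and result["reason"] != "single_instance_allowed":
        return f"deprecated_yaml_import_issue_{result['reason']}"  # import failed
    if has_explicit_connection_keys:
        return "deprecated_yaml"  # import fine, but deprecated YAML keys remain
    return None

print(classify_import({"type": "abort", "reason": "cannot_connect"}, True))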
@@ -31,7 +31,7 @@ from homeassistant.helpers.selector import (
)
from homeassistant.helpers.storage import STORAGE_DIR

from . import DOMAIN, get_influx_connection
from . import DOMAIN, create_influx_url, get_influx_connection
from .const import (
    API_VERSION_2,
    CONF_API_VERSION,
@@ -40,8 +40,11 @@ from .const import (
    CONF_ORG,
    CONF_SSL_CA_CERT,
    DEFAULT_API_VERSION,
    DEFAULT_BUCKET,
    DEFAULT_DATABASE,
    DEFAULT_HOST,
    DEFAULT_PORT,
    DEFAULT_VERIFY_SSL,
)

_LOGGER = logging.getLogger(__name__)
@@ -240,14 +243,17 @@ class InfluxDBConfigFlow(ConfigFlow, domain=DOMAIN):

    async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
        """Handle the initial step."""
        host = import_data.get(CONF_HOST)
        database = import_data.get(CONF_DB_NAME)
        bucket = import_data.get(CONF_BUCKET)
        import_data = {**import_data}
        import_data.setdefault(CONF_API_VERSION, DEFAULT_API_VERSION)
        import_data.setdefault(CONF_VERIFY_SSL, DEFAULT_VERIFY_SSL)
        import_data.setdefault(CONF_DB_NAME, DEFAULT_DATABASE)
        import_data.setdefault(CONF_BUCKET, DEFAULT_BUCKET)

        api_version = import_data.get(CONF_API_VERSION)
        ssl = import_data.get(CONF_SSL)
        api_version = import_data[CONF_API_VERSION]

        if api_version == DEFAULT_API_VERSION:
            host = import_data.get(CONF_HOST, DEFAULT_HOST)
            database = import_data[CONF_DB_NAME]
            title = f"{database} ({host})"
            data = {
                CONF_API_VERSION: api_version,
@@ -256,21 +262,23 @@ class InfluxDBConfigFlow(ConfigFlow, domain=DOMAIN):
                CONF_USERNAME: import_data.get(CONF_USERNAME),
                CONF_PASSWORD: import_data.get(CONF_PASSWORD),
                CONF_DB_NAME: database,
                CONF_SSL: ssl,
                CONF_SSL: import_data.get(CONF_SSL),
                CONF_PATH: import_data.get(CONF_PATH),
                CONF_VERIFY_SSL: import_data.get(CONF_VERIFY_SSL),
                CONF_VERIFY_SSL: import_data[CONF_VERIFY_SSL],
                CONF_SSL_CA_CERT: import_data.get(CONF_SSL_CA_CERT),
            }
        else:
            create_influx_url(import_data)  # Only modifies dict for api_version == 2
            bucket = import_data[CONF_BUCKET]
            url = import_data.get(CONF_URL)
            title = f"{bucket} ({url})"
            data = {
                CONF_API_VERSION: api_version,
                CONF_URL: import_data.get(CONF_URL),
                CONF_URL: url,
                CONF_TOKEN: import_data.get(CONF_TOKEN),
                CONF_ORG: import_data.get(CONF_ORG),
                CONF_BUCKET: bucket,
                CONF_VERIFY_SSL: import_data.get(CONF_VERIFY_SSL),
                CONF_VERIFY_SSL: import_data[CONF_VERIFY_SSL],
                CONF_SSL_CA_CERT: import_data.get(CONF_SSL_CA_CERT),
            }
@@ -154,3 +154,14 @@ COMPONENT_CONFIG_SCHEMA_CONNECTION = {
    vol.Inclusive(CONF_ORG, "v2_authentication"): cv.string,
    vol.Optional(CONF_BUCKET, default=DEFAULT_BUCKET): cv.string,
}

# Same keys without defaults, used in CONFIG_SCHEMA to validate
# without injecting default values (so we can detect explicit keys).
COMPONENT_CONFIG_SCHEMA_CONNECTION_VALIDATORS = {
    (
        vol.Optional(k.schema)
        if isinstance(k, vol.Optional) and k.default is not vol.UNDEFINED
        else k
    ): v
    for k, v in COMPONENT_CONFIG_SCHEMA_CONNECTION.items()
}
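The comprehension above matters because vol.Optional with a default injects that key during validation; CONFIG_SCHEMA must see only the keys the user actually wrote so the import path can tell explicit deprecated options from injected defaults. A self-contained sketch of the difference (the schema key is illustrative):

import voluptuous as vol

WITH_DEFAULTS = {vol.Optional("host", default="localhost"): str}
NO_DEFAULTS = {
    (
        vol.Optional(k.schema)
        if isinstance(k, vol.Optional) and k.default is not vol.UNDEFINED
        else k
    ): v
    for k, v in WITH_DEFAULTS.items()
}

print(vol.Schema(WITH_DEFAULTS)({}))  # {'host': 'localhost'} - default injected
print(vol.Schema(NO_DEFAULTS)({}))    # {} - absence preserved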
homeassistant/components/influxdb/issue.py (new file, 34 lines)
@@ -0,0 +1,34 @@
"""Issues for InfluxDB integration."""

from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue

from .const import DOMAIN


@callback
def async_create_deprecated_yaml_issue(
    hass: HomeAssistant, *, error: str | None = None
) -> None:
    """Create a repair issue for deprecated YAML connection configuration."""
    if error is None:
        issue_id = "deprecated_yaml"
        severity = IssueSeverity.WARNING
    else:
        issue_id = f"deprecated_yaml_import_issue_{error}"
        severity = IssueSeverity.ERROR

    async_create_issue(
        hass,
        DOMAIN,
        issue_id,
        is_fixable=False,
        issue_domain=DOMAIN,
        breaks_in_ha_version="2026.9.0",
        severity=severity,
        translation_key=issue_id,
        translation_placeholders={
            "domain": DOMAIN,
            "url": f"/config/integrations/dashboard/add?domain={DOMAIN}",
        },
    )
@@ -7,7 +7,6 @@
  "documentation": "https://www.home-assistant.io/integrations/influxdb",
  "iot_class": "local_push",
  "loggers": ["influxdb", "influxdb_client"],
  "quality_scale": "legacy",
  "requirements": ["influxdb==5.3.1", "influxdb-client==1.50.0"],
  "single_config_entry": true
}
@@ -54,5 +54,31 @@
        "title": "Choose InfluxDB version"
      }
    }
  },
  "issues": {
    "deprecated_yaml": {
      "description": "Configuring InfluxDB connection settings using YAML is being removed. Your existing YAML connection configuration has been imported into the UI automatically.\n\nRemove the `{domain}` connection and authentication keys from your `configuration.yaml` file and restart Home Assistant to fix this issue. Other options like `include`, `exclude`, and `tags` remain in YAML for now. \n\nThe following keys should be removed:\n- `api_version`\n- `host`\n- `port`\n- `ssl`\n- `verify_ssl`\n- `ssl_ca_cert`\n- `username`\n- `password`\n- `database`\n- `token`\n- `organization`\n- `bucket`\n- `path`",
      "title": "The InfluxDB YAML configuration is being removed"
    },
    "deprecated_yaml_import_issue_cannot_connect": {
      "description": "Configuring InfluxDB connection settings using YAML is being removed but the import failed because Home Assistant could not connect to the InfluxDB server.\n\nPlease correct your YAML configuration and restart Home Assistant.\n\nAlternatively you can remove the `{domain}` connection and authentication keys from your `configuration.yaml` file and continue to [set up the integration]({url}) manually. \n\nThe following keys should be removed:\n- `api_version`\n- `host`\n- `port`\n- `ssl`\n- `verify_ssl`\n- `ssl_ca_cert`\n- `username`\n- `password`\n- `database`\n- `token`\n- `organization`\n- `bucket`\n- `path`",
      "title": "Failed to import InfluxDB YAML configuration"
    },
    "deprecated_yaml_import_issue_invalid_auth": {
      "description": "Configuring InfluxDB connection settings using YAML is being removed but the import failed because the provided credentials are invalid.\n\nPlease correct your YAML configuration and restart Home Assistant.\n\nAlternatively you can remove the `{domain}` connection and authentication keys from your `configuration.yaml` file and continue to [set up the integration]({url}) manually. \n\nThe following keys should be removed:\n- `api_version`\n- `host`\n- `port`\n- `ssl`\n- `verify_ssl`\n- `ssl_ca_cert`\n- `username`\n- `password`\n- `database`\n- `token`\n- `organization`\n- `bucket`\n- `path`",
      "title": "[%key:component::influxdb::issues::deprecated_yaml_import_issue_cannot_connect::title%]"
    },
    "deprecated_yaml_import_issue_invalid_database": {
      "description": "Configuring InfluxDB connection settings using YAML is being removed but the import failed because the specified database was not found.\n\nPlease correct your YAML configuration and restart Home Assistant.\n\nAlternatively you can remove the `{domain}` connection and authentication keys from your `configuration.yaml` file and continue to [set up the integration]({url}) manually. \n\nThe following keys should be removed:\n- `api_version`\n- `host`\n- `port`\n- `ssl`\n- `verify_ssl`\n- `ssl_ca_cert`\n- `username`\n- `password`\n- `database`\n- `token`\n- `organization`\n- `bucket`\n- `path`",
      "title": "[%key:component::influxdb::issues::deprecated_yaml_import_issue_cannot_connect::title%]"
    },
    "deprecated_yaml_import_issue_ssl_error": {
      "description": "Configuring InfluxDB connection settings using YAML is being removed but the import failed due to an SSL certificate error.\n\nPlease correct your YAML configuration and restart Home Assistant.\n\nAlternatively you can remove the `{domain}` connection and authentication keys from your `configuration.yaml` file and continue to [set up the integration]({url}) manually. \n\nThe following keys should be removed:\n- `api_version`\n- `host`\n- `port`\n- `ssl`\n- `verify_ssl`\n- `ssl_ca_cert`\n- `username`\n- `password`\n- `database`\n- `token`\n- `organization`\n- `bucket`\n- `path`",
      "title": "[%key:component::influxdb::issues::deprecated_yaml_import_issue_cannot_connect::title%]"
    },
    "deprecated_yaml_import_issue_unknown": {
      "description": "Configuring InfluxDB connection settings using YAML is being removed but the import failed due to an unknown error.\n\nPlease correct your YAML configuration and restart Home Assistant.\n\nAlternatively you can remove the `{domain}` connection and authentication keys from your `configuration.yaml` file and continue to [set up the integration]({url}) manually. \n\nThe following keys should be removed:\n- `api_version`\n- `host`\n- `port`\n- `ssl`\n- `verify_ssl`\n- `ssl_ca_cert`\n- `username`\n- `password`\n- `database`\n- `token`\n- `organization`\n- `bucket`\n- `path`",
      "title": "[%key:component::influxdb::issues::deprecated_yaml_import_issue_cannot_connect::title%]"
    }
  }
}
@@ -2,66 +2,21 @@

from __future__ import annotations

from dataclasses import dataclass
from datetime import timedelta
import logging

import pyiss
import requests
from requests.exceptions import HTTPError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)
from .coordinator import IssConfigEntry, IssDataUpdateCoordinator

PLATFORMS = [Platform.SENSOR]


@dataclass
class IssData:
    """Dataclass representation of data returned from pyiss."""

    number_of_people_in_space: int
    current_location: dict[str, str]


def update(iss: pyiss.ISS) -> IssData:
    """Retrieve data from the pyiss API."""
    return IssData(
        number_of_people_in_space=iss.number_of_people_in_space(),
        current_location=iss.current_location(),
    )


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: IssConfigEntry) -> bool:
    """Set up this integration using UI."""
    hass.data.setdefault(DOMAIN, {})

    iss = pyiss.ISS()

    async def async_update() -> IssData:
        try:
            return await hass.async_add_executor_job(update, iss)
        except (HTTPError, requests.exceptions.ConnectionError) as ex:
            raise UpdateFailed("Unable to retrieve data") from ex

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,
        name=DOMAIN,
        update_method=async_update,
        update_interval=timedelta(seconds=60),
    )
    coordinator = IssDataUpdateCoordinator(hass, entry)

    await coordinator.async_config_entry_first_refresh()

    hass.data[DOMAIN] = coordinator
    entry.runtime_data = coordinator

    entry.async_on_unload(entry.add_update_listener(update_listener))

@@ -70,13 +25,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: IssConfigEntry) -> bool:
    """Handle removal of an entry."""
    if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
        del hass.data[DOMAIN]
    return unload_ok
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
async def update_listener(hass: HomeAssistant, entry: IssConfigEntry) -> None:
    """Handle options update."""
    await hass.config_entries.async_reload(entry.entry_id)
@@ -4,16 +4,12 @@ from __future__ import annotations

import voluptuous as vol

from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
)
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
from homeassistant.const import CONF_SHOW_ON_MAP
from homeassistant.core import callback

from .const import DEFAULT_NAME, DOMAIN
from .coordinator import IssConfigEntry


class ISSConfigFlow(ConfigFlow, domain=DOMAIN):
@@ -24,7 +20,7 @@ class ISSConfigFlow(ConfigFlow, domain=DOMAIN):
    @staticmethod
    @callback
    def async_get_options_flow(
        config_entry: ConfigEntry,
        config_entry: IssConfigEntry,
    ) -> OptionsFlowHandler:
        """Get the options flow for this handler."""
        return OptionsFlowHandler()
@@ -3,3 +3,5 @@
DOMAIN = "iss"

DEFAULT_NAME = "ISS"

MAX_CONSECUTIVE_FAILURES = 5
homeassistant/components/iss/coordinator.py (new file, 76 lines)
@@ -0,0 +1,76 @@
"""DataUpdateCoordinator for the ISS integration."""

from __future__ import annotations

from dataclasses import dataclass
from datetime import timedelta
import logging

import pyiss
import requests
from requests.exceptions import HTTPError

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN, MAX_CONSECUTIVE_FAILURES

type IssConfigEntry = ConfigEntry[IssDataUpdateCoordinator]

_LOGGER = logging.getLogger(__name__)


@dataclass
class IssData:
    """Dataclass representation of data returned from pyiss."""

    number_of_people_in_space: int
    current_location: dict[str, str]


class IssDataUpdateCoordinator(DataUpdateCoordinator[IssData]):
    """ISS coordinator that tolerates transient API failures."""

    config_entry: IssConfigEntry

    def __init__(self, hass: HomeAssistant, entry: IssConfigEntry) -> None:
        """Initialize the ISS coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name=DOMAIN,
            update_interval=timedelta(seconds=60),
        )
        self._consecutive_failures = 0
        self.iss = pyiss.ISS()

    def _fetch_iss_data(self) -> IssData:
        """Fetch data from ISS API (blocking)."""
        return IssData(
            number_of_people_in_space=self.iss.number_of_people_in_space(),
            current_location=self.iss.current_location(),
        )

    async def _async_update_data(self) -> IssData:
        """Fetch data from the ISS API, tolerating transient failures."""
        try:
            data = await self.hass.async_add_executor_job(self._fetch_iss_data)
        except (HTTPError, requests.exceptions.ConnectionError) as err:
            self._consecutive_failures += 1
            if self.data is None:
                raise UpdateFailed("Unable to retrieve data") from err
            if self._consecutive_failures >= MAX_CONSECUTIVE_FAILURES:
                raise UpdateFailed(
                    f"Unable to retrieve data after {self._consecutive_failures} consecutive update failures"
                ) from err
            _LOGGER.debug(
                "Transient API error (%s/%s), using cached data: %s",
                self._consecutive_failures,
                MAX_CONSECUTIVE_FAILURES,
                err,
            )
            return self.data
        self._consecutive_failures = 0
        return data
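The error handling above has three regimes: fail immediately when there is no cached data yet, serve cached data through up to MAX_CONSECUTIVE_FAILURES - 1 consecutive errors, then give up so the entities go unavailable. The decision compressed into one function, outside the coordinator machinery (a sketch, not the integration's API):

MAX_CONSECUTIVE_FAILURES = 5

def on_fetch_error(cached, failures: int):
    """Decide what an update cycle does after a fetch error."""
    if cached is None:
        raise RuntimeError("no data yet")        # first refresh must fail loudly
    if failures >= MAX_CONSECUTIVE_FAILURES:
        raise RuntimeError("too many failures")  # stale data is no longer trustworthy
    return cached                                # tolerate a transient blip

print(on_fetch_error({"people": 7}, failures=2))  # returns the cached payload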
@@ -6,36 +6,32 @@ import logging
from typing import Any

from homeassistant.components.sensor import SensorEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, CONF_SHOW_ON_MAP
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import (
    CoordinatorEntity,
    DataUpdateCoordinator,
)
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import IssData
from .const import DEFAULT_NAME, DOMAIN
from .coordinator import IssConfigEntry, IssDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    entry: IssConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the sensor platform."""
    coordinator: DataUpdateCoordinator[IssData] = hass.data[DOMAIN]
    coordinator = entry.runtime_data

    show_on_map = entry.options.get(CONF_SHOW_ON_MAP, False)

    async_add_entities([IssSensor(coordinator, entry, show_on_map)])


class IssSensor(CoordinatorEntity[DataUpdateCoordinator[IssData]], SensorEntity):
class IssSensor(CoordinatorEntity[IssDataUpdateCoordinator], SensorEntity):
    """Implementation of the ISS sensor."""

    _attr_has_entity_name = True
@@ -43,8 +39,8 @@ class IssSensor(CoordinatorEntity[DataUpdateCoordinator[IssData]], SensorEntity)

    def __init__(
        self,
        coordinator: DataUpdateCoordinator[IssData],
        entry: ConfigEntry,
        coordinator: IssDataUpdateCoordinator,
        entry: IssConfigEntry,
        show: bool,
    ) -> None:
        """Initialize the sensor."""
@@ -7,5 +7,5 @@
  "iot_class": "local_push",
  "loggers": ["aionotify", "evdev"],
  "quality_scale": "legacy",
  "requirements": ["evdev==1.6.1", "asyncinotify==4.2.0"]
  "requirements": ["evdev==1.9.3", "asyncinotify==4.4.0"]
}
@@ -13,7 +13,7 @@
  "requirements": [
    "xknx==3.15.0",
    "xknxproject==3.8.2",
    "knx-frontend==2026.2.25.165736"
    "knx-frontend==2026.3.2.183756"
  ],
  "single_config_entry": true
}
@@ -67,6 +67,22 @@ NUMBER_SETTINGS_DATA = [
        fmt_from="format_round",
        fmt_to="format_round_back",
    ),
    PlenticoreNumberEntityDescription(
        key="active_power_limitation",
        device_class=NumberDeviceClass.POWER,
        entity_category=EntityCategory.CONFIG,
        entity_registry_enabled_default=False,
        icon="mdi:solar-power",
        name="Active Power Limitation",
        native_unit_of_measurement=UnitOfPower.WATT,
        native_max_value=10000,
        native_min_value=0,
        native_step=1,
        module_id="devices:local",
        data_id="Inverter:ActivePowerLimitation",
        fmt_from="format_round",
        fmt_to="format_round_back",
    ),
]
@@ -23,7 +23,7 @@ def _convert_uint8_to_percentage(value: Any) -> float:
class BrightnessChangedTrigger(EntityNumericalStateAttributeChangedTriggerBase):
    """Trigger for brightness changed."""

    _domain = DOMAIN
    _domains = {DOMAIN}
    _attribute = ATTR_BRIGHTNESS

    _converter = staticmethod(_convert_uint8_to_percentage)
@@ -34,7 +34,7 @@ class BrightnessCrossedThresholdTrigger(
):
    """Trigger for brightness crossed threshold."""

    _domain = DOMAIN
    _domains = {DOMAIN}
    _attribute = ATTR_BRIGHTNESS
    _converter = staticmethod(_convert_uint8_to_percentage)
@@ -10,7 +10,7 @@
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["pylutron_caseta"],
  "requirements": ["pylutron-caseta==0.26.0"],
  "requirements": ["pylutron-caseta==0.27.0"],
  "zeroconf": [
    {
      "properties": {
@@ -2,6 +2,8 @@

import logging

from chip.clusters import Objects as clusters

ADDON_SLUG = "core_matter_server"

CONF_INTEGRATION_CREATED_ADDON = "integration_created_addon"
@@ -15,3 +17,100 @@ ID_TYPE_DEVICE_ID = "deviceid"
ID_TYPE_SERIAL = "serial"

FEATUREMAP_ATTRIBUTE_ID = 65532

# --- Lock domain constants ---

# Shared field keys
ATTR_CREDENTIAL_RULE = "credential_rule"
ATTR_MAX_CREDENTIALS_PER_USER = "max_credentials_per_user"
ATTR_MAX_PIN_USERS = "max_pin_users"
ATTR_MAX_RFID_USERS = "max_rfid_users"
ATTR_MAX_USERS = "max_users"
ATTR_SUPPORTS_USER_MGMT = "supports_user_management"
ATTR_USER_INDEX = "user_index"
ATTR_USER_NAME = "user_name"
ATTR_USER_STATUS = "user_status"
ATTR_USER_TYPE = "user_type"

# Magic values
CLEAR_ALL_INDEX = 0xFFFE  # Matter spec: pass to ClearUser/ClearCredential to clear all

# Timed request timeout for lock commands that modify state.
# 10 seconds accounts for Thread network latency and retransmissions.
LOCK_TIMED_REQUEST_TIMEOUT_MS = 10000

# Credential field keys
ATTR_CREDENTIAL_DATA = "credential_data"
ATTR_CREDENTIAL_INDEX = "credential_index"
ATTR_CREDENTIAL_TYPE = "credential_type"

# Credential type strings
CRED_TYPE_FACE = "face"
CRED_TYPE_FINGERPRINT = "fingerprint"
CRED_TYPE_FINGER_VEIN = "finger_vein"
CRED_TYPE_PIN = "pin"
CRED_TYPE_RFID = "rfid"

# User status mapping (Matter DoorLock UserStatusEnum)
_UserStatus = clusters.DoorLock.Enums.UserStatusEnum
USER_STATUS_MAP: dict[int, str] = {
    _UserStatus.kAvailable: "available",
    _UserStatus.kOccupiedEnabled: "occupied_enabled",
    _UserStatus.kOccupiedDisabled: "occupied_disabled",
}
USER_STATUS_REVERSE_MAP: dict[str, int] = {v: k for k, v in USER_STATUS_MAP.items()}

# User type mapping (Matter DoorLock UserTypeEnum)
_UserType = clusters.DoorLock.Enums.UserTypeEnum
USER_TYPE_MAP: dict[int, str] = {
    _UserType.kUnrestrictedUser: "unrestricted_user",
    _UserType.kYearDayScheduleUser: "year_day_schedule_user",
    _UserType.kWeekDayScheduleUser: "week_day_schedule_user",
    _UserType.kProgrammingUser: "programming_user",
    _UserType.kNonAccessUser: "non_access_user",
    _UserType.kForcedUser: "forced_user",
    _UserType.kDisposableUser: "disposable_user",
    _UserType.kExpiringUser: "expiring_user",
    _UserType.kScheduleRestrictedUser: "schedule_restricted_user",
    _UserType.kRemoteOnlyUser: "remote_only_user",
}
USER_TYPE_REVERSE_MAP: dict[str, int] = {v: k for k, v in USER_TYPE_MAP.items()}

# Credential type mapping (Matter DoorLock CredentialTypeEnum)
_CredentialType = clusters.DoorLock.Enums.CredentialTypeEnum
CREDENTIAL_TYPE_MAP: dict[int, str] = {
    _CredentialType.kProgrammingPIN: "programming_pin",
    _CredentialType.kPin: CRED_TYPE_PIN,
    _CredentialType.kRfid: CRED_TYPE_RFID,
    _CredentialType.kFingerprint: CRED_TYPE_FINGERPRINT,
    _CredentialType.kFingerVein: CRED_TYPE_FINGER_VEIN,
    _CredentialType.kFace: CRED_TYPE_FACE,
    _CredentialType.kAliroCredentialIssuerKey: "aliro_credential_issuer_key",
    _CredentialType.kAliroEvictableEndpointKey: "aliro_evictable_endpoint_key",
    _CredentialType.kAliroNonEvictableEndpointKey: "aliro_non_evictable_endpoint_key",
}

# Credential rule mapping (Matter DoorLock CredentialRuleEnum)
_CredentialRule = clusters.DoorLock.Enums.CredentialRuleEnum
CREDENTIAL_RULE_MAP: dict[int, str] = {
    _CredentialRule.kSingle: "single",
    _CredentialRule.kDual: "dual",
    _CredentialRule.kTri: "tri",
}
CREDENTIAL_RULE_REVERSE_MAP: dict[str, int] = {
    v: k for k, v in CREDENTIAL_RULE_MAP.items()
}

# Reverse mapping for credential types (str -> int)
CREDENTIAL_TYPE_REVERSE_MAP: dict[str, int] = {
    v: k for k, v in CREDENTIAL_TYPE_MAP.items()
}

# Credential types allowed in set/clear services (excludes programming_pin, aliro_*)
SERVICE_CREDENTIAL_TYPES = [
    CRED_TYPE_PIN,
    CRED_TYPE_RFID,
    CRED_TYPE_FINGERPRINT,
    CRED_TYPE_FINGER_VEIN,
    CRED_TYPE_FACE,
]
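Each *_MAP / *_REVERSE_MAP pair above gives a round trip between the Matter enum integers used on the wire and the strings exposed to service calls; the reverse map is only total because each forward map never sends two enum values to the same string. A quick property check under that assumption (the integer values here are illustrative, not quoted from the Matter spec):

USER_STATUS_MAP = {0: "available", 1: "occupied_enabled", 3: "occupied_disabled"}
USER_STATUS_REVERSE_MAP = {v: k for k, v in USER_STATUS_MAP.items()}

# The round trip holds exactly when the forward mapping is injective.
assert len(USER_STATUS_REVERSE_MAP) == len(USER_STATUS_MAP)
assert all(USER_STATUS_REVERSE_MAP[v] == k for k, v in USER_STATUS_MAP.items())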
@@ -174,6 +174,27 @@
    }
  },
  "services": {
    "clear_lock_credential": {
      "service": "mdi:key-remove"
    },
    "clear_lock_user": {
      "service": "mdi:account-remove"
    },
    "get_lock_credential_status": {
      "service": "mdi:key-chain"
    },
    "get_lock_info": {
      "service": "mdi:lock-question"
    },
    "get_lock_users": {
      "service": "mdi:account-multiple"
    },
    "set_lock_credential": {
      "service": "mdi:key-plus"
    },
    "set_lock_user": {
      "service": "mdi:account-lock"
    },
    "water_heater_boost": {
      "service": "mdi:water-boiler"
    }
@@ -7,6 +7,7 @@ from dataclasses import dataclass
from typing import Any

from chip.clusters import Objects as clusters
from matter_server.common.errors import MatterError
from matter_server.common.models import EventType, MatterNodeEvent

from homeassistant.components.lock import (
@@ -17,32 +18,56 @@ from homeassistant.components.lock import (
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_CODE, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import LOGGER
from .const import (
    ATTR_CREDENTIAL_DATA,
    ATTR_CREDENTIAL_INDEX,
    ATTR_CREDENTIAL_RULE,
    ATTR_CREDENTIAL_TYPE,
    ATTR_USER_INDEX,
    ATTR_USER_NAME,
    ATTR_USER_STATUS,
    ATTR_USER_TYPE,
    LOCK_TIMED_REQUEST_TIMEOUT_MS,
    LOGGER,
)
from .entity import MatterEntity, MatterEntityDescription
from .helpers import get_matter
from .lock_helpers import (
    DoorLockFeature,
    GetLockCredentialStatusResult,
    GetLockInfoResult,
    GetLockUsersResult,
    SetLockCredentialResult,
    clear_lock_credential,
    clear_lock_user,
    get_lock_credential_status,
    get_lock_info,
    get_lock_users,
    set_lock_credential,
    set_lock_user,
)
from .models import MatterDiscoverySchema

DOOR_LOCK_OPERATION_SOURCE = {
    # mapping from operation source id's to textual representation
    0: "Unspecified",
    1: "Manual",  # [Optional]
    2: "Proprietary Remote",  # [Optional]
    3: "Keypad",  # [Optional]
    4: "Auto",  # [Optional]
    5: "Button",  # [Optional]
    6: "Schedule",  # [HDSCH]
    7: "Remote",  # [M]
    8: "RFID",  # [RID]
    9: "Biometric",  # [USR]
    10: "Aliro",  # [Aliro]
# Door lock operation source mapping (Matter DoorLock OperationSourceEnum)
_OperationSource = clusters.DoorLock.Enums.OperationSourceEnum
DOOR_LOCK_OPERATION_SOURCE: dict[int, str] = {
    _OperationSource.kUnspecified: "Unspecified",
    _OperationSource.kManual: "Manual",
    _OperationSource.kProprietaryRemote: "Proprietary Remote",
    _OperationSource.kKeypad: "Keypad",
    _OperationSource.kAuto: "Auto",
    _OperationSource.kButton: "Button",
    _OperationSource.kSchedule: "Schedule",
    _OperationSource.kRemote: "Remote",
    _OperationSource.kRfid: "RFID",
    _OperationSource.kBiometric: "Biometric",
    _OperationSource.kAliro: "Aliro",
}


DoorLockFeature = clusters.DoorLock.Bitmaps.Feature


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
@@ -98,17 +123,15 @@ class MatterLock(MatterEntity, LockEntity):
            node_event.data,
        )

        # handle the DoorLock events
        # Handle the DoorLock events
        node_event_data: dict[str, int] = node_event.data or {}
        match node_event.event_id:
            case (
                clusters.DoorLock.Events.LockOperation.event_id
            ):  # Lock cluster event 2
                # update the changed_by attribute to indicate lock operation source
            case clusters.DoorLock.Events.LockOperation.event_id:
                operation_source: int = node_event_data.get("operationSource", -1)
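                # Unknown source ids (including the -1 fallback) map to "Unknown".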
                self._attr_changed_by = DOOR_LOCK_OPERATION_SOURCE.get(
                source_name = DOOR_LOCK_OPERATION_SOURCE.get(
                    operation_source, "Unknown"
                )
                self._attr_changed_by = source_name
                self.async_write_ha_state()

    @property
@@ -146,7 +169,7 @@ class MatterLock(MatterEntity, LockEntity):
        code_bytes = code.encode() if code else None
        await self.send_device_command(
            command=clusters.DoorLock.Commands.LockDoor(code_bytes),
            timed_request_timeout_ms=1000,
            timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
        )

    async def async_unlock(self, **kwargs: Any) -> None:
@@ -168,12 +191,12 @@ class MatterLock(MatterEntity, LockEntity):
            # and unlatch on the HA 'open' command.
            await self.send_device_command(
                command=clusters.DoorLock.Commands.UnboltDoor(code_bytes),
                timed_request_timeout_ms=1000,
                timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
            )
        else:
            await self.send_device_command(
                command=clusters.DoorLock.Commands.UnlockDoor(code_bytes),
                timed_request_timeout_ms=1000,
                timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
            )

    async def async_open(self, **kwargs: Any) -> None:
@@ -190,7 +213,7 @@ class MatterLock(MatterEntity, LockEntity):
        code_bytes = code.encode() if code else None
        await self.send_device_command(
            command=clusters.DoorLock.Commands.UnlockDoor(code_bytes),
            timed_request_timeout_ms=1000,
            timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
        )

    @callback
@@ -256,6 +279,109 @@ class MatterLock(MatterEntity, LockEntity):
            supported_features |= LockEntityFeature.OPEN
        self._attr_supported_features = supported_features

    # --- Entity service methods ---

    async def async_set_lock_user(self, **kwargs: Any) -> None:
        """Set a lock user (full CRUD)."""
        try:
            await set_lock_user(
                self.matter_client,
                self._endpoint.node,
                user_index=kwargs.get(ATTR_USER_INDEX),
                user_name=kwargs.get(ATTR_USER_NAME),
                user_type=kwargs.get(ATTR_USER_TYPE),
                credential_rule=kwargs.get(ATTR_CREDENTIAL_RULE),
            )
        except MatterError as err:
            raise HomeAssistantError(
                f"Failed to set lock user on {self.entity_id}: {err}"
            ) from err

    async def async_clear_lock_user(self, **kwargs: Any) -> None:
        """Clear a lock user."""
        try:
            await clear_lock_user(
                self.matter_client,
                self._endpoint.node,
                kwargs[ATTR_USER_INDEX],
            )
        except MatterError as err:
            raise HomeAssistantError(
                f"Failed to clear lock user on {self.entity_id}: {err}"
            ) from err

    async def async_get_lock_info(self) -> GetLockInfoResult:
        """Get lock capabilities and configuration info."""
        try:
            return await get_lock_info(
                self.matter_client,
                self._endpoint.node,
            )
        except MatterError as err:
            raise HomeAssistantError(
                f"Failed to get lock info for {self.entity_id}: {err}"
            ) from err

    async def async_get_lock_users(self) -> GetLockUsersResult:
        """Get all users from the lock."""
        try:
            return await get_lock_users(
                self.matter_client,
                self._endpoint.node,
            )
        except MatterError as err:
            raise HomeAssistantError(
                f"Failed to get lock users for {self.entity_id}: {err}"
            ) from err

    async def async_set_lock_credential(self, **kwargs: Any) -> SetLockCredentialResult:
        """Set a credential on the lock."""
        try:
            return await set_lock_credential(
                self.matter_client,
                self._endpoint.node,
                credential_type=kwargs[ATTR_CREDENTIAL_TYPE],
                credential_data=kwargs[ATTR_CREDENTIAL_DATA],
                credential_index=kwargs.get(ATTR_CREDENTIAL_INDEX),
                user_index=kwargs.get(ATTR_USER_INDEX),
                user_status=kwargs.get(ATTR_USER_STATUS),
                user_type=kwargs.get(ATTR_USER_TYPE),
            )
        except MatterError as err:
            raise HomeAssistantError(
                f"Failed to set lock credential on {self.entity_id}: {err}"
            ) from err

    async def async_clear_lock_credential(self, **kwargs: Any) -> None:
        """Clear a credential from the lock."""
        try:
            await clear_lock_credential(
                self.matter_client,
                self._endpoint.node,
                credential_type=kwargs[ATTR_CREDENTIAL_TYPE],
                credential_index=kwargs[ATTR_CREDENTIAL_INDEX],
            )
        except MatterError as err:
            raise HomeAssistantError(
                f"Failed to clear lock credential on {self.entity_id}: {err}"
            ) from err

    async def async_get_lock_credential_status(
        self, **kwargs: Any
    ) -> GetLockCredentialStatusResult:
        """Get the status of a credential slot on the lock."""
        try:
            return await get_lock_credential_status(
                self.matter_client,
                self._endpoint.node,
                credential_type=kwargs[ATTR_CREDENTIAL_TYPE],
                credential_index=kwargs[ATTR_CREDENTIAL_INDEX],
            )
        except MatterError as err:
            raise HomeAssistantError(
                f"Failed to get credential status for {self.entity_id}: {err}"
            ) from err


DISCOVERY_SCHEMAS = [
    MatterDiscoverySchema(
843 homeassistant/components/matter/lock_helpers.py Normal file
@@ -0,0 +1,843 @@
"""Lock-specific helpers for the Matter integration.

Provides DoorLock cluster endpoint resolution, feature detection, and
business logic for lock user/credential management.
"""

from __future__ import annotations

from typing import TYPE_CHECKING, Any, TypedDict

from chip.clusters import Objects as clusters
from chip.clusters.Types import NullValue

from homeassistant.exceptions import HomeAssistantError, ServiceValidationError

from .const import (
    CRED_TYPE_FACE,
    CRED_TYPE_FINGER_VEIN,
    CRED_TYPE_FINGERPRINT,
    CRED_TYPE_PIN,
    CRED_TYPE_RFID,
    CREDENTIAL_RULE_MAP,
    CREDENTIAL_RULE_REVERSE_MAP,
    CREDENTIAL_TYPE_MAP,
    CREDENTIAL_TYPE_REVERSE_MAP,
    LOCK_TIMED_REQUEST_TIMEOUT_MS,
    USER_STATUS_MAP,
    USER_STATUS_REVERSE_MAP,
    USER_TYPE_MAP,
    USER_TYPE_REVERSE_MAP,
)

# Error translation keys (used in ServiceValidationError/HomeAssistantError)
ERR_CREDENTIAL_TYPE_NOT_SUPPORTED = "credential_type_not_supported"
ERR_INVALID_CREDENTIAL_DATA = "invalid_credential_data"

# SetCredential response status mapping (Matter DlStatus)
_DlStatus = clusters.DoorLock.Enums.DlStatus
SET_CREDENTIAL_STATUS_MAP: dict[int, str] = {
    _DlStatus.kSuccess: "success",
    _DlStatus.kFailure: "failure",
    _DlStatus.kDuplicate: "duplicate",
    _DlStatus.kOccupied: "occupied",
}

if TYPE_CHECKING:
    from matter_server.client import MatterClient
    from matter_server.client.models.node import MatterEndpoint, MatterNode

# DoorLock Feature bitmap from Matter SDK
DoorLockFeature = clusters.DoorLock.Bitmaps.Feature


# --- TypedDicts for service action responses ---


class LockUserCredentialData(TypedDict):
    """Credential data within a user response."""

    type: str
    index: int | None


class LockUserData(TypedDict):
    """User data returned from lock queries."""

    user_index: int | None
    user_name: str | None
    user_unique_id: int | None
    user_status: str
    user_type: str
    credential_rule: str
    credentials: list[LockUserCredentialData]
    next_user_index: int | None


class SetLockUserResult(TypedDict):
    """Result of set_lock_user service action."""

    user_index: int


class GetLockUsersResult(TypedDict):
    """Result of get_lock_users service action."""

    max_users: int
    users: list[LockUserData]


class GetLockInfoResult(TypedDict):
    """Result of get_lock_info service action."""

    supports_user_management: bool
    supported_credential_types: list[str]
    max_users: int | None
    max_pin_users: int | None
    max_rfid_users: int | None
    max_credentials_per_user: int | None
    min_pin_length: int | None
    max_pin_length: int | None
    min_rfid_length: int | None
    max_rfid_length: int | None


class SetLockCredentialResult(TypedDict):
    """Result of set_lock_credential service action."""

    credential_index: int
    user_index: int | None
    next_credential_index: int | None


class GetLockCredentialStatusResult(TypedDict):
    """Result of get_lock_credential_status service action."""

    credential_exists: bool
    user_index: int | None
    next_credential_index: int | None


def _get_lock_endpoint_from_node(node: MatterNode) -> MatterEndpoint | None:
    """Get the DoorLock endpoint from a node.

    Returns the first endpoint that has the DoorLock cluster, or None if not found.
    """
    for endpoint in node.endpoints.values():
        if endpoint.has_cluster(clusters.DoorLock):
            return endpoint
    return None


def _get_feature_map(endpoint: MatterEndpoint) -> int | None:
    """Read the DoorLock FeatureMap attribute from an endpoint."""
    value: int | None = endpoint.get_attribute_value(
        None, clusters.DoorLock.Attributes.FeatureMap
    )
    return value


def _lock_supports_usr_feature(endpoint: MatterEndpoint) -> bool:
    """Check if lock endpoint supports USR (User) feature.

    The USR feature indicates the lock supports user and credential management
    commands like SetUser, GetUser, SetCredential, etc.
    """
    feature_map = _get_feature_map(endpoint)
    if feature_map is None:
        return False
    return bool(feature_map & DoorLockFeature.kUser)


# --- Pure utility functions ---


def _get_attr(obj: Any, attr: str) -> Any:
    """Get attribute from object or dict.

    Matter SDK responses can be either dataclass objects or dicts depending on
    the SDK version and serialization context. NullValue (a truthy,
    non-iterable singleton) is normalized to None.
    """
    if isinstance(obj, dict):
        value = obj.get(attr)
    else:
        value = getattr(obj, attr, None)
    # The Matter SDK uses NullValue for nullable fields instead of None.
    if value is NullValue:
        return None
    return value


def _get_supported_credential_types(feature_map: int) -> list[str]:
    """Get list of supported credential types from feature map."""
    types = []
    if feature_map & DoorLockFeature.kPinCredential:
        types.append(CRED_TYPE_PIN)
    if feature_map & DoorLockFeature.kRfidCredential:
        types.append(CRED_TYPE_RFID)
    if feature_map & DoorLockFeature.kFingerCredentials:
        types.append(CRED_TYPE_FINGERPRINT)
    if feature_map & DoorLockFeature.kFaceCredentials:
        types.append(CRED_TYPE_FACE)
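    # Note: finger_vein is not listed here; it shares the kFingerCredentials
    # bit and is validated via _CREDENTIAL_TYPE_FEATURE_MAP below.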
    return types


def _format_user_response(user_data: Any) -> LockUserData | None:
    """Format GetUser response to API response format.

    Returns None if the user slot is empty (no userStatus).
    """
    if user_data is None:
        return None

    user_status = _get_attr(user_data, "userStatus")
    if user_status is None:
        return None

    creds = _get_attr(user_data, "credentials")
    credentials: list[LockUserCredentialData] = [
        LockUserCredentialData(
            type=CREDENTIAL_TYPE_MAP.get(_get_attr(cred, "credentialType"), "unknown"),
            index=_get_attr(cred, "credentialIndex"),
        )
        for cred in (creds or [])
    ]

    return LockUserData(
        user_index=_get_attr(user_data, "userIndex"),
        user_name=_get_attr(user_data, "userName"),
        user_unique_id=_get_attr(user_data, "userUniqueID"),
        user_status=USER_STATUS_MAP.get(user_status, "unknown"),
        user_type=USER_TYPE_MAP.get(_get_attr(user_data, "userType"), "unknown"),
        credential_rule=CREDENTIAL_RULE_MAP.get(
            _get_attr(user_data, "credentialRule"), "unknown"
        ),
        credentials=credentials,
        next_user_index=_get_attr(user_data, "nextUserIndex"),
    )


# --- Credential management helpers ---


class LockEndpointNotFoundError(HomeAssistantError):
    """Lock endpoint not found on node."""


class UsrFeatureNotSupportedError(ServiceValidationError):
    """Lock does not support USR (user management) feature."""


class UserSlotEmptyError(ServiceValidationError):
    """User slot is empty."""


class NoAvailableUserSlotsError(ServiceValidationError):
    """No available user slots on the lock."""


class CredentialTypeNotSupportedError(ServiceValidationError):
    """Lock does not support the requested credential type."""


class CredentialDataInvalidError(ServiceValidationError):
    """Credential data fails validation."""


class SetCredentialFailedError(HomeAssistantError):
    """SetCredential command returned a non-success status."""


def _get_lock_endpoint_or_raise(node: MatterNode) -> MatterEndpoint:
    """Get the DoorLock endpoint from a node or raise an error."""
    lock_endpoint = _get_lock_endpoint_from_node(node)
    if lock_endpoint is None:
        raise LockEndpointNotFoundError("No lock endpoint found on this device")
    return lock_endpoint


def _ensure_usr_support(lock_endpoint: MatterEndpoint) -> None:
    """Ensure the lock endpoint supports USR (user management) feature.

    Raises UsrFeatureNotSupportedError if the lock doesn't support user management.
    """
    if not _lock_supports_usr_feature(lock_endpoint):
        raise UsrFeatureNotSupportedError(
            "Lock does not support user/credential management"
        )


# --- High-level business logic functions ---


async def get_lock_info(
    matter_client: MatterClient,
    node: MatterNode,
) -> GetLockInfoResult:
    """Get lock capabilities and configuration info.

    Returns a typed dict with lock capability information.
    Raises HomeAssistantError if lock endpoint not found.
    """
    lock_endpoint = _get_lock_endpoint_or_raise(node)
    supports_usr = _lock_supports_usr_feature(lock_endpoint)

    # Get feature map for credential type detection
    feature_map = (
        lock_endpoint.get_attribute_value(None, clusters.DoorLock.Attributes.FeatureMap)
        or 0
    )

    result = GetLockInfoResult(
        supports_user_management=supports_usr,
        supported_credential_types=_get_supported_credential_types(feature_map),
        max_users=None,
        max_pin_users=None,
        max_rfid_users=None,
        max_credentials_per_user=None,
        min_pin_length=None,
        max_pin_length=None,
        min_rfid_length=None,
        max_rfid_length=None,
    )

    # Populate capacity info if USR feature is supported
    if supports_usr:
        result["max_users"] = lock_endpoint.get_attribute_value(
            None, clusters.DoorLock.Attributes.NumberOfTotalUsersSupported
        )
        result["max_pin_users"] = lock_endpoint.get_attribute_value(
            None, clusters.DoorLock.Attributes.NumberOfPINUsersSupported
        )
        result["max_rfid_users"] = lock_endpoint.get_attribute_value(
            None, clusters.DoorLock.Attributes.NumberOfRFIDUsersSupported
        )
        result["max_credentials_per_user"] = lock_endpoint.get_attribute_value(
            None, clusters.DoorLock.Attributes.NumberOfCredentialsSupportedPerUser
        )
        result["min_pin_length"] = lock_endpoint.get_attribute_value(
            None, clusters.DoorLock.Attributes.MinPINCodeLength
        )
        result["max_pin_length"] = lock_endpoint.get_attribute_value(
            None, clusters.DoorLock.Attributes.MaxPINCodeLength
        )
        result["min_rfid_length"] = lock_endpoint.get_attribute_value(
            None, clusters.DoorLock.Attributes.MinRFIDCodeLength
        )
        result["max_rfid_length"] = lock_endpoint.get_attribute_value(
            None, clusters.DoorLock.Attributes.MaxRFIDCodeLength
        )

    return result


async def set_lock_user(
    matter_client: MatterClient,
    node: MatterNode,
    *,
    user_index: int | None = None,
    user_name: str | None = None,
    user_unique_id: int | None = None,
    user_status: str | None = None,
    user_type: str | None = None,
    credential_rule: str | None = None,
) -> SetLockUserResult:
    """Add or update a user on the lock.

    When user_status, user_type, or credential_rule is None, defaults are used
    for new users and existing values are preserved for modifications.

    Returns typed dict with user_index on success.
    Raises HomeAssistantError on failure.
    """
    lock_endpoint = _get_lock_endpoint_or_raise(node)
    _ensure_usr_support(lock_endpoint)

    if user_index is None:
        # Adding new user - find first available slot
        max_users = (
            lock_endpoint.get_attribute_value(
                None, clusters.DoorLock.Attributes.NumberOfTotalUsersSupported
            )
            or 0
        )

        for idx in range(1, max_users + 1):
            get_user_response = await matter_client.send_device_command(
                node_id=node.node_id,
                endpoint_id=lock_endpoint.endpoint_id,
                command=clusters.DoorLock.Commands.GetUser(userIndex=idx),
            )
            if _get_attr(get_user_response, "userStatus") is None:
                user_index = idx
                break

        if user_index is None:
            raise NoAvailableUserSlotsError("No available user slots on the lock")

        user_status_enum = (
            USER_STATUS_REVERSE_MAP.get(
                user_status,
                clusters.DoorLock.Enums.UserStatusEnum.kOccupiedEnabled,
            )
            if user_status is not None
            else clusters.DoorLock.Enums.UserStatusEnum.kOccupiedEnabled
        )

        await matter_client.send_device_command(
            node_id=node.node_id,
            endpoint_id=lock_endpoint.endpoint_id,
            command=clusters.DoorLock.Commands.SetUser(
                operationType=clusters.DoorLock.Enums.DataOperationTypeEnum.kAdd,
                userIndex=user_index,
                userName=user_name,
                userUniqueID=user_unique_id,
                userStatus=user_status_enum,
                userType=USER_TYPE_REVERSE_MAP.get(
                    user_type,
                    clusters.DoorLock.Enums.UserTypeEnum.kUnrestrictedUser,
                )
                if user_type is not None
                else clusters.DoorLock.Enums.UserTypeEnum.kUnrestrictedUser,
                credentialRule=CREDENTIAL_RULE_REVERSE_MAP.get(
                    credential_rule,
                    clusters.DoorLock.Enums.CredentialRuleEnum.kSingle,
                )
                if credential_rule is not None
                else clusters.DoorLock.Enums.CredentialRuleEnum.kSingle,
            ),
            timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
        )
    else:
        # Updating existing user - preserve existing values when not specified
        get_user_response = await matter_client.send_device_command(
            node_id=node.node_id,
            endpoint_id=lock_endpoint.endpoint_id,
            command=clusters.DoorLock.Commands.GetUser(userIndex=user_index),
        )
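        # GetUser returns an empty userStatus for unoccupied slots.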
        if _get_attr(get_user_response, "userStatus") is None:
            raise UserSlotEmptyError(f"User slot {user_index} is empty")

        resolved_user_name = (
            user_name
            if user_name is not None
            else _get_attr(get_user_response, "userName")
        )
        resolved_unique_id = (
            user_unique_id
            if user_unique_id is not None
            else _get_attr(get_user_response, "userUniqueID")
        )

        resolved_status = (
            USER_STATUS_REVERSE_MAP[user_status]
            if user_status is not None
            else _get_attr(get_user_response, "userStatus")
        )

        resolved_type = (
            USER_TYPE_REVERSE_MAP[user_type]
            if user_type is not None
            else _get_attr(get_user_response, "userType")
        )

        resolved_rule = (
            CREDENTIAL_RULE_REVERSE_MAP[credential_rule]
            if credential_rule is not None
            else _get_attr(get_user_response, "credentialRule")
        )

        await matter_client.send_device_command(
            node_id=node.node_id,
            endpoint_id=lock_endpoint.endpoint_id,
            command=clusters.DoorLock.Commands.SetUser(
                operationType=clusters.DoorLock.Enums.DataOperationTypeEnum.kModify,
                userIndex=user_index,
                userName=resolved_user_name,
                userUniqueID=resolved_unique_id,
                userStatus=resolved_status,
                userType=resolved_type,
                credentialRule=resolved_rule,
            ),
            timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
        )

    return SetLockUserResult(user_index=user_index)


async def get_lock_users(
    matter_client: MatterClient,
    node: MatterNode,
) -> GetLockUsersResult:
    """Get all users from the lock.

    Returns typed dict with users list and max_users capacity.
    Raises HomeAssistantError on failure.
    """
    lock_endpoint = _get_lock_endpoint_or_raise(node)
    _ensure_usr_support(lock_endpoint)

    max_users = (
        lock_endpoint.get_attribute_value(
            None, clusters.DoorLock.Attributes.NumberOfTotalUsersSupported
        )
        or 0
    )

    users: list[LockUserData] = []
    current_index = 1

    # Iterate through users using next_user_index for efficiency
    while current_index is not None and current_index <= max_users:
        get_user_response = await matter_client.send_device_command(
            node_id=node.node_id,
            endpoint_id=lock_endpoint.endpoint_id,
            command=clusters.DoorLock.Commands.GetUser(
                userIndex=current_index,
            ),
        )

        user_data = _format_user_response(get_user_response)
        if user_data is not None:
            users.append(user_data)

        # Move to next user index
        next_index = _get_attr(get_user_response, "nextUserIndex")
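        # Stop when the lock reports no further users or fails to advance the index.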
        if next_index is None or next_index <= current_index:
            break
        current_index = next_index

    return GetLockUsersResult(
        max_users=max_users,
        users=users,
    )


async def clear_lock_user(
    matter_client: MatterClient,
    node: MatterNode,
    user_index: int,
) -> None:
    """Clear a user from the lock.

    Per the Matter spec, ClearUser also clears all associated credentials
    and schedules for the user.
    Use index 0xFFFE (CLEAR_ALL_INDEX) to clear all users.
    Raises HomeAssistantError on failure.
    """
    lock_endpoint = _get_lock_endpoint_or_raise(node)
    _ensure_usr_support(lock_endpoint)

    await matter_client.send_device_command(
        node_id=node.node_id,
        endpoint_id=lock_endpoint.endpoint_id,
        command=clusters.DoorLock.Commands.ClearUser(
            userIndex=user_index,
        ),
        timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
    )


# --- Credential validation helpers ---

# Map credential type strings to the feature bit that must be set
_CREDENTIAL_TYPE_FEATURE_MAP: dict[str, int] = {
    CRED_TYPE_PIN: DoorLockFeature.kPinCredential,
    CRED_TYPE_RFID: DoorLockFeature.kRfidCredential,
    CRED_TYPE_FINGERPRINT: DoorLockFeature.kFingerCredentials,
    CRED_TYPE_FINGER_VEIN: DoorLockFeature.kFingerCredentials,
    CRED_TYPE_FACE: DoorLockFeature.kFaceCredentials,
}

# Map credential type strings to the capacity attribute for slot iteration.
# Biometric types have no dedicated capacity attribute; fall back to total users.
_CREDENTIAL_TYPE_CAPACITY_ATTR = {
    CRED_TYPE_PIN: clusters.DoorLock.Attributes.NumberOfPINUsersSupported,
    CRED_TYPE_RFID: clusters.DoorLock.Attributes.NumberOfRFIDUsersSupported,
}


def _validate_credential_type_support(
    lock_endpoint: MatterEndpoint, credential_type: str
) -> None:
    """Validate the lock supports the requested credential type.

    Raises CredentialTypeNotSupportedError if not supported.
    """
    required_bit = _CREDENTIAL_TYPE_FEATURE_MAP.get(credential_type)
    if required_bit is None:
        raise CredentialTypeNotSupportedError(
            translation_domain="matter",
            translation_key=ERR_CREDENTIAL_TYPE_NOT_SUPPORTED,
            translation_placeholders={"credential_type": credential_type},
        )

    feature_map = _get_feature_map(lock_endpoint) or 0
    if not (feature_map & required_bit):
        raise CredentialTypeNotSupportedError(
            translation_domain="matter",
            translation_key=ERR_CREDENTIAL_TYPE_NOT_SUPPORTED,
            translation_placeholders={"credential_type": credential_type},
        )


def _validate_credential_data(
    lock_endpoint: MatterEndpoint, credential_type: str, credential_data: str
) -> None:
    """Validate credential data against lock constraints.

    For PIN: checks digits-only and length against Min/MaxPINCodeLength.
    For RFID: checks valid hex and byte length against Min/MaxRFIDCodeLength.
    Raises CredentialDataInvalidError on failure.
    """
    if credential_type == CRED_TYPE_PIN:
        if not credential_data.isdigit():
            raise CredentialDataInvalidError(
                translation_domain="matter",
                translation_key=ERR_INVALID_CREDENTIAL_DATA,
                translation_placeholders={"reason": "PIN must contain only digits"},
            )
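        # Length bounds fall back to permissive defaults when the lock does not report them.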
        min_len = (
            lock_endpoint.get_attribute_value(
                None, clusters.DoorLock.Attributes.MinPINCodeLength
            )
            or 0
        )
        max_len = (
            lock_endpoint.get_attribute_value(
                None, clusters.DoorLock.Attributes.MaxPINCodeLength
            )
            or 255
        )
        if not min_len <= len(credential_data) <= max_len:
            raise CredentialDataInvalidError(
                translation_domain="matter",
                translation_key=ERR_INVALID_CREDENTIAL_DATA,
                translation_placeholders={
                    "reason": f"PIN length must be between {min_len} and {max_len}"
                },
            )

    elif credential_type == CRED_TYPE_RFID:
        try:
            rfid_bytes = bytes.fromhex(credential_data)
        except ValueError as err:
            raise CredentialDataInvalidError(
                translation_domain="matter",
                translation_key=ERR_INVALID_CREDENTIAL_DATA,
                translation_placeholders={
                    "reason": "RFID data must be valid hexadecimal"
                },
            ) from err
        min_len = (
            lock_endpoint.get_attribute_value(
                None, clusters.DoorLock.Attributes.MinRFIDCodeLength
            )
            or 0
        )
        max_len = (
            lock_endpoint.get_attribute_value(
                None, clusters.DoorLock.Attributes.MaxRFIDCodeLength
            )
            or 255
        )
        if not min_len <= len(rfid_bytes) <= max_len:
            raise CredentialDataInvalidError(
                translation_domain="matter",
                translation_key=ERR_INVALID_CREDENTIAL_DATA,
                translation_placeholders={
                    "reason": (
                        f"RFID data length must be between"
                        f" {min_len} and {max_len} bytes"
                    )
                },
            )


def _credential_data_to_bytes(credential_type: str, credential_data: str) -> bytes:
    """Convert credential data string to bytes for the Matter command."""
    if credential_type == CRED_TYPE_RFID:
        return bytes.fromhex(credential_data)
    # PIN and other types: encode as UTF-8
    return credential_data.encode()


# --- Credential business logic functions ---


async def set_lock_credential(
    matter_client: MatterClient,
    node: MatterNode,
    *,
    credential_type: str,
    credential_data: str,
    credential_index: int | None = None,
    user_index: int | None = None,
    user_status: str | None = None,
    user_type: str | None = None,
) -> SetLockCredentialResult:
    """Add or modify a credential on the lock.

    Returns typed dict with credential_index, user_index, and next_credential_index.
    Raises ServiceValidationError for validation failures.
    Raises HomeAssistantError for device communication failures.
    """
    lock_endpoint = _get_lock_endpoint_or_raise(node)
    _ensure_usr_support(lock_endpoint)
    _validate_credential_type_support(lock_endpoint, credential_type)
    _validate_credential_data(lock_endpoint, credential_type, credential_data)

    cred_type_int = CREDENTIAL_TYPE_REVERSE_MAP[credential_type]
    cred_data_bytes = _credential_data_to_bytes(credential_type, credential_data)

    # Determine operation type and credential index
    operation_type = clusters.DoorLock.Enums.DataOperationTypeEnum.kAdd

    if credential_index is None:
        # Auto-find first available credential slot.
        # Use the credential-type-specific capacity as the upper bound.
        max_creds_attr = _CREDENTIAL_TYPE_CAPACITY_ATTR.get(
            credential_type,
            clusters.DoorLock.Attributes.NumberOfTotalUsersSupported,
        )
        max_creds_raw = lock_endpoint.get_attribute_value(None, max_creds_attr)
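        # Fall back to probing a handful of slots when the capacity attribute
        # is missing or invalid.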
        max_creds = (
            max_creds_raw if isinstance(max_creds_raw, int) and max_creds_raw > 0 else 5
        )
        for idx in range(1, max_creds + 1):
            status_response = await matter_client.send_device_command(
                node_id=node.node_id,
                endpoint_id=lock_endpoint.endpoint_id,
                command=clusters.DoorLock.Commands.GetCredentialStatus(
                    credential=clusters.DoorLock.Structs.CredentialStruct(
                        credentialType=cred_type_int,
                        credentialIndex=idx,
                    ),
                ),
            )
            if not _get_attr(status_response, "credentialExists"):
                credential_index = idx
                break

        if credential_index is None:
            raise NoAvailableUserSlotsError("No available credential slots on the lock")
    else:
        # Check if slot is occupied to determine Add vs Modify
        status_response = await matter_client.send_device_command(
            node_id=node.node_id,
            endpoint_id=lock_endpoint.endpoint_id,
            command=clusters.DoorLock.Commands.GetCredentialStatus(
                credential=clusters.DoorLock.Structs.CredentialStruct(
                    credentialType=cred_type_int,
                    credentialIndex=credential_index,
                ),
            ),
        )
        if _get_attr(status_response, "credentialExists"):
            operation_type = clusters.DoorLock.Enums.DataOperationTypeEnum.kModify

    # Resolve optional user_status and user_type enums
    resolved_user_status = (
        USER_STATUS_REVERSE_MAP.get(user_status) if user_status is not None else None
    )
    resolved_user_type = (
        USER_TYPE_REVERSE_MAP.get(user_type) if user_type is not None else None
    )

    set_cred_response = await matter_client.send_device_command(
        node_id=node.node_id,
        endpoint_id=lock_endpoint.endpoint_id,
        command=clusters.DoorLock.Commands.SetCredential(
            operationType=operation_type,
            credential=clusters.DoorLock.Structs.CredentialStruct(
                credentialType=cred_type_int,
                credentialIndex=credential_index,
            ),
            credentialData=cred_data_bytes,
            userIndex=user_index,
            userStatus=resolved_user_status,
            userType=resolved_user_type,
        ),
        timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
    )

    status_code = _get_attr(set_cred_response, "status")
    status_str = SET_CREDENTIAL_STATUS_MAP.get(status_code, f"unknown({status_code})")
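    # Anything other than kSuccess is surfaced as a failure with the mapped status.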
    if status_str != "success":
        raise SetCredentialFailedError(
            translation_domain="matter",
            translation_key="set_credential_failed",
            translation_placeholders={"status": status_str},
        )

    return SetLockCredentialResult(
        credential_index=credential_index,
        user_index=_get_attr(set_cred_response, "userIndex"),
        next_credential_index=_get_attr(set_cred_response, "nextCredentialIndex"),
    )


async def clear_lock_credential(
    matter_client: MatterClient,
    node: MatterNode,
    *,
    credential_type: str,
    credential_index: int,
) -> None:
    """Clear a credential from the lock.

    Raises HomeAssistantError on failure.
    """
    lock_endpoint = _get_lock_endpoint_or_raise(node)
    _ensure_usr_support(lock_endpoint)

    cred_type_int = CREDENTIAL_TYPE_REVERSE_MAP[credential_type]

    await matter_client.send_device_command(
        node_id=node.node_id,
        endpoint_id=lock_endpoint.endpoint_id,
        command=clusters.DoorLock.Commands.ClearCredential(
            credential=clusters.DoorLock.Structs.CredentialStruct(
                credentialType=cred_type_int,
                credentialIndex=credential_index,
            ),
        ),
        timed_request_timeout_ms=LOCK_TIMED_REQUEST_TIMEOUT_MS,
    )


async def get_lock_credential_status(
    matter_client: MatterClient,
    node: MatterNode,
    *,
    credential_type: str,
    credential_index: int,
) -> GetLockCredentialStatusResult:
    """Get the status of a credential slot on the lock.

    Returns typed dict with credential_exists, user_index, next_credential_index.
    Raises HomeAssistantError on failure.
    """
    lock_endpoint = _get_lock_endpoint_or_raise(node)
    _ensure_usr_support(lock_endpoint)

    cred_type_int = CREDENTIAL_TYPE_REVERSE_MAP[credential_type]

    response = await matter_client.send_device_command(
        node_id=node.node_id,
        endpoint_id=lock_endpoint.endpoint_id,
        command=clusters.DoorLock.Commands.GetCredentialStatus(
            credential=clusters.DoorLock.Structs.CredentialStruct(
                credentialType=cred_type_int,
                credentialIndex=credential_index,
            ),
        ),
    )

    return GetLockCredentialStatusResult(
        credential_exists=bool(_get_attr(response, "credentialExists")),
        user_index=_get_attr(response, "userIndex"),
        next_credential_index=_get_attr(response, "nextCredentialIndex"),
    )
homeassistant/components/matter/services.py
@@ -4,11 +4,27 @@ from __future__ import annotations
import voluptuous as vol

from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
from homeassistant.components.water_heater import DOMAIN as WATER_HEATER_DOMAIN
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant, SupportsResponse, callback
from homeassistant.helpers import config_validation as cv, service

from .const import DOMAIN
from .const import (
    ATTR_CREDENTIAL_DATA,
    ATTR_CREDENTIAL_INDEX,
    ATTR_CREDENTIAL_RULE,
    ATTR_CREDENTIAL_TYPE,
    ATTR_USER_INDEX,
    ATTR_USER_NAME,
    ATTR_USER_STATUS,
    ATTR_USER_TYPE,
    CLEAR_ALL_INDEX,
    CREDENTIAL_RULE_REVERSE_MAP,
    CREDENTIAL_TYPE_REVERSE_MAP,
    DOMAIN,
    SERVICE_CREDENTIAL_TYPES,
    USER_TYPE_REVERSE_MAP,
)

ATTR_DURATION = "duration"
ATTR_EMERGENCY_BOOST = "emergency_boost"
@@ -36,3 +52,108 @@ def async_setup_services(hass: HomeAssistant) -> None:
        },
        func="async_set_boost",
    )

    # Lock services - Full user CRUD
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        "set_lock_user",
        entity_domain=LOCK_DOMAIN,
        schema={
            vol.Optional(ATTR_USER_INDEX): vol.All(vol.Coerce(int), vol.Range(min=1)),
            vol.Optional(ATTR_USER_NAME): vol.Any(str, None),
            vol.Optional(ATTR_USER_TYPE): vol.In(USER_TYPE_REVERSE_MAP.keys()),
            vol.Optional(ATTR_CREDENTIAL_RULE): vol.In(
                CREDENTIAL_RULE_REVERSE_MAP.keys()
            ),
        },
        func="async_set_lock_user",
    )

    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        "clear_lock_user",
        entity_domain=LOCK_DOMAIN,
        schema={
            vol.Required(ATTR_USER_INDEX): vol.All(
                vol.Coerce(int),
                vol.Any(vol.Range(min=1), CLEAR_ALL_INDEX),
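                # CLEAR_ALL_INDEX (65534) clears every user slot.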
            ),
        },
        func="async_clear_lock_user",
    )

    # Lock services - Query operations
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        "get_lock_info",
        entity_domain=LOCK_DOMAIN,
        schema={},
        func="async_get_lock_info",
        supports_response=SupportsResponse.ONLY,
    )

    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        "get_lock_users",
        entity_domain=LOCK_DOMAIN,
        schema={},
        func="async_get_lock_users",
        supports_response=SupportsResponse.ONLY,
    )

    # Lock services - Credential management
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        "set_lock_credential",
        entity_domain=LOCK_DOMAIN,
        schema={
            vol.Required(ATTR_CREDENTIAL_TYPE): vol.In(SERVICE_CREDENTIAL_TYPES),
            vol.Required(ATTR_CREDENTIAL_DATA): str,
            vol.Optional(ATTR_CREDENTIAL_INDEX): vol.All(
                vol.Coerce(int), vol.Range(min=0)
            ),
            vol.Optional(ATTR_USER_INDEX): vol.All(vol.Coerce(int), vol.Range(min=1)),
            vol.Optional(ATTR_USER_STATUS): vol.In(
                ["occupied_enabled", "occupied_disabled"]
            ),
            vol.Optional(ATTR_USER_TYPE): vol.In(USER_TYPE_REVERSE_MAP.keys()),
        },
        func="async_set_lock_credential",
        supports_response=SupportsResponse.ONLY,
    )

    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        "clear_lock_credential",
        entity_domain=LOCK_DOMAIN,
        schema={
            vol.Required(ATTR_CREDENTIAL_TYPE): vol.In(SERVICE_CREDENTIAL_TYPES),
            vol.Required(ATTR_CREDENTIAL_INDEX): vol.All(
                vol.Coerce(int), vol.Range(min=0)
            ),
        },
        func="async_clear_lock_credential",
    )

    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        "get_lock_credential_status",
        entity_domain=LOCK_DOMAIN,
        schema={
            vol.Required(ATTR_CREDENTIAL_TYPE): vol.In(
                CREDENTIAL_TYPE_REVERSE_MAP.keys()
            ),
            vol.Required(ATTR_CREDENTIAL_INDEX): vol.All(
                vol.Coerce(int), vol.Range(min=0)
            ),
        },
        func="async_get_lock_credential_status",
        supports_response=SupportsResponse.ONLY,
    )
homeassistant/components/matter/services.yaml
@@ -1,3 +1,177 @@
clear_lock_credential:
  target:
    entity:
      domain: lock
      integration: matter
  fields:
    credential_type:
      selector:
        select:
          options:
            - pin
            - rfid
            - fingerprint
            - finger_vein
            - face
      required: true
    credential_index:
      selector:
        number:
          min: 0
          max: 65534
          step: 1
          mode: box
      required: true

clear_lock_user:
  target:
    entity:
      domain: lock
      integration: matter
  fields:
    user_index:
      selector:
        number:
          min: 1
          max: 65534
          step: 1
          mode: box
      required: true

get_lock_credential_status:
  target:
    entity:
      domain: lock
      integration: matter
  fields:
    credential_type:
      selector:
        select:
          options:
            - programming_pin
            - pin
            - rfid
            - fingerprint
            - finger_vein
            - face
            - aliro_credential_issuer_key
            - aliro_evictable_endpoint_key
            - aliro_non_evictable_endpoint_key
      required: true
    credential_index:
      selector:
        number:
          min: 0
          max: 65534
          step: 1
          mode: box
      required: true

get_lock_info:
  target:
    entity:
      domain: lock
      integration: matter

get_lock_users:
  target:
    entity:
      domain: lock
      integration: matter

set_lock_credential:
  target:
    entity:
      domain: lock
      integration: matter
  fields:
    credential_type:
      selector:
        select:
          options:
            - pin
            - rfid
            - fingerprint
            - finger_vein
            - face
      required: true
    credential_data:
      selector:
        text:
      required: true
    credential_index:
      selector:
        number:
          min: 0
          max: 65534
          step: 1
          mode: box
    user_index:
      selector:
        number:
          min: 1
          max: 65534
          step: 1
          mode: box
    user_status:
      selector:
        select:
          options:
            - occupied_enabled
            - occupied_disabled
    user_type:
      selector:
        select:
          options:
            - unrestricted_user
            - year_day_schedule_user
            - week_day_schedule_user
            - programming_user
            - non_access_user
            - forced_user
            - disposable_user
            - expiring_user
            - schedule_restricted_user
            - remote_only_user

set_lock_user:
  target:
    entity:
      domain: lock
      integration: matter
  fields:
    user_index:
      selector:
        number:
          min: 1
          max: 255
          step: 1
          mode: box
    user_name:
      selector:
        text:
    user_type:
      selector:
        select:
          options:
            - unrestricted_user
            - year_day_schedule_user
            - week_day_schedule_user
            - programming_user
            - non_access_user
            - forced_user
            - disposable_user
            - expiring_user
            - schedule_restricted_user
            - remote_only_user
    credential_rule:
      selector:
        select:
          options:
            - single
            - dual
            - tri

water_heater_boost:
  target:
    entity:
homeassistant/components/matter/strings.json
@@ -619,6 +619,17 @@
      }
    }
  },
  "exceptions": {
    "credential_type_not_supported": {
      "message": "The lock does not support credential type `{credential_type}`."
    },
    "invalid_credential_data": {
      "message": "Invalid credential data: {reason}."
    },
    "set_credential_failed": {
      "message": "Failed to set credential: lock returned status `{status}`."
    }
  },
  "issues": {
    "server_version_version_too_new": {
      "description": "The version of the Matter Server you are currently running is too new for this version of Home Assistant. Please update Home Assistant or downgrade the Matter Server to an older version to fix this issue.",
@@ -630,6 +641,52 @@
    }
  },
  "services": {
    "clear_lock_credential": {
      "description": "Removes a credential from a lock.",
      "fields": {
        "credential_index": {
          "description": "The credential slot index to clear.",
          "name": "Credential index"
        },
        "credential_type": {
          "description": "The type of credential to clear.",
          "name": "Credential type"
        }
      },
      "name": "Clear lock credential"
    },
    "clear_lock_user": {
      "description": "Deletes a lock user and all associated credentials. Use index 65534 to clear all users.",
      "fields": {
        "user_index": {
          "description": "The user slot index (1-based) to clear, or 65534 to clear all.",
          "name": "User index"
        }
      },
      "name": "Clear lock user"
    },
    "get_lock_credential_status": {
      "description": "Returns the status of a credential slot on a lock.",
      "fields": {
        "credential_index": {
          "description": "The credential slot index to query.",
          "name": "Credential index"
        },
        "credential_type": {
          "description": "The type of credential to query.",
          "name": "Credential type"
        }
      },
      "name": "Get lock credential status"
    },
    "get_lock_info": {
      "description": "Returns lock capabilities including supported credential types, user capacity, and PIN length constraints.",
      "name": "Get lock info"
    },
    "get_lock_users": {
      "description": "Returns all users configured on a lock with their credentials.",
      "name": "Get lock users"
    },
    "open_commissioning_window": {
      "description": "Allows adding one of your devices to another Matter network by opening the commissioning window for this Matter device for 60 seconds.",
      "fields": {
@@ -640,6 +697,58 @@
      },
      "name": "Open commissioning window"
    },
    "set_lock_credential": {
      "description": "Adds or updates a credential on a lock.",
      "fields": {
        "credential_data": {
          "description": "The credential data. For PIN: digits only. For RFID: hexadecimal string.",
          "name": "Credential data"
        },
        "credential_index": {
          "description": "The credential slot index. Leave empty to auto-find an available slot.",
          "name": "Credential index"
        },
        "credential_type": {
          "description": "The type of credential (e.g., pin, rfid, fingerprint).",
          "name": "Credential type"
        },
        "user_index": {
          "description": "The user index to associate the credential with. Leave empty for automatic assignment.",
          "name": "User index"
        },
        "user_status": {
          "description": "The user status to set when creating a new user for this credential.",
          "name": "User status"
        },
        "user_type": {
          "description": "The user type to set when creating a new user for this credential.",
          "name": "User type"
        }
      },
      "name": "Set lock credential"
    },
    "set_lock_user": {
      "description": "Creates or updates a lock user.",
      "fields": {
        "credential_rule": {
          "description": "The credential rule for the user.",
          "name": "Credential rule"
        },
        "user_index": {
          "description": "The user slot index (1-based). Leave empty to auto-find an available slot.",
          "name": "User index"
        },
        "user_name": {
          "description": "The name for the user.",
          "name": "User name"
        },
        "user_type": {
          "description": "The type of user to create.",
          "name": "User type"
        }
      },
      "name": "Set lock user"
    },
    "water_heater_boost": {
      "description": "Enables water heater boost for a specific duration.",
      "fields": {
Some files were not shown because too many files have changed in this diff.