mirror of
https://github.com/home-assistant/core.git
synced 2026-03-09 15:44:00 +01:00
Compare commits
106 Commits
epenet/nut
...
knx-sensor
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cf6c4884f0 | ||
|
|
2a8b045f43 | ||
|
|
281f439bc9 | ||
|
|
71b420b433 | ||
|
|
2f02d0f0dc | ||
|
|
37cb3cbd50 | ||
|
|
beec21c4a9 | ||
|
|
642f603ea2 | ||
|
|
a3d8d76678 | ||
|
|
c25feaa62b | ||
|
|
50bde6fccd | ||
|
|
1b7398c271 | ||
|
|
7e4b8e802e | ||
|
|
4bcea27151 | ||
|
|
ffca43027f | ||
|
|
01e94ca5b2 | ||
|
|
b8ea6b4162 | ||
|
|
1471cb93bc | ||
|
|
2f7ac2b439 | ||
|
|
0accb403be | ||
|
|
f49a323faf | ||
|
|
21d303dbbc | ||
|
|
c080a460a2 | ||
|
|
75d675f299 | ||
|
|
a7e7d01b7a | ||
|
|
8a0569e279 | ||
|
|
e8279bd20f | ||
|
|
852dbf8986 | ||
|
|
6f0eb1d07a | ||
|
|
6f68d91593 | ||
|
|
ffc17b6e91 | ||
|
|
0d04d79844 | ||
|
|
f57884cb95 | ||
|
|
3a83fe5c72 | ||
|
|
973feb71c1 | ||
|
|
ecee23fc7a | ||
|
|
442d2282dc | ||
|
|
8853d3e17d | ||
|
|
6d1e387911 | ||
|
|
13fe135e7f | ||
|
|
618687ea05 | ||
|
|
8b545a6e76 | ||
|
|
42fa13200d | ||
|
|
d56e944a86 | ||
|
|
fb357390ce | ||
|
|
702450e209 | ||
|
|
bbe45e0759 | ||
|
|
92902c7aa1 | ||
|
|
5d92dd7760 | ||
|
|
0ab62dabde | ||
|
|
fc68828c78 | ||
|
|
7644036592 | ||
|
|
f19068f7de | ||
|
|
13d2211755 | ||
|
|
87e63591d1 | ||
|
|
fc02bbcdd0 | ||
|
|
388d619604 | ||
|
|
3777acff95 | ||
|
|
e0fd6784cf | ||
|
|
305463d882 | ||
|
|
de16edc55b | ||
|
|
bd6438937b | ||
|
|
45e453791e | ||
|
|
152137a3a2 | ||
|
|
e059c51b1d | ||
|
|
9ef66a3a90 | ||
|
|
494f8c32d5 | ||
|
|
51f90a328b | ||
|
|
b7bdb7b32a | ||
|
|
76c8bae098 | ||
|
|
59a75e74fe | ||
|
|
a4af1ce5f8 | ||
|
|
30ea0b4923 | ||
|
|
fb889dd524 | ||
|
|
31055c5cde | ||
|
|
a264e5949f | ||
|
|
84260ac3f7 | ||
|
|
f50a35877d | ||
|
|
6bc94a318a | ||
|
|
b0904917ca | ||
|
|
536cfc4c67 | ||
|
|
27b647fa36 | ||
|
|
16fb2dfa91 | ||
|
|
664b75e060 | ||
|
|
1cd302eb17 | ||
|
|
8da86796d2 | ||
|
|
33c0edc994 | ||
|
|
3e8833da54 | ||
|
|
3858d557b3 | ||
|
|
0923bed4b6 | ||
|
|
9b8432eac3 | ||
|
|
5232c05702 | ||
|
|
e5f77801a7 | ||
|
|
bc138b3485 | ||
|
|
ae90c5fa92 | ||
|
|
2fce45abe1 | ||
|
|
e4417f7b00 | ||
|
|
b57c7f8a95 | ||
|
|
0618460d73 | ||
|
|
92dd045772 | ||
|
|
fc723e1a42 | ||
|
|
5907356309 | ||
|
|
1c221b4714 | ||
|
|
05d57167d2 | ||
|
|
69a98dd53e | ||
|
|
3c7dd93c7f |
318
.github/copilot-instructions.md
vendored
318
.github/copilot-instructions.md
vendored
@@ -7,328 +7,20 @@ This repository contains the core of Home Assistant, a Python 3 based home autom
|
||||
|
||||
## Code Review Guidelines
|
||||
|
||||
**When reviewing code, do NOT comment on:**
|
||||
- **Missing imports** - We use static analysis tooling to catch that
|
||||
- **Code formatting** - We have ruff as a formatting tool that will catch those if needed (unless specifically instructed otherwise in these instructions)
|
||||
|
||||
**Git commit practices during review:**
|
||||
- **Do NOT amend, squash, or rebase commits after review has started** - Reviewers need to see what changed since their last review
|
||||
|
||||
## Python Requirements
|
||||
|
||||
- **Compatibility**: Python 3.13+
|
||||
- **Language Features**: Use the newest features when possible:
|
||||
- Pattern matching
|
||||
- Type hints
|
||||
- f-strings (preferred over `%` or `.format()`)
|
||||
- Dataclasses
|
||||
- Walrus operator
|
||||
|
||||
### Strict Typing (Platinum)
|
||||
- **Comprehensive Type Hints**: Add type hints to all functions, methods, and variables
|
||||
- **Custom Config Entry Types**: When using runtime_data:
|
||||
```python
|
||||
type MyIntegrationConfigEntry = ConfigEntry[MyClient]
|
||||
```
|
||||
- **Library Requirements**: Include `py.typed` file for PEP-561 compliance
|
||||
|
||||
## Code Quality Standards
|
||||
|
||||
- **Formatting**: Ruff
|
||||
- **Linting**: PyLint and Ruff
|
||||
- **Type Checking**: MyPy
|
||||
- **Lint/Type/Format Fixes**: Always prefer addressing the underlying issue (e.g., import the typed source, update shared stubs, align with Ruff expectations, or correct formatting at the source) before disabling a rule, adding `# type: ignore`, or skipping a formatter. Treat suppressions and `noqa` comments as a last resort once no compliant fix exists
|
||||
- **Testing**: pytest with plain functions and fixtures
|
||||
- **Language**: American English for all code, comments, and documentation (use sentence case, including titles)
|
||||
|
||||
### Writing Style Guidelines
|
||||
- **Tone**: Friendly and informative
|
||||
- **Perspective**: Use second-person ("you" and "your") for user-facing messages
|
||||
- **Inclusivity**: Use objective, non-discriminatory language
|
||||
- **Clarity**: Write for non-native English speakers
|
||||
- **Formatting in Messages**:
|
||||
- Use backticks for: file paths, filenames, variable names, field entries
|
||||
- Use sentence case for titles and messages (capitalize only the first word and proper nouns)
|
||||
- Avoid abbreviations when possible
|
||||
|
||||
### Documentation Standards
|
||||
- **File Headers**: Short and concise
|
||||
```python
|
||||
"""Integration for Peblar EV chargers."""
|
||||
```
|
||||
- **Method/Function Docstrings**: Required for all
|
||||
```python
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool:
|
||||
"""Set up Peblar from a config entry."""
|
||||
```
|
||||
- **Comment Style**:
|
||||
- Use clear, descriptive comments
|
||||
- Explain the "why" not just the "what"
|
||||
- Keep code block lines under 80 characters when possible
|
||||
- Use progressive disclosure (simple explanation first, complex details later)
|
||||
|
||||
## Async Programming
|
||||
|
||||
- All external I/O operations must be async
|
||||
- **Best Practices**:
|
||||
- Avoid sleeping in loops
|
||||
- Avoid awaiting in loops - use `gather` instead
|
||||
- No blocking calls
|
||||
- Group executor jobs when possible - switching between event loop and executor is expensive
|
||||
|
||||
### Blocking Operations
|
||||
- **Use Executor**: For blocking I/O operations
|
||||
```python
|
||||
result = await hass.async_add_executor_job(blocking_function, args)
|
||||
```
|
||||
- **Never Block Event Loop**: Avoid file operations, `time.sleep()`, blocking HTTP calls
|
||||
- **Replace with Async**: Use `asyncio.sleep()` instead of `time.sleep()`
|
||||
|
||||
### Thread Safety
|
||||
- **@callback Decorator**: For event loop safe functions
|
||||
```python
|
||||
@callback
|
||||
def async_update_callback(self, event):
|
||||
"""Safe to run in event loop."""
|
||||
self.async_write_ha_state()
|
||||
```
|
||||
- **Sync APIs from Threads**: Use sync versions when calling from non-event loop threads
|
||||
- **Registry Changes**: Must be done in event loop thread
|
||||
|
||||
### Error Handling
|
||||
- **Exception Types**: Choose most specific exception available
|
||||
- `ServiceValidationError`: User input errors (preferred over `ValueError`)
|
||||
- `HomeAssistantError`: Device communication failures
|
||||
- `ConfigEntryNotReady`: Temporary setup issues (device offline)
|
||||
- `ConfigEntryAuthFailed`: Authentication problems
|
||||
- `ConfigEntryError`: Permanent setup issues
|
||||
- **Try/Catch Best Practices**:
|
||||
- Only wrap code that can throw exceptions
|
||||
- Keep try blocks minimal - process data after the try/catch
|
||||
- **Avoid bare exceptions** except in specific cases:
|
||||
- ❌ Generally not allowed: `except:` or `except Exception:`
|
||||
- ✅ Allowed in config flows to ensure robustness
|
||||
- ✅ Allowed in functions/methods that run in background tasks
|
||||
- Bad pattern:
|
||||
```python
|
||||
try:
|
||||
data = await device.get_data() # Can throw
|
||||
# ❌ Don't process data inside try block
|
||||
processed = data.get("value", 0) * 100
|
||||
self._attr_native_value = processed
|
||||
except DeviceError:
|
||||
_LOGGER.error("Failed to get data")
|
||||
```
|
||||
- Good pattern:
|
||||
```python
|
||||
try:
|
||||
data = await device.get_data() # Can throw
|
||||
except DeviceError:
|
||||
_LOGGER.error("Failed to get data")
|
||||
return
|
||||
|
||||
# ✅ Process data outside try block
|
||||
processed = data.get("value", 0) * 100
|
||||
self._attr_native_value = processed
|
||||
```
|
||||
- **Bare Exception Usage**:
|
||||
```python
|
||||
# ❌ Not allowed in regular code
|
||||
try:
|
||||
data = await device.get_data()
|
||||
except Exception: # Too broad
|
||||
_LOGGER.error("Failed")
|
||||
|
||||
# ✅ Allowed in config flow for robustness
|
||||
async def async_step_user(self, user_input=None):
|
||||
try:
|
||||
await self._test_connection(user_input)
|
||||
except Exception: # Allowed here
|
||||
errors["base"] = "unknown"
|
||||
|
||||
# ✅ Allowed in background tasks
|
||||
async def _background_refresh():
|
||||
try:
|
||||
await coordinator.async_refresh()
|
||||
except Exception: # Allowed in task
|
||||
_LOGGER.exception("Unexpected error in background task")
|
||||
```
|
||||
- **Setup Failure Patterns**:
|
||||
```python
|
||||
try:
|
||||
await device.async_setup()
|
||||
except (asyncio.TimeoutError, TimeoutException) as ex:
|
||||
raise ConfigEntryNotReady(f"Timeout connecting to {device.host}") from ex
|
||||
except AuthFailed as ex:
|
||||
raise ConfigEntryAuthFailed(f"Credentials expired for {device.name}") from ex
|
||||
```
|
||||
|
||||
### Logging
|
||||
- **Format Guidelines**:
|
||||
- No periods at end of messages
|
||||
- No integration names/domains (added automatically)
|
||||
- No sensitive data (keys, tokens, passwords)
|
||||
- Use debug level for non-user-facing messages
|
||||
- **Use Lazy Logging**:
|
||||
```python
|
||||
_LOGGER.debug("This is a log message with %s", variable)
|
||||
```
|
||||
|
||||
### Unavailability Logging
|
||||
- **Log Once**: When device/service becomes unavailable (info level)
|
||||
- **Log Recovery**: When device/service comes back online
|
||||
- **Implementation Pattern**:
|
||||
```python
|
||||
_unavailable_logged: bool = False
|
||||
|
||||
if not self._unavailable_logged:
|
||||
_LOGGER.info("The sensor is unavailable: %s", ex)
|
||||
self._unavailable_logged = True
|
||||
# On recovery:
|
||||
if self._unavailable_logged:
|
||||
_LOGGER.info("The sensor is back online")
|
||||
self._unavailable_logged = False
|
||||
```
|
||||
|
||||
## Development Commands
|
||||
|
||||
### Environment
|
||||
- **Local development (non-container)**: Activate the project venv before running commands: `source .venv/bin/activate`
|
||||
- **Dev container**: No activation needed, the environment is pre-configured
|
||||
.vscode/tasks.json contains useful commands used for development.
|
||||
|
||||
### Code Quality & Linting
|
||||
- **Run all linters on all files**: `prek run --all-files`
|
||||
- **Run linters on staged files only**: `prek run`
|
||||
- **PyLint on everything** (slow): `pylint homeassistant`
|
||||
- **PyLint on specific folder**: `pylint homeassistant/components/my_integration`
|
||||
- **MyPy type checking (whole project)**: `mypy homeassistant/`
|
||||
- **MyPy on specific integration**: `mypy homeassistant/components/my_integration`
|
||||
## Python Syntax Notes
|
||||
|
||||
### Testing
|
||||
- **Quick test of changed files**: `pytest --timeout=10 --picked`
|
||||
- **Update test snapshots**: Add `--snapshot-update` to pytest command
|
||||
  - ⚠️ Do not rely on the test results from the run that used `--snapshot-update`
|
||||
- Always run tests again without the flag to verify snapshots
|
||||
- **Full test suite** (AVOID - very slow): `pytest ./tests`
|
||||
- Python 3.14 explicitly allows `except TypeA, TypeB:` without parentheses (PEP 758; only when no `as` clause is used).
|
||||
|
||||
### Dependencies & Requirements
|
||||
- **Update generated files after dependency changes**: `python -m script.gen_requirements_all`
|
||||
- **Install all Python requirements**:
|
||||
```bash
|
||||
uv pip install -r requirements_all.txt -r requirements.txt -r requirements_test.txt
|
||||
```
|
||||
- **Install test requirements only**:
|
||||
```bash
|
||||
uv pip install -r requirements_test_all.txt -r requirements.txt
|
||||
```
|
||||
## Good practices
|
||||
|
||||
### Translations
|
||||
- **Update translations after strings.json changes**:
|
||||
```bash
|
||||
python -m script.translations develop --all
|
||||
```
|
||||
|
||||
### Project Validation
|
||||
- **Run hassfest** (checks project structure and updates generated files):
|
||||
```bash
|
||||
python -m script.hassfest
|
||||
```
|
||||
|
||||
## Common Anti-Patterns & Best Practices
|
||||
|
||||
### ❌ **Avoid These Patterns**
|
||||
```python
|
||||
# Blocking operations in event loop
|
||||
data = requests.get(url) # ❌ Blocks event loop
|
||||
time.sleep(5) # ❌ Blocks event loop
|
||||
|
||||
# Reusing BleakClient instances
|
||||
self.client = BleakClient(address)
|
||||
await self.client.connect()
|
||||
# Later...
|
||||
await self.client.connect() # ❌ Don't reuse
|
||||
|
||||
# Hardcoded strings in code
|
||||
self._attr_name = "Temperature Sensor" # ❌ Not translatable
|
||||
|
||||
# Missing error handling
|
||||
data = await self.api.get_data() # ❌ No exception handling
|
||||
|
||||
# Storing sensitive data in diagnostics
|
||||
return {"api_key": entry.data[CONF_API_KEY]} # ❌ Exposes secrets
|
||||
|
||||
# Accessing hass.data directly in tests
|
||||
coordinator = hass.data[DOMAIN][entry.entry_id] # ❌ Don't access hass.data
|
||||
|
||||
# User-configurable polling intervals
|
||||
# In config flow
|
||||
vol.Optional("scan_interval", default=60): cv.positive_int # ❌ Not allowed
|
||||
# In coordinator
|
||||
update_interval = timedelta(minutes=entry.data.get("scan_interval", 1)) # ❌ Not allowed
|
||||
|
||||
# User-configurable config entry names (non-helper integrations)
|
||||
vol.Optional("name", default="My Device"): cv.string # ❌ Not allowed in regular integrations
|
||||
|
||||
# Too much code in try block
|
||||
try:
|
||||
response = await client.get_data() # Can throw
|
||||
# ❌ Data processing should be outside try block
|
||||
temperature = response["temperature"] / 10
|
||||
humidity = response["humidity"]
|
||||
self._attr_native_value = temperature
|
||||
except ClientError:
|
||||
_LOGGER.error("Failed to fetch data")
|
||||
|
||||
# Bare exceptions in regular code
|
||||
try:
|
||||
value = await sensor.read_value()
|
||||
except Exception: # ❌ Too broad - catch specific exceptions
|
||||
_LOGGER.error("Failed to read sensor")
|
||||
```
|
||||
|
||||
### ✅ **Use These Patterns Instead**
|
||||
```python
|
||||
# Async operations with executor
|
||||
data = await hass.async_add_executor_job(requests.get, url)
|
||||
await asyncio.sleep(5) # ✅ Non-blocking
|
||||
|
||||
# Fresh BleakClient instances
|
||||
client = BleakClient(address) # ✅ New instance each time
|
||||
await client.connect()
|
||||
|
||||
# Translatable entity names
|
||||
_attr_translation_key = "temperature_sensor" # ✅ Translatable
|
||||
|
||||
# Proper error handling
|
||||
try:
|
||||
data = await self.api.get_data()
|
||||
except ApiException as err:
|
||||
raise UpdateFailed(f"API error: {err}") from err
|
||||
|
||||
# Redacted diagnostics data
|
||||
return async_redact_data(data, {"api_key", "password"}) # ✅ Safe
|
||||
|
||||
# Test through proper integration setup and fixtures
|
||||
@pytest.fixture
|
||||
async def init_integration(hass, mock_config_entry, mock_api):
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id) # ✅ Proper setup
|
||||
|
||||
# Integration-determined polling intervals (not user-configurable)
|
||||
SCAN_INTERVAL = timedelta(minutes=5) # ✅ Common pattern: constant in const.py
|
||||
|
||||
class MyCoordinator(DataUpdateCoordinator[MyData]):
|
||||
def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
|
||||
# ✅ Integration determines interval based on device capabilities, connection type, etc.
|
||||
interval = timedelta(minutes=1) if client.is_local else SCAN_INTERVAL
|
||||
super().__init__(
|
||||
hass,
|
||||
logger=LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=interval,
|
||||
config_entry=config_entry, # ✅ Pass config_entry - it's accepted and recommended
|
||||
)
|
||||
```
|
||||
Integrations with Platinum or Gold level in the Integration Quality Scale reflect a high standard of code quality and maintainability. When looking for examples of something, these are good places to start. The level is indicated in the manifest.json of the integration.
|
||||
|
||||
|
||||
# Skills
|
||||
|
||||
2
.github/workflows/builder.yml
vendored
2
.github/workflows/builder.yml
vendored
@@ -614,7 +614,7 @@ jobs:
|
||||
|
||||
- name: Generate artifact attestation
|
||||
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
|
||||
uses: actions/attest-build-provenance@96278af6caaf10aea03fd8d33a09a777ca52d62f # v3.2.0
|
||||
uses: actions/attest-build-provenance@a2bbfa25375fe432b6a289bc6b6cd05ecd0c4c32 # v4.1.0
|
||||
with:
|
||||
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.push.outputs.digest }}
|
||||
|
||||
318
AGENTS.md
318
AGENTS.md
@@ -4,325 +4,17 @@ This repository contains the core of Home Assistant, a Python 3 based home autom
|
||||
|
||||
## Code Review Guidelines
|
||||
|
||||
**When reviewing code, do NOT comment on:**
|
||||
- **Missing imports** - We use static analysis tooling to catch that
|
||||
- **Code formatting** - We have ruff as a formatting tool that will catch those if needed (unless specifically instructed otherwise in these instructions)
|
||||
|
||||
**Git commit practices during review:**
|
||||
- **Do NOT amend, squash, or rebase commits after review has started** - Reviewers need to see what changed since their last review
|
||||
|
||||
## Python Requirements
|
||||
|
||||
- **Compatibility**: Python 3.13+
|
||||
- **Language Features**: Use the newest features when possible:
|
||||
- Pattern matching
|
||||
- Type hints
|
||||
- f-strings (preferred over `%` or `.format()`)
|
||||
- Dataclasses
|
||||
- Walrus operator
|
||||
|
||||
### Strict Typing (Platinum)
|
||||
- **Comprehensive Type Hints**: Add type hints to all functions, methods, and variables
|
||||
- **Custom Config Entry Types**: When using runtime_data:
|
||||
```python
|
||||
type MyIntegrationConfigEntry = ConfigEntry[MyClient]
|
||||
```
|
||||
- **Library Requirements**: Include `py.typed` file for PEP-561 compliance
|
||||
|
||||
## Code Quality Standards
|
||||
|
||||
- **Formatting**: Ruff
|
||||
- **Linting**: PyLint and Ruff
|
||||
- **Type Checking**: MyPy
|
||||
- **Lint/Type/Format Fixes**: Always prefer addressing the underlying issue (e.g., import the typed source, update shared stubs, align with Ruff expectations, or correct formatting at the source) before disabling a rule, adding `# type: ignore`, or skipping a formatter. Treat suppressions and `noqa` comments as a last resort once no compliant fix exists
|
||||
- **Testing**: pytest with plain functions and fixtures
|
||||
- **Language**: American English for all code, comments, and documentation (use sentence case, including titles)
|
||||
|
||||
### Writing Style Guidelines
|
||||
- **Tone**: Friendly and informative
|
||||
- **Perspective**: Use second-person ("you" and "your") for user-facing messages
|
||||
- **Inclusivity**: Use objective, non-discriminatory language
|
||||
- **Clarity**: Write for non-native English speakers
|
||||
- **Formatting in Messages**:
|
||||
- Use backticks for: file paths, filenames, variable names, field entries
|
||||
- Use sentence case for titles and messages (capitalize only the first word and proper nouns)
|
||||
- Avoid abbreviations when possible
|
||||
|
||||
### Documentation Standards
|
||||
- **File Headers**: Short and concise
|
||||
```python
|
||||
"""Integration for Peblar EV chargers."""
|
||||
```
|
||||
- **Method/Function Docstrings**: Required for all
|
||||
```python
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: PeblarConfigEntry) -> bool:
|
||||
"""Set up Peblar from a config entry."""
|
||||
```
|
||||
- **Comment Style**:
|
||||
- Use clear, descriptive comments
|
||||
- Explain the "why" not just the "what"
|
||||
- Keep code block lines under 80 characters when possible
|
||||
- Use progressive disclosure (simple explanation first, complex details later)
|
||||
|
||||
## Async Programming
|
||||
|
||||
- All external I/O operations must be async
|
||||
- **Best Practices**:
|
||||
- Avoid sleeping in loops
|
||||
- Avoid awaiting in loops - use `gather` instead
|
||||
- No blocking calls
|
||||
- Group executor jobs when possible - switching between event loop and executor is expensive
|
||||
|
||||
### Blocking Operations
|
||||
- **Use Executor**: For blocking I/O operations
|
||||
```python
|
||||
result = await hass.async_add_executor_job(blocking_function, args)
|
||||
```
|
||||
- **Never Block Event Loop**: Avoid file operations, `time.sleep()`, blocking HTTP calls
|
||||
- **Replace with Async**: Use `asyncio.sleep()` instead of `time.sleep()`
|
||||
|
||||
### Thread Safety
|
||||
- **@callback Decorator**: For event loop safe functions
|
||||
```python
|
||||
@callback
|
||||
def async_update_callback(self, event):
|
||||
"""Safe to run in event loop."""
|
||||
self.async_write_ha_state()
|
||||
```
|
||||
- **Sync APIs from Threads**: Use sync versions when calling from non-event loop threads
|
||||
- **Registry Changes**: Must be done in event loop thread
|
||||
|
||||
### Error Handling
|
||||
- **Exception Types**: Choose most specific exception available
|
||||
- `ServiceValidationError`: User input errors (preferred over `ValueError`)
|
||||
- `HomeAssistantError`: Device communication failures
|
||||
- `ConfigEntryNotReady`: Temporary setup issues (device offline)
|
||||
- `ConfigEntryAuthFailed`: Authentication problems
|
||||
- `ConfigEntryError`: Permanent setup issues
|
||||
- **Try/Catch Best Practices**:
|
||||
- Only wrap code that can throw exceptions
|
||||
- Keep try blocks minimal - process data after the try/catch
|
||||
- **Avoid bare exceptions** except in specific cases:
|
||||
- ❌ Generally not allowed: `except:` or `except Exception:`
|
||||
- ✅ Allowed in config flows to ensure robustness
|
||||
- ✅ Allowed in functions/methods that run in background tasks
|
||||
- Bad pattern:
|
||||
```python
|
||||
try:
|
||||
data = await device.get_data() # Can throw
|
||||
# ❌ Don't process data inside try block
|
||||
processed = data.get("value", 0) * 100
|
||||
self._attr_native_value = processed
|
||||
except DeviceError:
|
||||
_LOGGER.error("Failed to get data")
|
||||
```
|
||||
- Good pattern:
|
||||
```python
|
||||
try:
|
||||
data = await device.get_data() # Can throw
|
||||
except DeviceError:
|
||||
_LOGGER.error("Failed to get data")
|
||||
return
|
||||
|
||||
# ✅ Process data outside try block
|
||||
processed = data.get("value", 0) * 100
|
||||
self._attr_native_value = processed
|
||||
```
|
||||
- **Bare Exception Usage**:
|
||||
```python
|
||||
# ❌ Not allowed in regular code
|
||||
try:
|
||||
data = await device.get_data()
|
||||
except Exception: # Too broad
|
||||
_LOGGER.error("Failed")
|
||||
|
||||
# ✅ Allowed in config flow for robustness
|
||||
async def async_step_user(self, user_input=None):
|
||||
try:
|
||||
await self._test_connection(user_input)
|
||||
except Exception: # Allowed here
|
||||
errors["base"] = "unknown"
|
||||
|
||||
# ✅ Allowed in background tasks
|
||||
async def _background_refresh():
|
||||
try:
|
||||
await coordinator.async_refresh()
|
||||
except Exception: # Allowed in task
|
||||
_LOGGER.exception("Unexpected error in background task")
|
||||
```
|
||||
- **Setup Failure Patterns**:
|
||||
```python
|
||||
try:
|
||||
await device.async_setup()
|
||||
except (asyncio.TimeoutError, TimeoutException) as ex:
|
||||
raise ConfigEntryNotReady(f"Timeout connecting to {device.host}") from ex
|
||||
except AuthFailed as ex:
|
||||
raise ConfigEntryAuthFailed(f"Credentials expired for {device.name}") from ex
|
||||
```
|
||||
|
||||
### Logging
|
||||
- **Format Guidelines**:
|
||||
- No periods at end of messages
|
||||
- No integration names/domains (added automatically)
|
||||
- No sensitive data (keys, tokens, passwords)
|
||||
- Use debug level for non-user-facing messages
|
||||
- **Use Lazy Logging**:
|
||||
```python
|
||||
_LOGGER.debug("This is a log message with %s", variable)
|
||||
```
|
||||
|
||||
### Unavailability Logging
|
||||
- **Log Once**: When device/service becomes unavailable (info level)
|
||||
- **Log Recovery**: When device/service comes back online
|
||||
- **Implementation Pattern**:
|
||||
```python
|
||||
_unavailable_logged: bool = False
|
||||
|
||||
if not self._unavailable_logged:
|
||||
_LOGGER.info("The sensor is unavailable: %s", ex)
|
||||
self._unavailable_logged = True
|
||||
# On recovery:
|
||||
if self._unavailable_logged:
|
||||
_LOGGER.info("The sensor is back online")
|
||||
self._unavailable_logged = False
|
||||
```
|
||||
|
||||
## Development Commands
|
||||
|
||||
### Environment
|
||||
- **Local development (non-container)**: Activate the project venv before running commands: `source .venv/bin/activate`
|
||||
- **Dev container**: No activation needed, the environment is pre-configured
|
||||
.vscode/tasks.json contains useful commands used for development.
|
||||
|
||||
### Code Quality & Linting
|
||||
- **Run all linters on all files**: `prek run --all-files`
|
||||
- **Run linters on staged files only**: `prek run`
|
||||
- **PyLint on everything** (slow): `pylint homeassistant`
|
||||
- **PyLint on specific folder**: `pylint homeassistant/components/my_integration`
|
||||
- **MyPy type checking (whole project)**: `mypy homeassistant/`
|
||||
- **MyPy on specific integration**: `mypy homeassistant/components/my_integration`
|
||||
## Python Syntax Notes
|
||||
|
||||
### Testing
|
||||
- **Quick test of changed files**: `pytest --timeout=10 --picked`
|
||||
- **Update test snapshots**: Add `--snapshot-update` to pytest command
|
||||
  - ⚠️ Do not rely on the test results from the run that used `--snapshot-update`
|
||||
- Always run tests again without the flag to verify snapshots
|
||||
- **Full test suite** (AVOID - very slow): `pytest ./tests`
|
||||
- Python 3.14 explicitly allows `except TypeA, TypeB:` without parentheses (PEP 758; only when no `as` clause is used).
|
||||
|
||||
### Dependencies & Requirements
|
||||
- **Update generated files after dependency changes**: `python -m script.gen_requirements_all`
|
||||
- **Install all Python requirements**:
|
||||
```bash
|
||||
uv pip install -r requirements_all.txt -r requirements.txt -r requirements_test.txt
|
||||
```
|
||||
- **Install test requirements only**:
|
||||
```bash
|
||||
uv pip install -r requirements_test_all.txt -r requirements.txt
|
||||
```
|
||||
## Good practices
|
||||
|
||||
### Translations
|
||||
- **Update translations after strings.json changes**:
|
||||
```bash
|
||||
python -m script.translations develop --all
|
||||
```
|
||||
|
||||
### Project Validation
|
||||
- **Run hassfest** (checks project structure and updates generated files):
|
||||
```bash
|
||||
python -m script.hassfest
|
||||
```
|
||||
|
||||
## Common Anti-Patterns & Best Practices
|
||||
|
||||
### ❌ **Avoid These Patterns**
|
||||
```python
|
||||
# Blocking operations in event loop
|
||||
data = requests.get(url) # ❌ Blocks event loop
|
||||
time.sleep(5) # ❌ Blocks event loop
|
||||
|
||||
# Reusing BleakClient instances
|
||||
self.client = BleakClient(address)
|
||||
await self.client.connect()
|
||||
# Later...
|
||||
await self.client.connect() # ❌ Don't reuse
|
||||
|
||||
# Hardcoded strings in code
|
||||
self._attr_name = "Temperature Sensor" # ❌ Not translatable
|
||||
|
||||
# Missing error handling
|
||||
data = await self.api.get_data() # ❌ No exception handling
|
||||
|
||||
# Storing sensitive data in diagnostics
|
||||
return {"api_key": entry.data[CONF_API_KEY]} # ❌ Exposes secrets
|
||||
|
||||
# Accessing hass.data directly in tests
|
||||
coordinator = hass.data[DOMAIN][entry.entry_id] # ❌ Don't access hass.data
|
||||
|
||||
# User-configurable polling intervals
|
||||
# In config flow
|
||||
vol.Optional("scan_interval", default=60): cv.positive_int # ❌ Not allowed
|
||||
# In coordinator
|
||||
update_interval = timedelta(minutes=entry.data.get("scan_interval", 1)) # ❌ Not allowed
|
||||
|
||||
# User-configurable config entry names (non-helper integrations)
|
||||
vol.Optional("name", default="My Device"): cv.string # ❌ Not allowed in regular integrations
|
||||
|
||||
# Too much code in try block
|
||||
try:
|
||||
response = await client.get_data() # Can throw
|
||||
# ❌ Data processing should be outside try block
|
||||
temperature = response["temperature"] / 10
|
||||
humidity = response["humidity"]
|
||||
self._attr_native_value = temperature
|
||||
except ClientError:
|
||||
_LOGGER.error("Failed to fetch data")
|
||||
|
||||
# Bare exceptions in regular code
|
||||
try:
|
||||
value = await sensor.read_value()
|
||||
except Exception: # ❌ Too broad - catch specific exceptions
|
||||
_LOGGER.error("Failed to read sensor")
|
||||
```
|
||||
|
||||
### ✅ **Use These Patterns Instead**
|
||||
```python
|
||||
# Async operations with executor
|
||||
data = await hass.async_add_executor_job(requests.get, url)
|
||||
await asyncio.sleep(5) # ✅ Non-blocking
|
||||
|
||||
# Fresh BleakClient instances
|
||||
client = BleakClient(address) # ✅ New instance each time
|
||||
await client.connect()
|
||||
|
||||
# Translatable entity names
|
||||
_attr_translation_key = "temperature_sensor" # ✅ Translatable
|
||||
|
||||
# Proper error handling
|
||||
try:
|
||||
data = await self.api.get_data()
|
||||
except ApiException as err:
|
||||
raise UpdateFailed(f"API error: {err}") from err
|
||||
|
||||
# Redacted diagnostics data
|
||||
return async_redact_data(data, {"api_key", "password"}) # ✅ Safe
|
||||
|
||||
# Test through proper integration setup and fixtures
|
||||
@pytest.fixture
|
||||
async def init_integration(hass, mock_config_entry, mock_api):
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
await hass.config_entries.async_setup(mock_config_entry.entry_id) # ✅ Proper setup
|
||||
|
||||
# Integration-determined polling intervals (not user-configurable)
|
||||
SCAN_INTERVAL = timedelta(minutes=5) # ✅ Common pattern: constant in const.py
|
||||
|
||||
class MyCoordinator(DataUpdateCoordinator[MyData]):
|
||||
def __init__(self, hass: HomeAssistant, client: MyClient, config_entry: ConfigEntry) -> None:
|
||||
# ✅ Integration determines interval based on device capabilities, connection type, etc.
|
||||
interval = timedelta(minutes=1) if client.is_local else SCAN_INTERVAL
|
||||
super().__init__(
|
||||
hass,
|
||||
logger=LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=interval,
|
||||
config_entry=config_entry, # ✅ Pass config_entry - it's accepted and recommended
|
||||
)
|
||||
```
|
||||
Integrations with Platinum or Gold level in the Integration Quality Scale reflect a high standard of code quality and maintainability. When looking for examples of something, these are good places to start. The level is indicated in the manifest.json of the integration.
|
||||
|
||||
15
CODEOWNERS
generated
15
CODEOWNERS
generated
@@ -281,6 +281,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/cert_expiry/ @jjlawren
|
||||
/homeassistant/components/chacon_dio/ @cnico
|
||||
/tests/components/chacon_dio/ @cnico
|
||||
/homeassistant/components/chess_com/ @joostlek
|
||||
/tests/components/chess_com/ @joostlek
|
||||
/homeassistant/components/cisco_ios/ @fbradyirl
|
||||
/homeassistant/components/cisco_mobility_express/ @fbradyirl
|
||||
/homeassistant/components/cisco_webex_teams/ @fbradyirl
|
||||
@@ -383,6 +385,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/dlna_dms/ @chishm
|
||||
/homeassistant/components/dnsip/ @gjohansson-ST
|
||||
/tests/components/dnsip/ @gjohansson-ST
|
||||
/homeassistant/components/door/ @home-assistant/core
|
||||
/tests/components/door/ @home-assistant/core
|
||||
/homeassistant/components/doorbird/ @oblogic7 @bdraco @flacjacket
|
||||
/tests/components/doorbird/ @oblogic7 @bdraco @flacjacket
|
||||
/homeassistant/components/dormakaba_dkey/ @emontnemery
|
||||
@@ -1200,6 +1204,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/open_meteo/ @frenck
|
||||
/homeassistant/components/open_router/ @joostlek
|
||||
/tests/components/open_router/ @joostlek
|
||||
/homeassistant/components/opendisplay/ @g4bri3lDev
|
||||
/tests/components/opendisplay/ @g4bri3lDev
|
||||
/homeassistant/components/openerz/ @misialq
|
||||
/tests/components/openerz/ @misialq
|
||||
/homeassistant/components/openevse/ @c00w @firstof9
|
||||
@@ -1305,8 +1311,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/prosegur/ @dgomes
|
||||
/homeassistant/components/proximity/ @mib1185
|
||||
/tests/components/proximity/ @mib1185
|
||||
/homeassistant/components/proxmoxve/ @jhollowe @Corbeno @erwindouna
|
||||
/tests/components/proxmoxve/ @jhollowe @Corbeno @erwindouna
|
||||
/homeassistant/components/proxmoxve/ @Corbeno @erwindouna @CoMPaTech
|
||||
/tests/components/proxmoxve/ @Corbeno @erwindouna @CoMPaTech
|
||||
/homeassistant/components/ps4/ @ktnrg45
|
||||
/tests/components/ps4/ @ktnrg45
|
||||
/homeassistant/components/pterodactyl/ @elmurato
|
||||
@@ -1650,8 +1656,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/system_bridge/ @timmo001
|
||||
/homeassistant/components/systemmonitor/ @gjohansson-ST
|
||||
/tests/components/systemmonitor/ @gjohansson-ST
|
||||
/homeassistant/components/systemnexa2/ @konsulten @slangstrom
|
||||
/tests/components/systemnexa2/ @konsulten @slangstrom
|
||||
/homeassistant/components/systemnexa2/ @konsulten
|
||||
/tests/components/systemnexa2/ @konsulten
|
||||
/homeassistant/components/tado/ @erwindouna
|
||||
/tests/components/tado/ @erwindouna
|
||||
/homeassistant/components/tag/ @home-assistant/core
|
||||
@@ -1691,7 +1697,6 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/tessie/ @Bre77
|
||||
/homeassistant/components/text/ @home-assistant/core
|
||||
/tests/components/text/ @home-assistant/core
|
||||
/homeassistant/components/tfiac/ @fredrike @mellado
|
||||
/homeassistant/components/thermobeacon/ @bdraco
|
||||
/tests/components/thermobeacon/ @bdraco
|
||||
/homeassistant/components/thermopro/ @bdraco @h3ss
|
||||
|
||||
@@ -236,6 +236,12 @@ DEFAULT_INTEGRATIONS = {
|
||||
"input_text",
|
||||
"schedule",
|
||||
"timer",
|
||||
#
|
||||
# Base platforms:
|
||||
*BASE_PLATFORMS,
|
||||
#
|
||||
# Integrations providing triggers and conditions for base platforms:
|
||||
"door",
|
||||
}
|
||||
DEFAULT_INTEGRATIONS_RECOVERY_MODE = {
|
||||
# These integrations are set up if recovery mode is activated.
|
||||
|
||||
@@ -18,6 +18,10 @@ from homeassistant.helpers.schema_config_entry_flow import (
|
||||
SchemaOptionsFlowHandler,
|
||||
)
|
||||
from homeassistant.helpers.selector import BooleanSelector
|
||||
from homeassistant.helpers.service_info.zeroconf import (
|
||||
ATTR_PROPERTIES_ID,
|
||||
ZeroconfServiceInfo,
|
||||
)
|
||||
|
||||
from .const import CONF_CLIP_NEGATIVE, CONF_RETURN_AVERAGE, DOMAIN
|
||||
|
||||
@@ -46,6 +50,9 @@ class AirQConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
VERSION = 1
|
||||
|
||||
_discovered_host: str
|
||||
_discovered_name: str
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -90,6 +97,58 @@ class AirQConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle zeroconf discovery of an air-Q device."""
|
||||
self._discovered_host = discovery_info.host
|
||||
self._discovered_name = discovery_info.properties.get("devicename", "air-Q")
|
||||
device_id = discovery_info.properties.get(ATTR_PROPERTIES_ID)
|
||||
|
||||
if not device_id:
|
||||
return self.async_abort(reason="incomplete_discovery")
|
||||
|
||||
await self.async_set_unique_id(device_id)
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={CONF_IP_ADDRESS: self._discovered_host},
|
||||
reload_on_update=True,
|
||||
)
|
||||
|
||||
self.context["title_placeholders"] = {"name": self._discovered_name}
|
||||
|
||||
return await self.async_step_discovery_confirm()
|
||||
|
||||
async def async_step_discovery_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle user confirmation of a discovered air-Q device."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
session = async_get_clientsession(self.hass)
|
||||
airq = AirQ(self._discovered_host, user_input[CONF_PASSWORD], session)
|
||||
try:
|
||||
await airq.validate()
|
||||
except ClientConnectionError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
else:
|
||||
return self.async_create_entry(
|
||||
title=self._discovered_name,
|
||||
data={
|
||||
CONF_IP_ADDRESS: self._discovered_host,
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="discovery_confirm",
|
||||
data_schema=vol.Schema({vol.Required(CONF_PASSWORD): str}),
|
||||
description_placeholders={"name": self._discovered_name},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
|
||||
@@ -7,5 +7,13 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairq"],
|
||||
"requirements": ["aioairq==0.4.7"]
|
||||
"requirements": ["aioairq==0.4.7"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"properties": {
|
||||
"device": "air-q"
|
||||
},
|
||||
"type": "_http._tcp.local."
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,14 +1,23 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"incomplete_discovery": "The discovered air-Q device did not provide a device ID. Ensure the firmware is up to date."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"invalid_input": "[%key:common::config_flow::error::invalid_host%]"
|
||||
},
|
||||
"flow_title": "{name}",
|
||||
"step": {
|
||||
"discovery_confirm": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"description": "Do you want to set up **{name}**?",
|
||||
"title": "Set up air-Q"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"ip_address": "[%key:common::config_flow::data::ip%]",
|
||||
|
||||
@@ -117,23 +117,23 @@ class AirtouchAC(CoordinatorEntity, ClimateEntity):
|
||||
return super()._handle_coordinator_update()
|
||||
|
||||
@property
|
||||
def current_temperature(self):
|
||||
def current_temperature(self) -> int:
|
||||
"""Return the current temperature."""
|
||||
return self._unit.Temperature
|
||||
|
||||
@property
|
||||
def fan_mode(self):
|
||||
def fan_mode(self) -> str:
|
||||
"""Return fan mode of the AC this group belongs to."""
|
||||
return AT_TO_HA_FAN_SPEED[self._airtouch.acs[self._ac_number].AcFanSpeed]
|
||||
|
||||
@property
|
||||
def fan_modes(self):
|
||||
def fan_modes(self) -> list[str]:
|
||||
"""Return the list of available fan modes."""
|
||||
airtouch_fan_speeds = self._airtouch.GetSupportedFanSpeedsForAc(self._ac_number)
|
||||
return [AT_TO_HA_FAN_SPEED[speed] for speed in airtouch_fan_speeds]
|
||||
|
||||
@property
|
||||
def hvac_mode(self):
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return hvac target hvac state."""
|
||||
is_off = self._unit.PowerState == "Off"
|
||||
if is_off:
|
||||
@@ -236,17 +236,17 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
|
||||
return self._airtouch.acs[self._unit.BelongsToAc].MaxSetpoint
|
||||
|
||||
@property
|
||||
def current_temperature(self):
|
||||
def current_temperature(self) -> int:
|
||||
"""Return the current temperature."""
|
||||
return self._unit.Temperature
|
||||
|
||||
@property
|
||||
def target_temperature(self):
|
||||
def target_temperature(self) -> int:
|
||||
"""Return the temperature we are trying to reach."""
|
||||
return self._unit.TargetSetpoint
|
||||
|
||||
@property
|
||||
def hvac_mode(self):
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return hvac target hvac state."""
|
||||
# there are other power states that aren't 'on' but still count as on (eg. 'Turbo')
|
||||
is_off = self._unit.PowerState == "Off"
|
||||
@@ -272,12 +272,12 @@ class AirtouchGroup(CoordinatorEntity, ClimateEntity):
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def fan_mode(self):
|
||||
def fan_mode(self) -> str:
|
||||
"""Return fan mode of the AC this group belongs to."""
|
||||
return AT_TO_HA_FAN_SPEED[self._airtouch.acs[self._unit.BelongsToAc].AcFanSpeed]
|
||||
|
||||
@property
|
||||
def fan_modes(self):
|
||||
def fan_modes(self) -> list[str]:
|
||||
"""Return the list of available fan modes."""
|
||||
airtouch_fan_speeds = self._airtouch.GetSupportedFanSpeedsByGroup(
|
||||
self._group_number
|
||||
|
||||
@@ -7,13 +7,7 @@ from datetime import timedelta
|
||||
from math import ceil
|
||||
from typing import Any
|
||||
|
||||
from pyairvisual.cloud_api import (
|
||||
CloudAPI,
|
||||
InvalidKeyError,
|
||||
KeyExpiredError,
|
||||
UnauthorizedError,
|
||||
)
|
||||
from pyairvisual.errors import AirVisualError
|
||||
from pyairvisual.cloud_api import CloudAPI
|
||||
|
||||
from homeassistant.components import automation
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
@@ -28,14 +22,12 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import (
|
||||
aiohttp_client,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import (
|
||||
CONF_CITY,
|
||||
@@ -47,8 +39,7 @@ from .const import (
|
||||
INTEGRATION_TYPE_NODE_PRO,
|
||||
LOGGER,
|
||||
)
|
||||
|
||||
type AirVisualConfigEntry = ConfigEntry[DataUpdateCoordinator]
|
||||
from .coordinator import AirVisualConfigEntry, AirVisualDataUpdateCoordinator
|
||||
|
||||
# We use a raw string for the airvisual_pro domain (instead of importing the actual
|
||||
# constant) so that we can avoid listing it as a dependency:
|
||||
@@ -85,8 +76,8 @@ def async_get_cloud_api_update_interval(
|
||||
@callback
|
||||
def async_get_cloud_coordinators_by_api_key(
|
||||
hass: HomeAssistant, api_key: str
|
||||
) -> list[DataUpdateCoordinator]:
|
||||
"""Get all DataUpdateCoordinator objects related to a particular API key."""
|
||||
) -> list[AirVisualDataUpdateCoordinator]:
|
||||
"""Get all AirVisualDataUpdateCoordinator objects related to a particular API key."""
|
||||
return [
|
||||
entry.runtime_data
|
||||
for entry in hass.config_entries.async_entries(DOMAIN)
|
||||
@@ -180,38 +171,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) ->
|
||||
websession = aiohttp_client.async_get_clientsession(hass)
|
||||
cloud_api = CloudAPI(entry.data[CONF_API_KEY], session=websession)
|
||||
|
||||
async def async_update_data() -> dict[str, Any]:
|
||||
"""Get new data from the API."""
|
||||
if CONF_CITY in entry.data:
|
||||
api_coro = cloud_api.air_quality.city(
|
||||
entry.data[CONF_CITY],
|
||||
entry.data[CONF_STATE],
|
||||
entry.data[CONF_COUNTRY],
|
||||
)
|
||||
else:
|
||||
api_coro = cloud_api.air_quality.nearest_city(
|
||||
entry.data[CONF_LATITUDE],
|
||||
entry.data[CONF_LONGITUDE],
|
||||
)
|
||||
|
||||
try:
|
||||
return await api_coro
|
||||
except (InvalidKeyError, KeyExpiredError, UnauthorizedError) as ex:
|
||||
raise ConfigEntryAuthFailed from ex
|
||||
except AirVisualError as err:
|
||||
raise UpdateFailed(f"Error while retrieving data: {err}") from err
|
||||
|
||||
coordinator = DataUpdateCoordinator(
|
||||
coordinator = AirVisualDataUpdateCoordinator(
|
||||
hass,
|
||||
LOGGER,
|
||||
config_entry=entry,
|
||||
entry,
|
||||
cloud_api,
|
||||
name=async_get_geography_id(entry.data),
|
||||
# We give a placeholder update interval in order to create the coordinator;
|
||||
# then, below, we use the coordinator's presence (along with any other
|
||||
# coordinators using the same API key) to calculate an actual, leveled
|
||||
# update interval:
|
||||
update_interval=timedelta(minutes=5),
|
||||
update_method=async_update_data,
|
||||
)
|
||||
|
||||
entry.async_on_unload(entry.add_update_listener(async_reload_entry))
|
||||
|
||||
72
homeassistant/components/airvisual/coordinator.py
Normal file
72
homeassistant/components/airvisual/coordinator.py
Normal file
@@ -0,0 +1,72 @@
|
||||
"""Define an AirVisual data coordinator."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
from pyairvisual.cloud_api import (
|
||||
CloudAPI,
|
||||
InvalidKeyError,
|
||||
KeyExpiredError,
|
||||
UnauthorizedError,
|
||||
)
|
||||
from pyairvisual.errors import AirVisualError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_COUNTRY, CONF_LATITUDE, CONF_LONGITUDE, CONF_STATE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import CONF_CITY, LOGGER
|
||||
|
||||
type AirVisualConfigEntry = ConfigEntry[AirVisualDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirVisualDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""Class to manage fetching AirVisual data."""
|
||||
|
||||
config_entry: AirVisualConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: AirVisualConfigEntry,
|
||||
cloud_api: CloudAPI,
|
||||
name: str,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
self._cloud_api = cloud_api
|
||||
super().__init__(
|
||||
hass,
|
||||
LOGGER,
|
||||
config_entry=entry,
|
||||
name=name,
|
||||
# We give a placeholder update interval in order to create the coordinator;
|
||||
# then, in async_setup_entry, we use the coordinator's presence (along with
|
||||
# any other coordinators using the same API key) to calculate an actual,
|
||||
# leveled update interval:
|
||||
update_interval=timedelta(minutes=5),
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
"""Get new data from the API."""
|
||||
if CONF_CITY in self.config_entry.data:
|
||||
api_coro = self._cloud_api.air_quality.city(
|
||||
self.config_entry.data[CONF_CITY],
|
||||
self.config_entry.data[CONF_STATE],
|
||||
self.config_entry.data[CONF_COUNTRY],
|
||||
)
|
||||
else:
|
||||
api_coro = self._cloud_api.air_quality.nearest_city(
|
||||
self.config_entry.data[CONF_LATITUDE],
|
||||
self.config_entry.data[CONF_LONGITUDE],
|
||||
)
|
||||
|
||||
try:
|
||||
return await api_coro
|
||||
except (InvalidKeyError, KeyExpiredError, UnauthorizedError) as ex:
|
||||
raise ConfigEntryAuthFailed from ex
|
||||
except AirVisualError as err:
|
||||
raise UpdateFailed(f"Error while retrieving data: {err}") from err
|
||||
@@ -15,8 +15,8 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import AirVisualConfigEntry
|
||||
from .const import CONF_CITY
|
||||
from .coordinator import AirVisualConfigEntry
|
||||
|
||||
CONF_COORDINATES = "coordinates"
|
||||
CONF_TITLE = "title"
|
||||
|
||||
@@ -2,29 +2,25 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
CoordinatorEntity,
|
||||
DataUpdateCoordinator,
|
||||
)
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .coordinator import AirVisualDataUpdateCoordinator
|
||||
|
||||
|
||||
class AirVisualEntity(CoordinatorEntity):
|
||||
class AirVisualEntity(CoordinatorEntity[AirVisualDataUpdateCoordinator]):
|
||||
"""Define a generic AirVisual entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: DataUpdateCoordinator,
|
||||
entry: ConfigEntry,
|
||||
coordinator: AirVisualDataUpdateCoordinator,
|
||||
description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
self._attr_extra_state_attributes = {}
|
||||
self._entry = entry
|
||||
self.entity_description = description
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
|
||||
@@ -8,7 +8,6 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LONGITUDE,
|
||||
@@ -24,10 +23,9 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from . import AirVisualConfigEntry
|
||||
from .const import CONF_CITY
|
||||
from .coordinator import AirVisualConfigEntry, AirVisualDataUpdateCoordinator
|
||||
from .entity import AirVisualEntity
|
||||
|
||||
ATTR_CITY = "city"
|
||||
@@ -113,7 +111,7 @@ async def async_setup_entry(
|
||||
"""Set up AirVisual sensors based on a config entry."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(
|
||||
AirVisualGeographySensor(coordinator, entry, description, locale)
|
||||
AirVisualGeographySensor(coordinator, description, locale)
|
||||
for locale in GEOGRAPHY_SENSOR_LOCALES
|
||||
for description in GEOGRAPHY_SENSOR_DESCRIPTIONS
|
||||
)
|
||||
@@ -124,14 +122,14 @@ class AirVisualGeographySensor(AirVisualEntity, SensorEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: DataUpdateCoordinator,
|
||||
entry: ConfigEntry,
|
||||
coordinator: AirVisualDataUpdateCoordinator,
|
||||
description: SensorEntityDescription,
|
||||
locale: str,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator, entry, description)
|
||||
super().__init__(coordinator, description)
|
||||
|
||||
entry = coordinator.config_entry
|
||||
self._attr_extra_state_attributes.update(
|
||||
{
|
||||
ATTR_CITY: entry.data.get(CONF_CITY),
|
||||
@@ -182,16 +180,16 @@ class AirVisualGeographySensor(AirVisualEntity, SensorEntity):
|
||||
#
|
||||
# We use any coordinates in the config entry and, in the case of a geography by
|
||||
# name, we fall back to the latitude longitude provided in the coordinator data:
|
||||
latitude = self._entry.data.get(
|
||||
latitude = self.coordinator.config_entry.data.get(
|
||||
CONF_LATITUDE,
|
||||
self.coordinator.data["location"]["coordinates"][1],
|
||||
)
|
||||
longitude = self._entry.data.get(
|
||||
longitude = self.coordinator.config_entry.data.get(
|
||||
CONF_LONGITUDE,
|
||||
self.coordinator.data["location"]["coordinates"][0],
|
||||
)
|
||||
|
||||
if self._entry.options[CONF_SHOW_ON_MAP]:
|
||||
if self.coordinator.config_entry.options[CONF_SHOW_ON_MAP]:
|
||||
self._attr_extra_state_attributes[ATTR_LATITUDE] = latitude
|
||||
self._attr_extra_state_attributes[ATTR_LONGITUDE] = longitude
|
||||
self._attr_extra_state_attributes.pop("lati", None)
|
||||
|
||||
@@ -4,18 +4,9 @@ from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
from pyairvisual.node import (
|
||||
InvalidAuthenticationError,
|
||||
NodeConnectionError,
|
||||
NodeProError,
|
||||
NodeSamba,
|
||||
)
|
||||
from pyairvisual.node import NodeProError, NodeSamba
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_IP_ADDRESS,
|
||||
CONF_PASSWORD,
|
||||
@@ -23,25 +14,16 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .const import LOGGER
|
||||
from .coordinator import (
|
||||
AirVisualProConfigEntry,
|
||||
AirVisualProCoordinator,
|
||||
AirVisualProData,
|
||||
)
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
UPDATE_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
type AirVisualProConfigEntry = ConfigEntry[AirVisualProData]
|
||||
|
||||
|
||||
@dataclass
|
||||
class AirVisualProData:
|
||||
"""Define a data class."""
|
||||
|
||||
coordinator: DataUpdateCoordinator
|
||||
node: NodeSamba
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: AirVisualProConfigEntry
|
||||
@@ -54,48 +36,15 @@ async def async_setup_entry(
|
||||
except NodeProError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
|
||||
reload_task: asyncio.Task | None = None
|
||||
|
||||
async def async_get_data() -> dict[str, Any]:
|
||||
"""Get data from the device."""
|
||||
try:
|
||||
data = await node.async_get_latest_measurements()
|
||||
data["history"] = {}
|
||||
if data["settings"].get("follow_mode") == "device":
|
||||
history = await node.async_get_history(include_trends=False)
|
||||
data["history"] = history.get("measurements", [])[-1]
|
||||
except InvalidAuthenticationError as err:
|
||||
raise ConfigEntryAuthFailed("Invalid Samba password") from err
|
||||
except NodeConnectionError as err:
|
||||
nonlocal reload_task
|
||||
if not reload_task:
|
||||
reload_task = hass.async_create_task(
|
||||
hass.config_entries.async_reload(entry.entry_id)
|
||||
)
|
||||
raise UpdateFailed(f"Connection to Pro unit lost: {err}") from err
|
||||
except NodeProError as err:
|
||||
raise UpdateFailed(f"Error while retrieving data: {err}") from err
|
||||
|
||||
return data
|
||||
|
||||
coordinator = DataUpdateCoordinator(
|
||||
hass,
|
||||
LOGGER,
|
||||
config_entry=entry,
|
||||
name="Node/Pro data",
|
||||
update_interval=UPDATE_INTERVAL,
|
||||
update_method=async_get_data,
|
||||
)
|
||||
|
||||
coordinator = AirVisualProCoordinator(hass, entry, node)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
entry.runtime_data = AirVisualProData(coordinator=coordinator, node=node)
|
||||
|
||||
async def async_shutdown(_: Event) -> None:
|
||||
"""Define an event handler to disconnect from the websocket."""
|
||||
nonlocal reload_task
|
||||
if reload_task:
|
||||
if coordinator.reload_task:
|
||||
with suppress(asyncio.CancelledError):
|
||||
reload_task.cancel()
|
||||
coordinator.reload_task.cancel()
|
||||
await node.async_disconnect()
|
||||
|
||||
entry.async_on_unload(
|
||||
|
||||
79
homeassistant/components/airvisual_pro/coordinator.py
Normal file
79
homeassistant/components/airvisual_pro/coordinator.py
Normal file
@@ -0,0 +1,79 @@
|
||||
"""DataUpdateCoordinator for the AirVisual Pro integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from typing import Any
|
||||
|
||||
from pyairvisual.node import (
|
||||
InvalidAuthenticationError,
|
||||
NodeConnectionError,
|
||||
NodeProError,
|
||||
NodeSamba,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import LOGGER
|
||||
|
||||
UPDATE_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
|
||||
@dataclass
|
||||
class AirVisualProData:
|
||||
"""Define a data class."""
|
||||
|
||||
coordinator: AirVisualProCoordinator
|
||||
node: NodeSamba
|
||||
|
||||
|
||||
type AirVisualProConfigEntry = ConfigEntry[AirVisualProData]
|
||||
|
||||
|
||||
class AirVisualProCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""Coordinator for AirVisual Pro data."""
|
||||
|
||||
config_entry: AirVisualProConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: AirVisualProConfigEntry,
|
||||
node: NodeSamba,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(
|
||||
hass,
|
||||
LOGGER,
|
||||
config_entry=config_entry,
|
||||
name="Node/Pro data",
|
||||
update_interval=UPDATE_INTERVAL,
|
||||
)
|
||||
self._node = node
|
||||
self.reload_task: asyncio.Task[bool] | None = None
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
"""Get data from the device."""
|
||||
try:
|
||||
data = await self._node.async_get_latest_measurements()
|
||||
data["history"] = {}
|
||||
if data["settings"].get("follow_mode") == "device":
|
||||
history = await self._node.async_get_history(include_trends=False)
|
||||
data["history"] = history.get("measurements", [])[-1]
|
||||
except InvalidAuthenticationError as err:
|
||||
raise ConfigEntryAuthFailed("Invalid Samba password") from err
|
||||
except NodeConnectionError as err:
|
||||
if self.reload_task is None:
|
||||
self.reload_task = self.hass.async_create_task(
|
||||
self.hass.config_entries.async_reload(self.config_entry.entry_id)
|
||||
)
|
||||
raise UpdateFailed(f"Connection to Pro unit lost: {err}") from err
|
||||
except NodeProError as err:
|
||||
raise UpdateFailed(f"Error while retrieving data: {err}") from err
|
||||
|
||||
return data
|
||||
@@ -8,7 +8,7 @@ from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_PASSWORD
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import AirVisualProConfigEntry
|
||||
from .coordinator import AirVisualProConfigEntry
|
||||
|
||||
CONF_MAC_ADDRESS = "mac_address"
|
||||
CONF_SERIAL_NUMBER = "serial_number"
|
||||
|
||||
@@ -4,19 +4,17 @@ from __future__ import annotations
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
CoordinatorEntity,
|
||||
DataUpdateCoordinator,
|
||||
)
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirVisualProCoordinator
|
||||
|
||||
|
||||
class AirVisualProEntity(CoordinatorEntity):
|
||||
class AirVisualProEntity(CoordinatorEntity[AirVisualProCoordinator]):
|
||||
"""Define a generic AirVisual Pro entity."""
|
||||
|
||||
def __init__(
|
||||
self, coordinator: DataUpdateCoordinator, description: EntityDescription
|
||||
self, coordinator: AirVisualProCoordinator, description: EntityDescription
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
@@ -22,7 +22,7 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import AirVisualProConfigEntry
|
||||
from .coordinator import AirVisualProConfigEntry
|
||||
from .entity import AirVisualProEntity
|
||||
|
||||
|
||||
|
||||
@@ -66,9 +66,7 @@ rules:
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices:
|
||||
status: todo
|
||||
comment: We can automatically remove removed devices
|
||||
stale-devices: done
|
||||
|
||||
# Platinum
|
||||
async-dependency: todo
|
||||
|
||||
@@ -142,15 +142,18 @@ _EXPERIMENTAL_TRIGGER_PLATFORMS = {
|
||||
"climate",
|
||||
"cover",
|
||||
"device_tracker",
|
||||
"door",
|
||||
"fan",
|
||||
"humidifier",
|
||||
"lawn_mower",
|
||||
"light",
|
||||
"lock",
|
||||
"media_player",
|
||||
"number",
|
||||
"person",
|
||||
"remote",
|
||||
"scene",
|
||||
"schedule",
|
||||
"siren",
|
||||
"switch",
|
||||
"text",
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bsblan"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["python-bsblan==5.1.1"],
|
||||
"requirements": ["python-bsblan==5.1.2"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"name": "bsb-lan*",
|
||||
|
||||
31
homeassistant/components/chess_com/__init__.py
Normal file
31
homeassistant/components/chess_com/__init__.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""The Chess.com integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import ChessConfigEntry, ChessCoordinator
|
||||
|
||||
_PLATFORMS: list[Platform] = [
|
||||
Platform.SENSOR,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ChessConfigEntry) -> bool:
|
||||
"""Set up Chess.com from a config entry."""
|
||||
|
||||
coordinator = ChessCoordinator(hass, entry)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ChessConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
|
||||
47
homeassistant/components/chess_com/config_flow.py
Normal file
47
homeassistant/components/chess_com/config_flow.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""Config flow for the Chess.com integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from chess_com_api import ChessComClient, NotFoundError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_USERNAME
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ChessConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Chess.com."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step.

        Validates the entered username against the Chess.com API before
        creating the entry; the numeric player id is used as the unique id.
        """
        errors: dict[str, str] = {}
        if user_input is not None:
            client = ChessComClient(session=async_get_clientsession(self.hass))
            try:
                player = await client.get_player(user_input[CONF_USERNAME])
            except NotFoundError:
                errors["base"] = "player_not_found"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                # Use the stable numeric id, not the username, as unique id.
                await self.async_set_unique_id(str(player.player_id))
                self._abort_if_unique_id_configured()
                return self.async_create_entry(title=player.name, data=user_input)

        # First visit, or validation failed: (re-)show the form.
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema({vol.Required(CONF_USERNAME): str}),
            errors=errors,
        )
|
||||
3
homeassistant/components/chess_com/const.py
Normal file
3
homeassistant/components/chess_com/const.py
Normal file
@@ -0,0 +1,3 @@
|
||||
"""Constants for the Chess.com integration."""
|
||||
|
||||
DOMAIN = "chess_com"
|
||||
57
homeassistant/components/chess_com/coordinator.py
Normal file
57
homeassistant/components/chess_com/coordinator.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""Coordinator for Chess.com."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from chess_com_api import ChessComAPIError, ChessComClient, Player, PlayerStats
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type ChessConfigEntry = ConfigEntry[ChessCoordinator]
|
||||
|
||||
|
||||
@dataclass
class ChessData:
    """Data for Chess.com.

    Container for one coordinator refresh cycle.
    """

    # Player profile as returned by ChessComClient.get_player.
    player: Player
    # Player statistics as returned by ChessComClient.get_player_stats.
    stats: PlayerStats
|
||||
|
||||
|
||||
class ChessCoordinator(DataUpdateCoordinator[ChessData]):
    """Coordinator for Chess.com.

    Polls the player's profile and statistics once per hour and exposes them
    as a ChessData instance.
    """

    config_entry: ChessConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ChessConfigEntry,
    ) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=config_entry.title,
            # Ratings change slowly; hourly polling is sufficient.
            update_interval=timedelta(hours=1),
        )
        self.client = ChessComClient(session=async_get_clientsession(hass))

    async def _async_update_data(self) -> ChessData:
        """Update data from Chess.com."""
        username = self.config_entry.data[CONF_USERNAME]
        try:
            profile = await self.client.get_player(username)
            statistics = await self.client.get_player_stats(username)
        except ChessComAPIError as err:
            raise UpdateFailed(f"Error communicating with Chess.com: {err}") from err
        return ChessData(player=profile, stats=statistics)
|
||||
26
homeassistant/components/chess_com/entity.py
Normal file
26
homeassistant/components/chess_com/entity.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""Base entity for Chess.com integration."""
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import ChessCoordinator
|
||||
|
||||
|
||||
class ChessEntity(CoordinatorEntity[ChessCoordinator]):
    """Base entity for Chess.com integration.

    Attaches every entity to a per-player service device keyed by the config
    entry's unique id.
    """

    _attr_has_entity_name = True

    def __init__(self, coordinator: ChessCoordinator) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        entry = coordinator.config_entry
        if TYPE_CHECKING:
            # unique_id is always set by the config flow before setup.
            assert entry.unique_id is not None
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, entry.unique_id)},
            entry_type=DeviceEntryType.SERVICE,
            manufacturer="Chess.com",
        )
|
||||
21
homeassistant/components/chess_com/icons.json
Normal file
21
homeassistant/components/chess_com/icons.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"chess_daily_rating": {
|
||||
"default": "mdi:chart-line"
|
||||
},
|
||||
"followers": {
|
||||
"default": "mdi:account-multiple"
|
||||
},
|
||||
"total_daily_draw": {
|
||||
"default": "mdi:chess-pawn"
|
||||
},
|
||||
"total_daily_lost": {
|
||||
"default": "mdi:chess-pawn"
|
||||
},
|
||||
"total_daily_won": {
|
||||
"default": "mdi:chess-pawn"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
12
homeassistant/components/chess_com/manifest.json
Normal file
12
homeassistant/components/chess_com/manifest.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"domain": "chess_com",
|
||||
"name": "Chess.com",
|
||||
"codeowners": ["@joostlek"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/chess_com",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["chess_com_api"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["chess-com-api==1.1.0"]
|
||||
}
|
||||
74
homeassistant/components/chess_com/quality_scale.yaml
Normal file
74
homeassistant/components/chess_com/quality_scale.yaml
Normal file
@@ -0,0 +1,74 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: There are no custom actions
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: There are no custom actions
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: Entities do not explicitly subscribe to events
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: There are no configuration parameters
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: Can't detect a game
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: Can't detect a game
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
entity-device-class: todo
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: There are no repairable issues
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
97
homeassistant/components/chess_com/sensor.py
Normal file
97
homeassistant/components/chess_com/sensor.py
Normal file
@@ -0,0 +1,97 @@
|
||||
"""Sensor platform for Chess.com integration."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import ChessConfigEntry
|
||||
from .coordinator import ChessCoordinator, ChessData
|
||||
from .entity import ChessEntity
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
class ChessEntityDescription(SensorEntityDescription):
    """Sensor description for Chess.com player."""

    # Extracts this sensor's value from the coordinator's ChessData.
    value_fn: Callable[[ChessData], float]
|
||||
|
||||
|
||||
# Descriptions of all sensors created per configured player.
SENSORS: tuple[ChessEntityDescription, ...] = (
    # Follower count; diagnostic and disabled by default.
    ChessEntityDescription(
        key="followers",
        translation_key="followers",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda state: state.player.followers,
        entity_registry_enabled_default=False,
    ),
    # Rating of the most recent rated daily-chess game.
    ChessEntityDescription(
        key="chess_daily_rating",
        translation_key="chess_daily_rating",
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda state: state.stats.chess_daily["last"]["rating"],
    ),
    # Lifetime daily-chess records below only ever grow (TOTAL_INCREASING).
    ChessEntityDescription(
        key="total_daily_won",
        translation_key="total_daily_won",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.TOTAL_INCREASING,
        value_fn=lambda state: state.stats.chess_daily["record"]["win"],
    ),
    ChessEntityDescription(
        key="total_daily_lost",
        translation_key="total_daily_lost",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.TOTAL_INCREASING,
        value_fn=lambda state: state.stats.chess_daily["record"]["loss"],
    ),
    ChessEntityDescription(
        key="total_daily_draw",
        translation_key="total_daily_draw",
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.TOTAL_INCREASING,
        value_fn=lambda state: state.stats.chess_daily["record"]["draw"],
    ),
)
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ChessConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Initialize the entries.

    Creates one ChessPlayerSensor per entry in SENSORS, all sharing the
    entry's coordinator.
    """
    coordinator = entry.runtime_data
    entities = [ChessPlayerSensor(coordinator, sensor) for sensor in SENSORS]
    async_add_entities(entities)
|
||||
|
||||
|
||||
class ChessPlayerSensor(ChessEntity, SensorEntity):
    """Chess.com sensor.

    A single statistic of one player, read from the shared coordinator via
    the description's value_fn.
    """

    entity_description: ChessEntityDescription

    def __init__(
        self,
        coordinator: ChessCoordinator,
        description: ChessEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
        self.entity_description = description
        # Namespace the entity id under the player's unique id.
        unique_id = coordinator.config_entry.unique_id
        self._attr_unique_id = f"{unique_id}.{description.key}"

    @property
    def native_value(self) -> float:
        """Return the state of the sensor."""
        data = self.coordinator.data
        return self.entity_description.value_fn(data)
|
||||
47
homeassistant/components/chess_com/strings.json
Normal file
47
homeassistant/components/chess_com/strings.json
Normal file
@@ -0,0 +1,47 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
|
||||
},
|
||||
"error": {
|
||||
"player_not_found": "Player not found.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "Add player"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"username": "The Chess.com username of the player to monitor."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"chess_daily_rating": {
|
||||
"name": "Daily chess rating"
|
||||
},
|
||||
"followers": {
|
||||
"name": "Followers",
|
||||
"unit_of_measurement": "followers"
|
||||
},
|
||||
"total_daily_draw": {
|
||||
"name": "Total chess games drawn",
|
||||
"unit_of_measurement": "[%key:component::chess_com::entity::sensor::total_daily_won::unit_of_measurement%]"
|
||||
},
|
||||
"total_daily_lost": {
|
||||
"name": "Total chess games lost",
|
||||
"unit_of_measurement": "[%key:component::chess_com::entity::sensor::total_daily_won::unit_of_measurement%]"
|
||||
},
|
||||
"total_daily_won": {
|
||||
"name": "Total chess games won",
|
||||
"unit_of_measurement": "games"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -107,17 +107,17 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
return UnitOfTemperature.FAHRENHEIT
|
||||
|
||||
@property
|
||||
def current_temperature(self):
|
||||
def current_temperature(self) -> float:
|
||||
"""Return the current temperature."""
|
||||
return self._unit.temperature
|
||||
|
||||
@property
|
||||
def target_temperature(self):
|
||||
def target_temperature(self) -> float:
|
||||
"""Return the temperature we are trying to reach."""
|
||||
return self._unit.thermostat
|
||||
|
||||
@property
|
||||
def hvac_mode(self):
|
||||
def hvac_mode(self) -> HVACMode:
|
||||
"""Return hvac target hvac state."""
|
||||
mode = self._unit.mode
|
||||
if not self._unit.is_on:
|
||||
@@ -126,7 +126,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
return CM_TO_HA_STATE[mode]
|
||||
|
||||
@property
|
||||
def fan_mode(self):
|
||||
def fan_mode(self) -> str:
|
||||
"""Return the fan setting."""
|
||||
|
||||
# Normalize to lowercase for lookup, and pass unknown lowercase values through.
|
||||
@@ -145,7 +145,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
return CM_TO_HA_FAN[fan_speed_lower]
|
||||
|
||||
@property
|
||||
def fan_modes(self):
|
||||
def fan_modes(self) -> list[str]:
|
||||
"""Return the list of available fan modes."""
|
||||
return FAN_MODES
|
||||
|
||||
|
||||
@@ -30,9 +30,16 @@ async def async_setup_entry(
|
||||
async_add_entities(
|
||||
[
|
||||
DemoWaterHeater(
|
||||
"Demo Water Heater", 119, UnitOfTemperature.FAHRENHEIT, False, "eco", 1
|
||||
"demo_water_heater",
|
||||
"Demo Water Heater",
|
||||
119,
|
||||
UnitOfTemperature.FAHRENHEIT,
|
||||
False,
|
||||
"eco",
|
||||
1,
|
||||
),
|
||||
DemoWaterHeater(
|
||||
"demo_water_heater_celsius",
|
||||
"Demo Water Heater Celsius",
|
||||
45,
|
||||
UnitOfTemperature.CELSIUS,
|
||||
@@ -52,6 +59,7 @@ class DemoWaterHeater(WaterHeaterEntity):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
unique_id: str,
|
||||
name: str,
|
||||
target_temperature: int,
|
||||
unit_of_measurement: str,
|
||||
@@ -60,6 +68,7 @@ class DemoWaterHeater(WaterHeaterEntity):
|
||||
target_temperature_step: float,
|
||||
) -> None:
|
||||
"""Initialize the water_heater device."""
|
||||
self._attr_unique_id = unique_id
|
||||
self._attr_name = name
|
||||
if target_temperature is not None:
|
||||
self._attr_supported_features |= WaterHeaterEntityFeature.TARGET_TEMPERATURE
|
||||
|
||||
15
homeassistant/components/door/__init__.py
Normal file
15
homeassistant/components/door/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
"""Integration for door triggers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
DOMAIN = "door"
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the component.

    The integration only provides triggers (see trigger.py); there is nothing
    to initialize here.
    """
    return True
|
||||
10
homeassistant/components/door/icons.json
Normal file
10
homeassistant/components/door/icons.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"triggers": {
|
||||
"closed": {
|
||||
"trigger": "mdi:door-closed"
|
||||
},
|
||||
"opened": {
|
||||
"trigger": "mdi:door-open"
|
||||
}
|
||||
}
|
||||
}
|
||||
8
homeassistant/components/door/manifest.json
Normal file
8
homeassistant/components/door/manifest.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"domain": "door",
|
||||
"name": "Door",
|
||||
"codeowners": ["@home-assistant/core"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/door",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal"
|
||||
}
|
||||
38
homeassistant/components/door/strings.json
Normal file
38
homeassistant/components/door/strings.json
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted doors to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "Door",
|
||||
"triggers": {
|
||||
"closed": {
|
||||
"description": "Triggers after one or more doors close.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::door::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::door::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Door closed"
|
||||
},
|
||||
"opened": {
|
||||
"description": "Triggers after one or more doors open.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::door::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::door::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Door opened"
|
||||
}
|
||||
}
|
||||
}
|
||||
83
homeassistant/components/door/trigger.py
Normal file
83
homeassistant/components/door/trigger.py
Normal file
@@ -0,0 +1,83 @@
|
||||
"""Provides triggers for doors."""
|
||||
|
||||
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
|
||||
from homeassistant.components.cover import ATTR_IS_CLOSED, DOMAIN as COVER_DOMAIN
|
||||
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.core import HomeAssistant, State, split_entity_id
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_device_class
|
||||
from homeassistant.helpers.trigger import EntityTriggerBase, Trigger
|
||||
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
|
||||
|
||||
DEVICE_CLASS_DOOR = "door"
|
||||
|
||||
|
||||
def get_device_class_or_undefined(
    hass: HomeAssistant, entity_id: str
) -> str | None | UndefinedType:
    """Get the device class of an entity or UNDEFINED if not found."""
    try:
        device_class = get_device_class(hass, entity_id)
    except HomeAssistantError:
        # Entity could not be resolved; signal that with UNDEFINED.
        return UNDEFINED
    return device_class
|
||||
|
||||
|
||||
class DoorTriggerBase(EntityTriggerBase):
    """Base trigger for door state changes.

    Supports door-class binary sensors (on/off state) and door-class covers
    (is_closed attribute). Subclasses set the two target-value class
    attributes below.
    """

    _domains = {BINARY_SENSOR_DOMAIN, COVER_DOMAIN}
    # State a binary sensor must have for the trigger to fire.
    _binary_sensor_target_state: str
    # Value the cover's is_closed attribute must have for the trigger to fire.
    _cover_is_closed_target_value: bool

    def entity_filter(self, entities: set[str]) -> set[str]:
        """Filter entities by door device class."""
        candidates = super().entity_filter(entities)
        matching: set[str] = set()
        for entity_id in candidates:
            device_class = get_device_class_or_undefined(self._hass, entity_id)
            if device_class == DEVICE_CLASS_DOOR:
                matching.add(entity_id)
        return matching

    def is_valid_state(self, state: State) -> bool:
        """Check if the state matches the target door state."""
        domain = split_entity_id(state.entity_id)[0]
        if domain == COVER_DOMAIN:
            # Covers express open/closed via the is_closed attribute.
            is_closed = state.attributes.get(ATTR_IS_CLOSED)
            return is_closed == self._cover_is_closed_target_value
        return state.state == self._binary_sensor_target_state

    def is_valid_transition(self, from_state: State, to_state: State) -> bool:
        """Check if the transition is valid for a door state change."""
        # Recovering from unknown/unavailable is not a real door movement.
        if from_state.state in (STATE_UNAVAILABLE, STATE_UNKNOWN):
            return False
        if split_entity_id(from_state.entity_id)[0] == COVER_DOMAIN:
            from_is_closed = from_state.attributes.get(ATTR_IS_CLOSED)
            if from_is_closed is None:
                return False
            return from_is_closed != to_state.attributes.get(ATTR_IS_CLOSED)
        return from_state.state != to_state.state
|
||||
|
||||
|
||||
class DoorOpenedTrigger(DoorTriggerBase):
    """Trigger for door opened state changes."""

    # Binary sensor "on" means open; cover is_closed False means open.
    _binary_sensor_target_state = STATE_ON
    _cover_is_closed_target_value = False
|
||||
|
||||
|
||||
class DoorClosedTrigger(DoorTriggerBase):
    """Trigger for door closed state changes."""

    # Binary sensor "off" means closed; cover is_closed True means closed.
    _binary_sensor_target_state = STATE_OFF
    _cover_is_closed_target_value = True
|
||||
|
||||
|
||||
# Trigger key (as used in triggers.yaml) to implementing class.
TRIGGERS: dict[str, type[Trigger]] = {
    "opened": DoorOpenedTrigger,
    "closed": DoorClosedTrigger,
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for doors."""
    return TRIGGERS
|
||||
29
homeassistant/components/door/triggers.yaml
Normal file
29
homeassistant/components/door/triggers.yaml
Normal file
@@ -0,0 +1,29 @@
|
||||
.trigger_common_fields: &trigger_common_fields
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
|
||||
closed:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
- domain: binary_sensor
|
||||
device_class: door
|
||||
- domain: cover
|
||||
device_class: door
|
||||
|
||||
opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
- domain: binary_sensor
|
||||
device_class: door
|
||||
- domain: cover
|
||||
device_class: door
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["paho_mqtt", "pyeconet"],
|
||||
"requirements": ["pyeconet==0.2.1"]
|
||||
"requirements": ["pyeconet==0.2.2"]
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import EheimDigitalConfigEntry
|
||||
|
||||
TO_REDACT = {"emailAddr", "usrName"}
|
||||
TO_REDACT = {"emailAddr", "usrName", "api_usrName", "api_password"}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
|
||||
@@ -8,6 +8,7 @@ from eheimdigital.classic_vario import EheimDigitalClassicVario
|
||||
from eheimdigital.device import EheimDigitalDevice
|
||||
from eheimdigital.filter import EheimDigitalFilter
|
||||
from eheimdigital.heater import EheimDigitalHeater
|
||||
from eheimdigital.reeflex import EheimDigitalReeflexUV
|
||||
from eheimdigital.types import HeaterUnit
|
||||
|
||||
from homeassistant.components.number import (
|
||||
@@ -44,6 +45,47 @@ class EheimDigitalNumberDescription[_DeviceT: EheimDigitalDevice](
|
||||
uom_fn: Callable[[_DeviceT], str] | None = None
|
||||
|
||||
|
||||
REEFLEX_DESCRIPTIONS: tuple[
|
||||
EheimDigitalNumberDescription[EheimDigitalReeflexUV], ...
|
||||
] = (
|
||||
EheimDigitalNumberDescription[EheimDigitalReeflexUV](
|
||||
key="daily_burn_time",
|
||||
translation_key="daily_burn_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
native_step=PRECISION_WHOLE,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_min_value=0,
|
||||
native_max_value=1440,
|
||||
value_fn=lambda device: device.daily_burn_time,
|
||||
set_value_fn=lambda device, value: device.set_daily_burn_time(int(value)),
|
||||
),
|
||||
EheimDigitalNumberDescription[EheimDigitalReeflexUV](
|
||||
key="booster_time",
|
||||
translation_key="booster_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
native_step=PRECISION_WHOLE,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_min_value=0,
|
||||
native_max_value=20160,
|
||||
value_fn=lambda device: device.booster_time,
|
||||
set_value_fn=lambda device, value: device.set_booster_time(int(value)),
|
||||
),
|
||||
EheimDigitalNumberDescription[EheimDigitalReeflexUV](
|
||||
key="pause_time",
|
||||
translation_key="pause_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
native_step=PRECISION_WHOLE,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
device_class=NumberDeviceClass.DURATION,
|
||||
native_min_value=0,
|
||||
native_max_value=20160,
|
||||
value_fn=lambda device: device.pause_time,
|
||||
set_value_fn=lambda device, value: device.set_pause_time(int(value)),
|
||||
),
|
||||
)
|
||||
|
||||
FILTER_DESCRIPTIONS: tuple[EheimDigitalNumberDescription[EheimDigitalFilter], ...] = (
|
||||
EheimDigitalNumberDescription[EheimDigitalFilter](
|
||||
key="high_pulse_time",
|
||||
@@ -189,6 +231,13 @@ async def async_setup_entry(
|
||||
)
|
||||
for description in HEATER_DESCRIPTIONS
|
||||
)
|
||||
if isinstance(device, EheimDigitalReeflexUV):
|
||||
entities.extend(
|
||||
EheimDigitalNumber[EheimDigitalReeflexUV](
|
||||
coordinator, device, description
|
||||
)
|
||||
for description in REEFLEX_DESCRIPTIONS
|
||||
)
|
||||
entities.extend(
|
||||
EheimDigitalNumber[EheimDigitalDevice](coordinator, device, description)
|
||||
for description in GENERAL_DESCRIPTIONS
|
||||
|
||||
@@ -7,9 +7,11 @@ from typing import Any, Literal, override
|
||||
from eheimdigital.classic_vario import EheimDigitalClassicVario
|
||||
from eheimdigital.device import EheimDigitalDevice
|
||||
from eheimdigital.filter import EheimDigitalFilter
|
||||
from eheimdigital.reeflex import EheimDigitalReeflexUV
|
||||
from eheimdigital.types import (
|
||||
FilterMode,
|
||||
FilterModeProf,
|
||||
ReeflexMode,
|
||||
UnitOfMeasurement as EheimDigitalUnitOfMeasurement,
|
||||
)
|
||||
|
||||
@@ -36,6 +38,20 @@ class EheimDigitalSelectDescription[_DeviceT: EheimDigitalDevice](
|
||||
set_value_fn: Callable[[_DeviceT, str], Awaitable[None] | None]
|
||||
|
||||
|
||||
REEFLEX_DESCRIPTIONS: tuple[
|
||||
EheimDigitalSelectDescription[EheimDigitalReeflexUV], ...
|
||||
] = (
|
||||
EheimDigitalSelectDescription[EheimDigitalReeflexUV](
|
||||
key="mode",
|
||||
translation_key="mode",
|
||||
value_fn=lambda device: device.mode.name.lower(),
|
||||
set_value_fn=(
|
||||
lambda device, value: device.set_mode(ReeflexMode[value.upper()])
|
||||
),
|
||||
options=[name.lower() for name in ReeflexMode.__members__],
|
||||
),
|
||||
)
|
||||
|
||||
FILTER_DESCRIPTIONS: tuple[EheimDigitalSelectDescription[EheimDigitalFilter], ...] = (
|
||||
EheimDigitalSelectDescription[EheimDigitalFilter](
|
||||
key="filter_mode",
|
||||
@@ -176,6 +192,13 @@ async def async_setup_entry(
|
||||
EheimDigitalFilterSelect(coordinator, device, description)
|
||||
for description in FILTER_DESCRIPTIONS
|
||||
)
|
||||
if isinstance(device, EheimDigitalReeflexUV):
|
||||
entities.extend(
|
||||
EheimDigitalSelect[EheimDigitalReeflexUV](
|
||||
coordinator, device, description
|
||||
)
|
||||
for description in REEFLEX_DESCRIPTIONS
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
@@ -58,6 +58,12 @@
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"booster_time": {
|
||||
"name": "Booster duration"
|
||||
},
|
||||
"daily_burn_time": {
|
||||
"name": "Daily burn duration"
|
||||
},
|
||||
"day_speed": {
|
||||
"name": "Day speed"
|
||||
},
|
||||
@@ -76,6 +82,7 @@
|
||||
"night_temperature_offset": {
|
||||
"name": "Night temperature offset"
|
||||
},
|
||||
"pause_time": { "name": "Pause duration" },
|
||||
"system_led": {
|
||||
"name": "System LED brightness"
|
||||
},
|
||||
@@ -108,6 +115,10 @@
|
||||
"manual_speed": {
|
||||
"name": "Manual speed"
|
||||
},
|
||||
"mode": {
|
||||
"name": "Operation mode",
|
||||
"state": { "constant": "Constant", "daycycle": "Daycycle" }
|
||||
},
|
||||
"night_speed": {
|
||||
"name": "Night speed"
|
||||
}
|
||||
@@ -127,9 +138,18 @@
|
||||
"operating_time": {
|
||||
"name": "Operating time"
|
||||
},
|
||||
"remaining_booster_time": {
|
||||
"name": "Remaining booster time"
|
||||
},
|
||||
"remaining_pause_time": {
|
||||
"name": "Remaining pause time"
|
||||
},
|
||||
"service_hours": {
|
||||
"name": "Remaining hours until service"
|
||||
},
|
||||
"time_until_next_service": {
|
||||
"name": "Time until next service"
|
||||
},
|
||||
"turn_feeding_time": {
|
||||
"name": "Remaining off time after feeding"
|
||||
},
|
||||
@@ -137,12 +157,26 @@
|
||||
"name": "Remaining off time"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"booster": {
|
||||
"name": "Booster"
|
||||
},
|
||||
"expert": {
|
||||
"name": "Expert mode"
|
||||
},
|
||||
"pause": {
|
||||
"name": "Pause"
|
||||
}
|
||||
},
|
||||
"time": {
|
||||
"day_start_time": {
|
||||
"name": "Day start time"
|
||||
},
|
||||
"night_start_time": {
|
||||
"name": "Night start time"
|
||||
},
|
||||
"start_time": {
|
||||
"name": "Start time"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
"""EHEIM Digital switches."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, override
|
||||
|
||||
from eheimdigital.classic_vario import EheimDigitalClassicVario
|
||||
from eheimdigital.device import EheimDigitalDevice
|
||||
from eheimdigital.filter import EheimDigitalFilter
|
||||
from eheimdigital.reeflex import EheimDigitalReeflexUV
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
@@ -17,6 +21,50 @@ from .entity import EheimDigitalEntity, exception_handler
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class EheimDigitalSwitchDescription[_DeviceT: EheimDigitalDevice](
|
||||
SwitchEntityDescription
|
||||
):
|
||||
"""Class describing EHEIM Digital switch entities."""
|
||||
|
||||
is_on_fn: Callable[[_DeviceT], bool]
|
||||
set_fn: Callable[[_DeviceT, bool], Awaitable[None]]
|
||||
|
||||
|
||||
REEFLEX_DESCRIPTIONS: tuple[
|
||||
EheimDigitalSwitchDescription[EheimDigitalReeflexUV], ...
|
||||
] = (
|
||||
EheimDigitalSwitchDescription[EheimDigitalReeflexUV](
|
||||
key="active",
|
||||
name=None,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
is_on_fn=lambda device: device.is_active,
|
||||
set_fn=lambda device, value: device.set_active(active=value),
|
||||
),
|
||||
EheimDigitalSwitchDescription[EheimDigitalReeflexUV](
|
||||
key="pause",
|
||||
translation_key="pause",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
is_on_fn=lambda device: device.pause,
|
||||
set_fn=lambda device, value: device.set_pause(pause=value),
|
||||
),
|
||||
EheimDigitalSwitchDescription[EheimDigitalReeflexUV](
|
||||
key="booster",
|
||||
translation_key="booster",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
is_on_fn=lambda device: device.booster,
|
||||
set_fn=lambda device, value: device.set_booster(active=value),
|
||||
),
|
||||
EheimDigitalSwitchDescription[EheimDigitalReeflexUV](
|
||||
key="expert",
|
||||
translation_key="expert",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
is_on_fn=lambda device: device.expert,
|
||||
set_fn=lambda device, value: device.set_expert(active=value),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: EheimDigitalConfigEntry,
|
||||
@@ -32,7 +80,14 @@ async def async_setup_entry(
|
||||
entities: list[SwitchEntity] = []
|
||||
for device in device_address.values():
|
||||
if isinstance(device, (EheimDigitalClassicVario, EheimDigitalFilter)):
|
||||
entities.append(EheimDigitalFilterSwitch(coordinator, device)) # noqa: PERF401
|
||||
entities.append(EheimDigitalFilterSwitch(coordinator, device))
|
||||
if isinstance(device, EheimDigitalReeflexUV):
|
||||
entities.extend(
|
||||
EheimDigitalSwitch[EheimDigitalReeflexUV](
|
||||
coordinator, device, description
|
||||
)
|
||||
for description in REEFLEX_DESCRIPTIONS
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
@@ -40,6 +95,39 @@ async def async_setup_entry(
|
||||
async_setup_device_entities(coordinator.hub.devices)
|
||||
|
||||
|
||||
class EheimDigitalSwitch[_DeviceT: EheimDigitalDevice](
|
||||
EheimDigitalEntity[_DeviceT], SwitchEntity
|
||||
):
|
||||
"""Represent a EHEIM Digital switch entity."""
|
||||
|
||||
entity_description: EheimDigitalSwitchDescription[_DeviceT]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: EheimDigitalUpdateCoordinator,
|
||||
device: _DeviceT,
|
||||
description: EheimDigitalSwitchDescription[_DeviceT],
|
||||
) -> None:
|
||||
"""Initialize an EHEIM Digital switch entity."""
|
||||
super().__init__(coordinator, device)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{self._device_address}_{description.key}"
|
||||
|
||||
@exception_handler
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn on the switch."""
|
||||
return await self.entity_description.set_fn(self._device, True)
|
||||
|
||||
@exception_handler
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn off the switch."""
|
||||
return await self.entity_description.set_fn(self._device, False)
|
||||
|
||||
@override
|
||||
def _async_update_attrs(self) -> None:
|
||||
self._attr_is_on = self.entity_description.is_on_fn(self._device)
|
||||
|
||||
|
||||
class EheimDigitalFilterSwitch(
|
||||
EheimDigitalEntity[EheimDigitalClassicVario | EheimDigitalFilter], SwitchEntity
|
||||
):
|
||||
|
||||
@@ -9,6 +9,7 @@ from eheimdigital.classic_vario import EheimDigitalClassicVario
|
||||
from eheimdigital.device import EheimDigitalDevice
|
||||
from eheimdigital.filter import EheimDigitalFilter
|
||||
from eheimdigital.heater import EheimDigitalHeater
|
||||
from eheimdigital.reeflex import EheimDigitalReeflexUV
|
||||
|
||||
from homeassistant.components.time import TimeEntity, TimeEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
@@ -29,6 +30,16 @@ class EheimDigitalTimeDescription[_DeviceT: EheimDigitalDevice](TimeEntityDescri
|
||||
set_value_fn: Callable[[_DeviceT, time], Awaitable[None]]
|
||||
|
||||
|
||||
REEFLEX_DESCRIPTIONS: tuple[EheimDigitalTimeDescription[EheimDigitalReeflexUV], ...] = (
|
||||
EheimDigitalTimeDescription[EheimDigitalReeflexUV](
|
||||
key="start_time",
|
||||
translation_key="start_time",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
value_fn=lambda device: device.start_time,
|
||||
set_value_fn=lambda device, value: device.set_day_start_time(value),
|
||||
),
|
||||
)
|
||||
|
||||
FILTER_DESCRIPTIONS: tuple[EheimDigitalTimeDescription[EheimDigitalFilter], ...] = (
|
||||
EheimDigitalTimeDescription[EheimDigitalFilter](
|
||||
key="day_start_time",
|
||||
@@ -118,6 +129,13 @@ async def async_setup_entry(
|
||||
)
|
||||
for description in HEATER_DESCRIPTIONS
|
||||
)
|
||||
if isinstance(device, EheimDigitalReeflexUV):
|
||||
entities.extend(
|
||||
EheimDigitalTime[EheimDigitalReeflexUV](
|
||||
coordinator, device, description
|
||||
)
|
||||
for description in REEFLEX_DESCRIPTIONS
|
||||
)
|
||||
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"""Support for EnOcean devices."""
|
||||
|
||||
from serial import SerialException
|
||||
from enocean_async import Gateway
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
@@ -8,12 +8,15 @@ from homeassistant.const import CONF_DEVICE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.dispatcher import (
|
||||
async_dispatcher_connect,
|
||||
async_dispatcher_send,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .dongle import EnOceanDongle
|
||||
from .const import DOMAIN, SIGNAL_RECEIVE_MESSAGE, SIGNAL_SEND_MESSAGE
|
||||
|
||||
type EnOceanConfigEntry = ConfigEntry[EnOceanDongle]
|
||||
type EnOceanConfigEntry = ConfigEntry[Gateway]
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{DOMAIN: vol.Schema({vol.Required(CONF_DEVICE): cv.string})}, extra=vol.ALLOW_EXTRA
|
||||
@@ -27,7 +30,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
return True
|
||||
|
||||
if hass.config_entries.async_entries(DOMAIN):
|
||||
# We can only have one dongle. If there is already one in the config,
|
||||
# We can only have one gateway. If there is already one in the config,
|
||||
# there is no need to import the yaml based config.
|
||||
return True
|
||||
|
||||
@@ -43,23 +46,31 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, config_entry: EnOceanConfigEntry
|
||||
) -> bool:
|
||||
"""Set up an EnOcean dongle for the given entry."""
|
||||
try:
|
||||
usb_dongle = EnOceanDongle(hass, config_entry.data[CONF_DEVICE])
|
||||
except SerialException as err:
|
||||
raise ConfigEntryNotReady(f"Failed to set up EnOcean dongle: {err}") from err
|
||||
await usb_dongle.async_setup()
|
||||
config_entry.runtime_data = usb_dongle
|
||||
"""Set up an EnOcean gateway for the given entry."""
|
||||
gateway = Gateway(port=config_entry.data[CONF_DEVICE])
|
||||
|
||||
gateway.add_erp1_received_callback(
|
||||
lambda packet: async_dispatcher_send(hass, SIGNAL_RECEIVE_MESSAGE, packet)
|
||||
)
|
||||
|
||||
try:
|
||||
await gateway.start()
|
||||
except ConnectionError as err:
|
||||
gateway.stop()
|
||||
raise ConfigEntryNotReady(f"Failed to start EnOcean gateway: {err}") from err
|
||||
|
||||
config_entry.runtime_data = gateway
|
||||
|
||||
config_entry.async_on_unload(
|
||||
async_dispatcher_connect(hass, SIGNAL_SEND_MESSAGE, gateway.send_esp3_packet)
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, config_entry: EnOceanConfigEntry
|
||||
) -> bool:
|
||||
"""Unload EnOcean config entry."""
|
||||
|
||||
enocean_dongle = config_entry.runtime_data
|
||||
enocean_dongle.unload()
|
||||
"""Unload EnOcean config entry: stop the gateway."""
|
||||
|
||||
config_entry.runtime_data.stop()
|
||||
return True
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enocean.utils import combine_hex
|
||||
from enocean_async import ERP1Telegram
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
@@ -17,7 +17,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .entity import EnOceanEntity
|
||||
from .entity import EnOceanEntity, combine_hex
|
||||
|
||||
DEFAULT_NAME = "EnOcean binary sensor"
|
||||
DEPENDENCIES = ["enocean"]
|
||||
@@ -68,29 +68,25 @@ class EnOceanBinarySensor(EnOceanEntity, BinarySensorEntity):
|
||||
self._attr_unique_id = f"{combine_hex(dev_id)}-{device_class}"
|
||||
self._attr_name = dev_name
|
||||
|
||||
def value_changed(self, packet):
|
||||
def value_changed(self, telegram: ERP1Telegram) -> None:
|
||||
"""Fire an event with the data that have changed.
|
||||
|
||||
This method is called when there is an incoming packet associated
|
||||
with this platform.
|
||||
|
||||
Example packet data:
|
||||
- 2nd button pressed
|
||||
['0xf6', '0x10', '0x00', '0x2d', '0xcf', '0x45', '0x30']
|
||||
- button released
|
||||
['0xf6', '0x00', '0x00', '0x2d', '0xcf', '0x45', '0x20']
|
||||
"""
|
||||
if not self.address:
|
||||
return
|
||||
# Energy Bow
|
||||
pushed = None
|
||||
|
||||
if packet.data[6] == 0x30:
|
||||
if telegram.status == 0x30:
|
||||
pushed = 1
|
||||
elif packet.data[6] == 0x20:
|
||||
elif telegram.status == 0x20:
|
||||
pushed = 0
|
||||
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
action = packet.data[1]
|
||||
action = telegram.telegram_data[0]
|
||||
if action == 0x70:
|
||||
self.which = 0
|
||||
self.onoff = 0
|
||||
@@ -112,7 +108,7 @@ class EnOceanBinarySensor(EnOceanEntity, BinarySensorEntity):
|
||||
self.hass.bus.fire(
|
||||
EVENT_BUTTON_PRESSED,
|
||||
{
|
||||
"id": self.dev_id,
|
||||
"id": self.address.to_bytelist(),
|
||||
"pushed": pushed,
|
||||
"which": self.which,
|
||||
"onoff": self.onoff,
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
"""Config flows for the EnOcean integration."""
|
||||
|
||||
import glob
|
||||
from typing import Any
|
||||
|
||||
from enocean_async import Gateway
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import usb
|
||||
@@ -19,7 +21,6 @@ from homeassistant.helpers.selector import (
|
||||
)
|
||||
from homeassistant.helpers.service_info.usb import UsbServiceInfo
|
||||
|
||||
from . import dongle
|
||||
from .const import DOMAIN, ERROR_INVALID_DONGLE_PATH, LOGGER, MANUFACTURER
|
||||
|
||||
MANUAL_SCHEMA = vol.Schema(
|
||||
@@ -29,6 +30,24 @@ MANUAL_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def _detect_usb_dongle() -> list[str]:
|
||||
"""Return a list of candidate paths for USB EnOcean dongles.
|
||||
|
||||
This method is currently a bit simplistic, it may need to be
|
||||
improved to support more configurations and OS.
|
||||
"""
|
||||
globs_to_test = [
|
||||
"/dev/tty*FTOA2PV*",
|
||||
"/dev/serial/by-id/*EnOcean*",
|
||||
"/dev/tty.usbserial-*",
|
||||
]
|
||||
found_paths = []
|
||||
for current_glob in globs_to_test:
|
||||
found_paths.extend(glob.glob(current_glob))
|
||||
|
||||
return found_paths
|
||||
|
||||
|
||||
class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the enOcean config flows."""
|
||||
|
||||
@@ -107,7 +126,7 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return await self.async_step_manual()
|
||||
return await self.async_step_manual(user_input)
|
||||
|
||||
devices = await self.hass.async_add_executor_job(dongle.detect)
|
||||
devices = await self.hass.async_add_executor_job(_detect_usb_dongle)
|
||||
if len(devices) == 0:
|
||||
return await self.async_step_manual()
|
||||
devices.append(self.MANUAL_PATH_VALUE)
|
||||
@@ -146,7 +165,17 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
async def validate_enocean_conf(self, user_input) -> bool:
|
||||
"""Return True if the user_input contains a valid dongle path."""
|
||||
dongle_path = user_input[CONF_DEVICE]
|
||||
return await self.hass.async_add_executor_job(dongle.validate_path, dongle_path)
|
||||
try:
|
||||
# Starting the gateway will raise an exception if it can't connect
|
||||
gateway = Gateway(port=dongle_path)
|
||||
await gateway.start()
|
||||
except ConnectionError as exception:
|
||||
LOGGER.warning("Dongle path %s is invalid: %s", dongle_path, str(exception))
|
||||
return False
|
||||
finally:
|
||||
gateway.stop()
|
||||
|
||||
return True
|
||||
|
||||
def create_enocean_entry(self, user_input):
|
||||
"""Create an entry for the provided configuration."""
|
||||
|
||||
@@ -1,88 +0,0 @@
|
||||
"""Representation of an EnOcean dongle."""
|
||||
|
||||
import glob
|
||||
import logging
|
||||
from os.path import basename, normpath
|
||||
|
||||
from enocean.communicators import SerialCommunicator
|
||||
from enocean.protocol.packet import RadioPacket
|
||||
import serial
|
||||
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
|
||||
|
||||
from .const import SIGNAL_RECEIVE_MESSAGE, SIGNAL_SEND_MESSAGE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EnOceanDongle:
|
||||
"""Representation of an EnOcean dongle.
|
||||
|
||||
The dongle is responsible for receiving the EnOcean frames,
|
||||
creating devices if needed, and dispatching messages to platforms.
|
||||
"""
|
||||
|
||||
def __init__(self, hass, serial_path):
|
||||
"""Initialize the EnOcean dongle."""
|
||||
|
||||
self._communicator = SerialCommunicator(
|
||||
port=serial_path, callback=self.callback
|
||||
)
|
||||
self.serial_path = serial_path
|
||||
self.identifier = basename(normpath(serial_path))
|
||||
self.hass = hass
|
||||
self.dispatcher_disconnect_handle = None
|
||||
|
||||
async def async_setup(self):
|
||||
"""Finish the setup of the bridge and supported platforms."""
|
||||
self._communicator.start()
|
||||
self.dispatcher_disconnect_handle = async_dispatcher_connect(
|
||||
self.hass, SIGNAL_SEND_MESSAGE, self._send_message_callback
|
||||
)
|
||||
|
||||
def unload(self):
|
||||
"""Disconnect callbacks established at init time."""
|
||||
if self.dispatcher_disconnect_handle:
|
||||
self.dispatcher_disconnect_handle()
|
||||
self.dispatcher_disconnect_handle = None
|
||||
|
||||
def _send_message_callback(self, command):
|
||||
"""Send a command through the EnOcean dongle."""
|
||||
self._communicator.send(command)
|
||||
|
||||
def callback(self, packet):
|
||||
"""Handle EnOcean device's callback.
|
||||
|
||||
This is the callback function called by python-enocean whenever there
|
||||
is an incoming packet.
|
||||
"""
|
||||
|
||||
if isinstance(packet, RadioPacket):
|
||||
_LOGGER.debug("Received radio packet: %s", packet)
|
||||
dispatcher_send(self.hass, SIGNAL_RECEIVE_MESSAGE, packet)
|
||||
|
||||
|
||||
def detect():
|
||||
"""Return a list of candidate paths for USB EnOcean dongles.
|
||||
|
||||
This method is currently a bit simplistic, it may need to be
|
||||
improved to support more configurations and OS.
|
||||
"""
|
||||
globs_to_test = ["/dev/tty*FTOA2PV*", "/dev/serial/by-id/*EnOcean*"]
|
||||
found_paths = []
|
||||
for current_glob in globs_to_test:
|
||||
found_paths.extend(glob.glob(current_glob))
|
||||
|
||||
return found_paths
|
||||
|
||||
|
||||
def validate_path(path: str):
|
||||
"""Return True if the provided path points to a valid serial port, False otherwise."""
|
||||
try:
|
||||
# Creating the serial communicator will raise an exception
|
||||
# if it cannot connect
|
||||
SerialCommunicator(port=path)
|
||||
except serial.SerialException as exception:
|
||||
_LOGGER.warning("Dongle path %s is invalid: %s", path, str(exception))
|
||||
return False
|
||||
return True
|
||||
@@ -1,12 +1,23 @@
|
||||
"""Representation of an EnOcean device."""
|
||||
|
||||
from enocean.protocol.packet import Packet
|
||||
from enocean.utils import combine_hex
|
||||
from enocean_async import EURID, Address, BaseAddress, ERP1Telegram, SenderAddress
|
||||
from enocean_async.esp3.packet import ESP3Packet, ESP3PacketType
|
||||
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect, dispatcher_send
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
from .const import SIGNAL_RECEIVE_MESSAGE, SIGNAL_SEND_MESSAGE
|
||||
from .const import LOGGER, SIGNAL_RECEIVE_MESSAGE, SIGNAL_SEND_MESSAGE
|
||||
|
||||
|
||||
def combine_hex(dev_id: list[int]) -> int:
|
||||
"""Combine list of integer values to one big integer.
|
||||
|
||||
This function replaces the previously used function from the enocean library and is considered tech debt that will have to be replaced.
|
||||
"""
|
||||
value = 0
|
||||
for byte in dev_id:
|
||||
value = (value << 8) | (byte & 0xFF)
|
||||
return value
|
||||
|
||||
|
||||
class EnOceanEntity(Entity):
|
||||
@@ -14,7 +25,16 @@ class EnOceanEntity(Entity):
|
||||
|
||||
def __init__(self, dev_id: list[int]) -> None:
|
||||
"""Initialize the device."""
|
||||
self.dev_id = dev_id
|
||||
self.address: SenderAddress | None = None
|
||||
|
||||
try:
|
||||
address = Address.from_bytelist(dev_id)
|
||||
if address.is_eurid():
|
||||
self.address = EURID.from_number(address.to_number())
|
||||
elif address.is_base_address():
|
||||
self.address = BaseAddress.from_number(address.to_number())
|
||||
except ValueError:
|
||||
self.address = None
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
@@ -24,17 +44,25 @@ class EnOceanEntity(Entity):
|
||||
)
|
||||
)
|
||||
|
||||
def _message_received_callback(self, packet):
|
||||
def _message_received_callback(self, telegram: ERP1Telegram) -> None:
|
||||
"""Handle incoming packets."""
|
||||
if not self.address:
|
||||
return
|
||||
|
||||
if packet.sender_int == combine_hex(self.dev_id):
|
||||
self.value_changed(packet)
|
||||
if telegram.sender == self.address:
|
||||
self.value_changed(telegram)
|
||||
|
||||
def value_changed(self, packet):
|
||||
def value_changed(self, telegram: ERP1Telegram) -> None:
|
||||
"""Update the internal state of the device when a packet arrives."""
|
||||
|
||||
def send_command(self, data, optional, packet_type):
|
||||
"""Send a command via the EnOcean dongle."""
|
||||
|
||||
packet = Packet(packet_type, data=data, optional=optional)
|
||||
dispatcher_send(self.hass, SIGNAL_SEND_MESSAGE, packet)
|
||||
def send_command(
|
||||
self, data: list[int], optional: list[int], packet_type: ESP3PacketType
|
||||
) -> None:
|
||||
"""Send a command via the EnOcean dongle, if data and optional are valid bytes; otherwise, ignore."""
|
||||
try:
|
||||
packet = ESP3Packet(packet_type, data=bytes(data), optional=bytes(optional))
|
||||
dispatcher_send(self.hass, SIGNAL_SEND_MESSAGE, packet)
|
||||
except ValueError as err:
|
||||
LOGGER.warning(
|
||||
"Failed to send command: invalid data or optional bytes: %s", err
|
||||
)
|
||||
|
||||
@@ -5,7 +5,8 @@ from __future__ import annotations
|
||||
import math
|
||||
from typing import Any
|
||||
|
||||
from enocean.utils import combine_hex
|
||||
from enocean_async import ERP1Telegram
|
||||
from enocean_async.esp3.packet import ESP3PacketType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.light import (
|
||||
@@ -20,7 +21,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .entity import EnOceanEntity
|
||||
from .entity import EnOceanEntity, combine_hex
|
||||
|
||||
CONF_SENDER_ID = "sender_id"
|
||||
|
||||
@@ -75,7 +76,8 @@ class EnOceanLight(EnOceanEntity, LightEntity):
|
||||
command = [0xA5, 0x02, bval, 0x01, 0x09]
|
||||
command.extend(self._sender_id)
|
||||
command.extend([0x00])
|
||||
self.send_command(command, [], 0x01)
|
||||
packet_type = ESP3PacketType(0x01)
|
||||
self.send_command(command, [], packet_type)
|
||||
self._attr_is_on = True
|
||||
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
@@ -83,17 +85,18 @@ class EnOceanLight(EnOceanEntity, LightEntity):
|
||||
command = [0xA5, 0x02, 0x00, 0x01, 0x09]
|
||||
command.extend(self._sender_id)
|
||||
command.extend([0x00])
|
||||
self.send_command(command, [], 0x01)
|
||||
packet_type = ESP3PacketType(0x01)
|
||||
self.send_command(command, [], packet_type)
|
||||
self._attr_is_on = False
|
||||
|
||||
def value_changed(self, packet):
|
||||
def value_changed(self, telegram: ERP1Telegram) -> None:
|
||||
"""Update the internal state of this device.
|
||||
|
||||
Dimmer devices like Eltako FUD61 send telegram in different RORGs.
|
||||
We only care about the 4BS (0xA5).
|
||||
"""
|
||||
if packet.data[0] == 0xA5 and packet.data[1] == 0x02:
|
||||
val = packet.data[2]
|
||||
if telegram.rorg == 0xA5 and telegram.telegram_data[0] == 0x02:
|
||||
val = telegram.telegram_data[1]
|
||||
self._attr_brightness = math.floor(val / 100.0 * 256.0)
|
||||
self._attr_is_on = bool(val != 0)
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
@@ -7,8 +7,8 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/enocean",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["enocean"],
|
||||
"requirements": ["enocean==0.50"],
|
||||
"loggers": ["enocean_async"],
|
||||
"requirements": ["enocean-async==0.4.1"],
|
||||
"single_config_entry": true,
|
||||
"usb": [
|
||||
{
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from enocean.utils import combine_hex
|
||||
from enocean_async import EEP, EEP_SPECIFICATIONS, EEPHandler, EEPMessage, ERP1Telegram
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -30,7 +30,7 @@ from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .entity import EnOceanEntity
|
||||
from .entity import EnOceanEntity, combine_hex
|
||||
|
||||
CONF_MAX_TEMP = "max_temp"
|
||||
CONF_MIN_TEMP = "min_temp"
|
||||
@@ -166,7 +166,7 @@ class EnOceanSensor(EnOceanEntity, RestoreSensor):
|
||||
if (sensor_data := await self.async_get_last_sensor_data()) is not None:
|
||||
self._attr_native_value = sensor_data.native_value
|
||||
|
||||
def value_changed(self, packet):
|
||||
def value_changed(self, telegram: ERP1Telegram) -> None:
|
||||
"""Update the internal state of the sensor."""
|
||||
|
||||
|
||||
@@ -177,15 +177,19 @@ class EnOceanPowerSensor(EnOceanSensor):
|
||||
- A5-12-01 (Automated Meter Reading, Electricity)
|
||||
"""
|
||||
|
||||
def value_changed(self, packet):
|
||||
def value_changed(self, telegram: ERP1Telegram) -> None:
|
||||
"""Update the internal state of the sensor."""
|
||||
if packet.rorg != 0xA5:
|
||||
if telegram.rorg != 0xA5:
|
||||
return
|
||||
packet.parse_eep(0x12, 0x01)
|
||||
if packet.parsed["DT"]["raw_value"] == 1:
|
||||
|
||||
if (eep := EEP_SPECIFICATIONS.get(EEP(0xA5, 0x12, 0x01))) is None:
|
||||
return
|
||||
msg: EEPMessage = EEPHandler(eep).decode(telegram)
|
||||
|
||||
if "DT" in msg.values and msg.values["DT"].raw == 1:
|
||||
# this packet reports the current value
|
||||
raw_val = packet.parsed["MR"]["raw_value"]
|
||||
divisor = packet.parsed["DIV"]["raw_value"]
|
||||
raw_val = msg.values["MR"].raw
|
||||
divisor = msg.values["DIV"].raw
|
||||
self._attr_native_value = raw_val / (10**divisor)
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
@@ -226,13 +230,13 @@ class EnOceanTemperatureSensor(EnOceanSensor):
|
||||
self.range_from = range_from
|
||||
self.range_to = range_to
|
||||
|
||||
def value_changed(self, packet):
|
||||
def value_changed(self, telegram: ERP1Telegram) -> None:
|
||||
"""Update the internal state of the sensor."""
|
||||
if packet.data[0] != 0xA5:
|
||||
if telegram.rorg != 0xA5:
|
||||
return
|
||||
temp_scale = self._scale_max - self._scale_min
|
||||
temp_range = self.range_to - self.range_from
|
||||
raw_val = packet.data[3]
|
||||
raw_val = telegram.telegram_data[2]
|
||||
temperature = temp_scale / temp_range * (raw_val - self.range_from)
|
||||
temperature += self._scale_min
|
||||
self._attr_native_value = round(temperature, 1)
|
||||
@@ -248,11 +252,11 @@ class EnOceanHumiditySensor(EnOceanSensor):
|
||||
- A5-10-10 to A5-10-14 (Room Operating Panels)
|
||||
"""
|
||||
|
||||
def value_changed(self, packet):
|
||||
def value_changed(self, telegram: ERP1Telegram) -> None:
|
||||
"""Update the internal state of the sensor."""
|
||||
if packet.rorg != 0xA5:
|
||||
if telegram.rorg != 0xA5:
|
||||
return
|
||||
humidity = packet.data[2] * 100 / 250
|
||||
humidity = telegram.telegram_data[1] * 100 / 250
|
||||
self._attr_native_value = round(humidity, 1)
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
@@ -264,9 +268,9 @@ class EnOceanWindowHandle(EnOceanSensor):
|
||||
- F6-10-00 (Mechanical handle / Hoppe AG)
|
||||
"""
|
||||
|
||||
def value_changed(self, packet):
|
||||
def value_changed(self, telegram: ERP1Telegram) -> None:
|
||||
"""Update the internal state of the sensor."""
|
||||
action = (packet.data[1] & 0x70) >> 4
|
||||
action = (telegram.telegram_data[0] & 0x70) >> 4
|
||||
|
||||
if action == 0x07:
|
||||
self._attr_native_value = STATE_CLOSED
|
||||
|
||||
@@ -4,7 +4,8 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from enocean.utils import combine_hex
|
||||
from enocean_async import EEP, EEP_SPECIFICATIONS, EEPHandler, EEPMessage, ERP1Telegram
|
||||
from enocean_async.esp3.packet import ESP3PacketType
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.switch import (
|
||||
@@ -18,7 +19,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
from .entity import EnOceanEntity
|
||||
from .entity import EnOceanEntity, combine_hex
|
||||
|
||||
CONF_CHANNEL = "channel"
|
||||
DEFAULT_NAME = "EnOcean Switch"
|
||||
@@ -86,52 +87,68 @@ class EnOceanSwitch(EnOceanEntity, SwitchEntity):
|
||||
"""Initialize the EnOcean switch device."""
|
||||
super().__init__(dev_id)
|
||||
self._light = None
|
||||
self.channel = channel
|
||||
self.channel: int = channel
|
||||
self._attr_unique_id = generate_unique_id(dev_id, channel)
|
||||
self._attr_name = dev_name
|
||||
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn on the switch."""
|
||||
if not self.address:
|
||||
return
|
||||
|
||||
optional = [0x03]
|
||||
optional.extend(self.dev_id)
|
||||
optional.extend(self.address.to_bytelist())
|
||||
optional.extend([0xFF, 0x00])
|
||||
self.send_command(
|
||||
data=[0xD2, 0x01, self.channel & 0xFF, 0x64, 0x00, 0x00, 0x00, 0x00, 0x00],
|
||||
optional=optional,
|
||||
packet_type=0x01,
|
||||
packet_type=ESP3PacketType(0x01),
|
||||
)
|
||||
self._attr_is_on = True
|
||||
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn off the switch."""
|
||||
if not self.address:
|
||||
return
|
||||
optional = [0x03]
|
||||
optional.extend(self.dev_id)
|
||||
optional.extend(self.address.to_bytelist())
|
||||
optional.extend([0xFF, 0x00])
|
||||
self.send_command(
|
||||
data=[0xD2, 0x01, self.channel & 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00],
|
||||
optional=optional,
|
||||
packet_type=0x01,
|
||||
packet_type=ESP3PacketType(0x01),
|
||||
)
|
||||
self._attr_is_on = False
|
||||
|
||||
def value_changed(self, packet):
|
||||
def value_changed(self, telegram: ERP1Telegram) -> None:
|
||||
"""Update the internal state of the switch."""
|
||||
if packet.data[0] == 0xA5:
|
||||
# power meter telegram, turn on if > 10 watts
|
||||
packet.parse_eep(0x12, 0x01)
|
||||
if packet.parsed["DT"]["raw_value"] == 1:
|
||||
raw_val = packet.parsed["MR"]["raw_value"]
|
||||
divisor = packet.parsed["DIV"]["raw_value"]
|
||||
if telegram.rorg == 0xA5:
|
||||
# power meter telegram, turn on if > 1 watts
|
||||
if (eep := EEP_SPECIFICATIONS.get(EEP(0xA5, 0x12, 0x01))) is None:
|
||||
LOGGER.warning("EEP A5-12-01 cannot be decoded")
|
||||
return
|
||||
|
||||
msg: EEPMessage = EEPHandler(eep).decode(telegram)
|
||||
|
||||
if "DT" in msg.values and msg.values["DT"].raw == 1:
|
||||
# this packet reports the current value
|
||||
raw_val = msg.values["MR"].raw
|
||||
divisor = msg.values["DIV"].raw
|
||||
watts = raw_val / (10**divisor)
|
||||
if watts > 1:
|
||||
self._attr_is_on = True
|
||||
self.schedule_update_ha_state()
|
||||
elif packet.data[0] == 0xD2:
|
||||
|
||||
elif telegram.rorg == 0xD2:
|
||||
# actuator status telegram
|
||||
packet.parse_eep(0x01, 0x01)
|
||||
if packet.parsed["CMD"]["raw_value"] == 4:
|
||||
channel = packet.parsed["IO"]["raw_value"]
|
||||
output = packet.parsed["OV"]["raw_value"]
|
||||
if (eep := EEP_SPECIFICATIONS.get(EEP(0xD2, 0x01, 0x01))) is None:
|
||||
LOGGER.warning("EEP D2-01-01 cannot be decoded")
|
||||
return
|
||||
|
||||
msg = EEPHandler(eep).decode(telegram)
|
||||
if msg.values["CMD"].raw == 4:
|
||||
channel = msg.values["I/O"].raw
|
||||
output = msg.values["OV"].raw
|
||||
if channel == self.channel:
|
||||
self._attr_is_on = output > 0
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
@@ -17,9 +17,9 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==44.1.0",
|
||||
"aioesphomeapi==44.3.1",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.6.0"
|
||||
"bleak-esphome==3.7.1"
|
||||
],
|
||||
"zeroconf": ["_esphomelib._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -23,12 +24,64 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ADMIN_API_KEY): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class GhostConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Ghost."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthentication."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth confirmation."""
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
admin_api_key = user_input[CONF_ADMIN_API_KEY]
|
||||
|
||||
if ":" not in admin_api_key:
|
||||
errors["base"] = "invalid_api_key"
|
||||
else:
|
||||
try:
|
||||
await self._validate_credentials(
|
||||
reauth_entry.data[CONF_API_URL], admin_api_key
|
||||
)
|
||||
except GhostAuthError:
|
||||
errors["base"] = "invalid_auth"
|
||||
except GhostError:
|
||||
errors["base"] = "cannot_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected error during Ghost reauth")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data_updates=user_input,
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=STEP_REAUTH_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"title": reauth_entry.title,
|
||||
"docs_url": "https://account.ghost.org/?r=settings/integrations/new",
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioghost"],
|
||||
"quality_scale": "bronze",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aioghost==0.4.0"]
|
||||
}
|
||||
|
||||
@@ -38,7 +38,7 @@ rules:
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "This Ghost site is already configured."
|
||||
"already_configured": "This Ghost site is already configured.",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Failed to connect to Ghost. Please check your URL.",
|
||||
@@ -10,6 +11,16 @@
|
||||
"unknown": "An unexpected error occurred."
|
||||
},
|
||||
"step": {
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"admin_api_key": "[%key:component::ghost::config::step::user::data::admin_api_key%]"
|
||||
},
|
||||
"data_description": {
|
||||
"admin_api_key": "[%key:component::ghost::config::step::user::data_description::admin_api_key%]"
|
||||
},
|
||||
"description": "Your API key for {title} is invalid. [Create a new integration key]({docs_url}) to reauthenticate.",
|
||||
"title": "[%key:common::config_flow::title::reauth%]"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"admin_api_key": "Admin API key",
|
||||
|
||||
@@ -55,8 +55,6 @@ def setup_platform(
|
||||
) -> None:
|
||||
"""Set up the heatmiser thermostat."""
|
||||
|
||||
heatmiser_v3_thermostat = heatmiser.HeatmiserThermostat
|
||||
|
||||
host = config[CONF_HOST]
|
||||
port = config[CONF_PORT]
|
||||
|
||||
@@ -65,10 +63,7 @@ def setup_platform(
|
||||
uh1_hub = connection.HeatmiserUH1(host, port)
|
||||
|
||||
add_entities(
|
||||
[
|
||||
HeatmiserV3Thermostat(heatmiser_v3_thermostat, thermostat, uh1_hub)
|
||||
for thermostat in thermostats
|
||||
],
|
||||
[HeatmiserV3Thermostat(thermostat, uh1_hub) for thermostat in thermostats],
|
||||
True,
|
||||
)
|
||||
|
||||
@@ -83,44 +78,31 @@ class HeatmiserV3Thermostat(ClimateEntity):
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
|
||||
def __init__(self, therm, device, uh1):
|
||||
def __init__(
|
||||
self,
|
||||
device: dict[str, Any],
|
||||
uh1: connection.HeatmiserUH1,
|
||||
) -> None:
|
||||
"""Initialize the thermostat."""
|
||||
self.therm = therm(device[CONF_ID], "prt", uh1)
|
||||
self.therm = heatmiser.HeatmiserThermostat(device[CONF_ID], "prt", uh1)
|
||||
self.uh1 = uh1
|
||||
self._name = device[CONF_NAME]
|
||||
self._current_temperature = None
|
||||
self._target_temperature = None
|
||||
self._attr_name = device[CONF_NAME]
|
||||
self._id = device
|
||||
self.dcb = None
|
||||
self._attr_hvac_mode = HVACMode.HEAT
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the thermostat, if any."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def current_temperature(self):
|
||||
"""Return the current temperature."""
|
||||
return self._current_temperature
|
||||
|
||||
@property
|
||||
def target_temperature(self):
|
||||
"""Return the temperature we try to reach."""
|
||||
return self._target_temperature
|
||||
|
||||
def set_temperature(self, **kwargs: Any) -> None:
|
||||
"""Set new target temperature."""
|
||||
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
|
||||
return
|
||||
self._target_temperature = int(temperature)
|
||||
self.therm.set_target_temp(self._target_temperature)
|
||||
self._attr_target_temperature = int(temperature)
|
||||
self.therm.set_target_temp(self._attr_target_temperature)
|
||||
|
||||
def update(self) -> None:
|
||||
"""Get the latest data."""
|
||||
self.uh1.reopen()
|
||||
if not self.uh1.status:
|
||||
_LOGGER.error("Failed to update device %s", self._name)
|
||||
_LOGGER.error("Failed to update device %s", self.name)
|
||||
return
|
||||
self.dcb = self.therm.read_dcb()
|
||||
self._attr_temperature_unit = (
|
||||
@@ -128,8 +110,8 @@ class HeatmiserV3Thermostat(ClimateEntity):
|
||||
if (self.therm.get_temperature_format() == "C")
|
||||
else UnitOfTemperature.FAHRENHEIT
|
||||
)
|
||||
self._current_temperature = int(self.therm.get_floor_temp())
|
||||
self._target_temperature = int(self.therm.get_target_temp())
|
||||
self._attr_current_temperature = int(self.therm.get_floor_temp())
|
||||
self._attr_target_temperature = int(self.therm.get_target_temp())
|
||||
self._attr_hvac_mode = (
|
||||
HVACMode.OFF
|
||||
if (int(self.therm.get_current_state()) == 0)
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
"""Constants for the Home Connect integration."""
|
||||
|
||||
from typing import cast
|
||||
|
||||
from aiohomeconnect.model import EventKey, OptionKey, ProgramKey, SettingKey, StatusKey
|
||||
|
||||
from homeassistant.const import UnitOfTemperature, UnitOfTime, UnitOfVolume
|
||||
@@ -76,9 +74,9 @@ AFFECTS_TO_SELECTED_PROGRAM = "selected_program"
|
||||
|
||||
|
||||
TRANSLATION_KEYS_PROGRAMS_MAP = {
|
||||
bsh_key_to_translation_key(program.value): cast(ProgramKey, program)
|
||||
bsh_key_to_translation_key(program.value): program
|
||||
for program in ProgramKey
|
||||
if program != ProgramKey.UNKNOWN
|
||||
if program not in (ProgramKey.UNKNOWN, ProgramKey.BSH_COMMON_FAVORITE_001)
|
||||
}
|
||||
|
||||
PROGRAMS_TRANSLATION_KEYS_MAP = {
|
||||
|
||||
@@ -23,6 +23,6 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiohomeconnect"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiohomeconnect==0.28.0"],
|
||||
"requirements": ["aiohomeconnect==0.30.0"],
|
||||
"zeroconf": ["_homeconnect._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -403,7 +403,7 @@ class HomeConnectProgramSelectEntity(HomeConnectEntity, SelectEntity):
|
||||
self._attr_options = [
|
||||
PROGRAMS_TRANSLATION_KEYS_MAP[program.key]
|
||||
for program in self.appliance.programs
|
||||
if program.key != ProgramKey.UNKNOWN
|
||||
if program.key in PROGRAMS_TRANSLATION_KEYS_MAP
|
||||
and (
|
||||
program.constraints is None
|
||||
or program.constraints.execution
|
||||
|
||||
@@ -91,14 +91,14 @@ SENSORS: tuple[SensorEntityDescription, ...] = (
|
||||
translation_key="energy_exported",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="energy_imported",
|
||||
translation_key="energy_imported",
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
),
|
||||
SensorEntityDescription(
|
||||
key="frequency",
|
||||
|
||||
@@ -901,7 +901,9 @@ class PowerViewShadeDualOverlappedRear(PowerViewShadeDualOverlappedBase):
|
||||
)
|
||||
|
||||
|
||||
class PowerViewShadeDualOverlappedCombinedTilt(PowerViewShadeDualOverlappedCombined):
|
||||
class PowerViewShadeDualOverlappedCombinedTilt(
|
||||
PowerViewShadeDualOverlappedCombined, PowerViewShadeWithTiltBase
|
||||
):
|
||||
"""Represent a shade that has a front sheer and rear opaque panel.
|
||||
|
||||
This equates to two shades being controlled by one motor.
|
||||
@@ -915,26 +917,6 @@ class PowerViewShadeDualOverlappedCombinedTilt(PowerViewShadeDualOverlappedCombi
|
||||
Type 10 - Duolite with 180° Tilt
|
||||
"""
|
||||
|
||||
# type
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PowerviewShadeUpdateCoordinator,
|
||||
device_info: PowerviewDeviceInfo,
|
||||
room_name: str,
|
||||
shade: BaseShade,
|
||||
name: str,
|
||||
) -> None:
|
||||
"""Initialize the shade."""
|
||||
super().__init__(coordinator, device_info, room_name, shade, name)
|
||||
self._attr_supported_features |= (
|
||||
CoverEntityFeature.OPEN_TILT
|
||||
| CoverEntityFeature.CLOSE_TILT
|
||||
| CoverEntityFeature.SET_TILT_POSITION
|
||||
)
|
||||
if self._shade.is_supported(MOTION_STOP):
|
||||
self._attr_supported_features |= CoverEntityFeature.STOP_TILT
|
||||
self._max_tilt = self._shade.shade_limits.tilt_max
|
||||
|
||||
@property
|
||||
def transition_steps(self) -> int:
|
||||
"""Return the steps to make a move."""
|
||||
@@ -949,26 +931,6 @@ class PowerViewShadeDualOverlappedCombinedTilt(PowerViewShadeDualOverlappedCombi
|
||||
tilt = self.positions.tilt
|
||||
return ceil(primary + secondary + tilt)
|
||||
|
||||
@callback
|
||||
def _get_shade_tilt(self, target_hass_tilt_position: int) -> ShadePosition:
|
||||
"""Return a ShadePosition."""
|
||||
return ShadePosition(
|
||||
tilt=target_hass_tilt_position,
|
||||
velocity=self.positions.velocity,
|
||||
)
|
||||
|
||||
@property
|
||||
def open_tilt_position(self) -> ShadePosition:
|
||||
"""Return the open tilt position and required additional positions."""
|
||||
return replace(self._shade.open_position_tilt, velocity=self.positions.velocity)
|
||||
|
||||
@property
|
||||
def close_tilt_position(self) -> ShadePosition:
|
||||
"""Return the open tilt position and required additional positions."""
|
||||
return replace(
|
||||
self._shade.close_position_tilt, velocity=self.positions.velocity
|
||||
)
|
||||
|
||||
|
||||
TYPE_TO_CLASSES = {
|
||||
0: (PowerViewShade,),
|
||||
|
||||
@@ -627,13 +627,17 @@ class IntentHandleView(http.HomeAssistantView):
|
||||
{
|
||||
vol.Required("name"): cv.string,
|
||||
vol.Optional("data"): vol.Schema({cv.string: object}),
|
||||
vol.Optional("language"): cv.string,
|
||||
vol.Optional("assistant"): vol.Any(cv.string, None),
|
||||
vol.Optional("device_id"): vol.Any(cv.string, None),
|
||||
vol.Optional("satellite_id"): vol.Any(cv.string, None),
|
||||
}
|
||||
)
|
||||
)
|
||||
async def post(self, request: web.Request, data: dict[str, Any]) -> web.Response:
|
||||
"""Handle intent with name/data."""
|
||||
hass = request.app[http.KEY_HASS]
|
||||
language = hass.config.language
|
||||
language = data.get("language", hass.config.language)
|
||||
|
||||
try:
|
||||
intent_name = data["name"]
|
||||
@@ -641,14 +645,21 @@ class IntentHandleView(http.HomeAssistantView):
|
||||
key: {"value": value} for key, value in data.get("data", {}).items()
|
||||
}
|
||||
intent_result = await intent.async_handle(
|
||||
hass, DOMAIN, intent_name, slots, "", self.context(request)
|
||||
hass,
|
||||
DOMAIN,
|
||||
intent_name,
|
||||
slots,
|
||||
"",
|
||||
self.context(request),
|
||||
language=language,
|
||||
assistant=data.get("assistant"),
|
||||
device_id=data.get("device_id"),
|
||||
satellite_id=data.get("satellite_id"),
|
||||
)
|
||||
except (intent.IntentHandleError, intent.MatchFailedError) as err:
|
||||
intent_result = intent.IntentResponse(language=language)
|
||||
intent_result.async_set_speech(str(err))
|
||||
|
||||
if intent_result is None:
|
||||
intent_result = intent.IntentResponse(language=language) # type: ignore[unreachable]
|
||||
intent_result.async_set_speech("Sorry, I couldn't handle that")
|
||||
intent_result.async_set_error(
|
||||
intent.IntentResponseErrorCode.FAILED_TO_HANDLE, str(err)
|
||||
)
|
||||
|
||||
return self.json(intent_result)
|
||||
|
||||
@@ -221,13 +221,13 @@ class IntesisAC(ClimateEntity):
|
||||
def extra_state_attributes(self) -> dict[str, Any]:
|
||||
"""Return the device specific state attributes."""
|
||||
attrs = {}
|
||||
if self._outdoor_temp:
|
||||
if self._outdoor_temp is not None:
|
||||
attrs["outdoor_temp"] = self._outdoor_temp
|
||||
if self._power_consumption_heat:
|
||||
if self._power_consumption_heat is not None:
|
||||
attrs["power_consumption_heat_kw"] = round(
|
||||
self._power_consumption_heat / 1000, 1
|
||||
)
|
||||
if self._power_consumption_cool:
|
||||
if self._power_consumption_cool is not None:
|
||||
attrs["power_consumption_cool_kw"] = round(
|
||||
self._power_consumption_cool / 1000, 1
|
||||
)
|
||||
@@ -244,7 +244,7 @@ class IntesisAC(ClimateEntity):
|
||||
if hvac_mode := kwargs.get(ATTR_HVAC_MODE):
|
||||
await self.async_set_hvac_mode(hvac_mode)
|
||||
|
||||
if temperature := kwargs.get(ATTR_TEMPERATURE):
|
||||
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is not None:
|
||||
_LOGGER.debug("Setting %s to %s degrees", self._device_type, temperature)
|
||||
await self._controller.set_temperature(self._device_id, temperature)
|
||||
self._attr_target_temperature = temperature
|
||||
@@ -271,7 +271,7 @@ class IntesisAC(ClimateEntity):
|
||||
await self._controller.set_mode(self._device_id, MAP_HVAC_MODE_TO_IH[hvac_mode])
|
||||
|
||||
# Send the temperature again in case changing modes has changed it
|
||||
if self._attr_target_temperature:
|
||||
if self._attr_target_temperature is not None:
|
||||
await self._controller.set_temperature(
|
||||
self._device_id, self._attr_target_temperature
|
||||
)
|
||||
|
||||
@@ -358,7 +358,7 @@ PINECIL_SETPOINT_NUMBER_DESCRIPTION = IronOSNumberEntityDescription(
|
||||
native_max_value=MAX_TEMP,
|
||||
native_min_value_f=MIN_TEMP_F,
|
||||
native_max_value_f=MAX_TEMP_F,
|
||||
native_step=5,
|
||||
native_step=1,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ from xknx.dpt import DPTBase, DPTComplex, DPTEnum, DPTNumeric
|
||||
from xknx.dpt.dpt_16 import DPTString
|
||||
|
||||
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
|
||||
from homeassistant.const import UnitOfReactiveEnergy
|
||||
|
||||
HaDptClass = Literal["numeric", "enum", "complex", "string"]
|
||||
|
||||
@@ -36,7 +37,7 @@ def get_supported_dpts() -> Mapping[str, DPTInfo]:
|
||||
main=dpt_class.dpt_main_number, # type: ignore[typeddict-item] # checked in xknx unit tests
|
||||
sub=dpt_class.dpt_sub_number,
|
||||
name=dpt_class.value_type,
|
||||
unit=dpt_class.unit,
|
||||
unit=_sensor_unit_overrides.get(dpt_number_str, dpt_class.unit),
|
||||
sensor_device_class=_sensor_device_classes.get(dpt_number_str),
|
||||
sensor_state_class=_get_sensor_state_class(ha_dpt_class, dpt_number_str),
|
||||
)
|
||||
@@ -77,13 +78,13 @@ _sensor_device_classes: Mapping[str, SensorDeviceClass] = {
|
||||
"12.1200": SensorDeviceClass.VOLUME,
|
||||
"12.1201": SensorDeviceClass.VOLUME,
|
||||
"13.002": SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
"13.010": SensorDeviceClass.ENERGY,
|
||||
"13.012": SensorDeviceClass.REACTIVE_ENERGY,
|
||||
"13.013": SensorDeviceClass.ENERGY,
|
||||
"13.015": SensorDeviceClass.REACTIVE_ENERGY,
|
||||
"13.016": SensorDeviceClass.ENERGY,
|
||||
"13.1200": SensorDeviceClass.VOLUME,
|
||||
"13.1201": SensorDeviceClass.VOLUME,
|
||||
"13.010": SensorDeviceClass.ENERGY, # DPTActiveEnergy
|
||||
"13.012": SensorDeviceClass.REACTIVE_ENERGY, # DPTReactiveEnergy
|
||||
"13.013": SensorDeviceClass.ENERGY, # DPTActiveEnergykWh
|
||||
"13.015": SensorDeviceClass.REACTIVE_ENERGY, # DPTReactiveEnergykVARh
|
||||
"13.016": SensorDeviceClass.ENERGY, # DPTActiveEnergyMWh
|
||||
"13.1200": SensorDeviceClass.VOLUME, # DPTDeltaVolumeLiquidLitre
|
||||
"13.1201": SensorDeviceClass.VOLUME, # DPTDeltaVolumeM3
|
||||
"14.010": SensorDeviceClass.AREA,
|
||||
"14.019": SensorDeviceClass.CURRENT,
|
||||
"14.027": SensorDeviceClass.VOLTAGE,
|
||||
@@ -91,7 +92,7 @@ _sensor_device_classes: Mapping[str, SensorDeviceClass] = {
|
||||
"14.030": SensorDeviceClass.VOLTAGE,
|
||||
"14.031": SensorDeviceClass.ENERGY,
|
||||
"14.033": SensorDeviceClass.FREQUENCY,
|
||||
"14.037": SensorDeviceClass.ENERGY_STORAGE,
|
||||
"14.037": SensorDeviceClass.ENERGY_STORAGE, # DPTHeatQuantity
|
||||
"14.039": SensorDeviceClass.DISTANCE,
|
||||
"14.051": SensorDeviceClass.WEIGHT,
|
||||
"14.056": SensorDeviceClass.POWER,
|
||||
@@ -101,7 +102,7 @@ _sensor_device_classes: Mapping[str, SensorDeviceClass] = {
|
||||
"14.068": SensorDeviceClass.TEMPERATURE,
|
||||
"14.069": SensorDeviceClass.TEMPERATURE,
|
||||
"14.070": SensorDeviceClass.TEMPERATURE_DELTA,
|
||||
"14.076": SensorDeviceClass.VOLUME,
|
||||
"14.076": SensorDeviceClass.VOLUME, # DPTVolume
|
||||
"14.077": SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
"14.080": SensorDeviceClass.APPARENT_POWER,
|
||||
"14.1200": SensorDeviceClass.VOLUME_FLOW_RATE,
|
||||
@@ -121,17 +122,28 @@ _sensor_state_class_overrides: Mapping[str, SensorStateClass | None] = {
|
||||
"13.010": SensorStateClass.TOTAL, # DPTActiveEnergy
|
||||
"13.011": SensorStateClass.TOTAL, # DPTApparantEnergy
|
||||
"13.012": SensorStateClass.TOTAL, # DPTReactiveEnergy
|
||||
"13.013": SensorStateClass.TOTAL, # DPTActiveEnergykWh
|
||||
"13.015": SensorStateClass.TOTAL, # DPTReactiveEnergykVARh
|
||||
"13.016": SensorStateClass.TOTAL, # DPTActiveEnergyMWh
|
||||
"13.1200": SensorStateClass.TOTAL, # DPTDeltaVolumeLiquidLitre
|
||||
"13.1201": SensorStateClass.TOTAL, # DPTDeltaVolumeM3
|
||||
"14.007": SensorStateClass.MEASUREMENT_ANGLE, # DPTAngleDeg
|
||||
"14.037": SensorStateClass.TOTAL, # DPTHeatQuantity
|
||||
"14.051": SensorStateClass.TOTAL, # DPTMass
|
||||
"14.055": SensorStateClass.MEASUREMENT_ANGLE, # DPTPhaseAngleDeg
|
||||
"14.031": SensorStateClass.TOTAL_INCREASING, # DPTEnergy
|
||||
"14.076": SensorStateClass.TOTAL, # DPTVolume
|
||||
"17.001": None, # DPTSceneNumber
|
||||
"29.010": SensorStateClass.TOTAL, # DPTActiveEnergy8Byte
|
||||
"29.011": SensorStateClass.TOTAL, # DPTApparantEnergy8Byte
|
||||
"29.012": SensorStateClass.TOTAL, # DPTReactiveEnergy8Byte
|
||||
}
|
||||
|
||||
_sensor_unit_overrides: Mapping[str, str] = {
|
||||
"13.012": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR, # DPTReactiveEnergy (VARh in KNX)
|
||||
"13.015": UnitOfReactiveEnergy.KILO_VOLT_AMPERE_REACTIVE_HOUR, # DPTReactiveEnergykVARh (kVARh in KNX)
|
||||
"29.012": UnitOfReactiveEnergy.VOLT_AMPERE_REACTIVE_HOUR, # DPTReactiveEnergy8Byte (VARh in KNX)
|
||||
}
|
||||
|
||||
|
||||
def _get_sensor_state_class(
|
||||
ha_dpt_class: HaDptClass, dpt_number_str: str
|
||||
|
||||
@@ -39,6 +39,7 @@ from homeassistant.const import (
|
||||
CONF_NAME,
|
||||
CONF_PAYLOAD,
|
||||
CONF_TYPE,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
Platform,
|
||||
)
|
||||
@@ -867,6 +868,7 @@ class SensorSchema(KNXPlatformSchema):
|
||||
vol.Required(CONF_TYPE): sensor_type_validator,
|
||||
vol.Required(CONF_STATE_ADDRESS): ga_list_validator,
|
||||
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
|
||||
vol.Optional(CONF_ENTITY_CATEGORY): ENTITY_CATEGORIES_SCHEMA,
|
||||
}
|
||||
),
|
||||
|
||||
@@ -216,20 +216,22 @@ class KnxYamlSensor(_KnxSensor, KnxYamlEntity):
|
||||
dpt_string = self._device.sensor_value.dpt_class.dpt_number_str()
|
||||
dpt_info = get_supported_dpts()[dpt_string]
|
||||
|
||||
if device_class := config.get(CONF_DEVICE_CLASS):
|
||||
self._attr_device_class = device_class
|
||||
else:
|
||||
self._attr_device_class = dpt_info["sensor_device_class"]
|
||||
|
||||
self._attr_state_class = (
|
||||
config.get(CONF_STATE_CLASS) or dpt_info["sensor_state_class"]
|
||||
self._attr_device_class = config.get(
|
||||
CONF_DEVICE_CLASS,
|
||||
dpt_info["sensor_device_class"],
|
||||
)
|
||||
|
||||
self._attr_native_unit_of_measurement = dpt_info["unit"]
|
||||
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
|
||||
self._attr_extra_state_attributes = {}
|
||||
self._attr_force_update = config[SensorSchema.CONF_ALWAYS_CALLBACK]
|
||||
self._attr_native_unit_of_measurement = config.get(
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
dpt_info["unit"],
|
||||
)
|
||||
self._attr_state_class = config.get(
|
||||
CONF_STATE_CLASS,
|
||||
dpt_info["sensor_state_class"],
|
||||
)
|
||||
self._attr_unique_id = str(self._device.sensor_value.group_address_state)
|
||||
|
||||
|
||||
class KnxUiSensor(_KnxSensor, KnxUiEntity):
|
||||
|
||||
@@ -2,35 +2,16 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import krakenex
|
||||
import pykrakenapi
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_SCAN_INTERVAL, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import (
|
||||
CONF_TRACKED_ASSET_PAIRS,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DEFAULT_TRACKED_ASSET_PAIR,
|
||||
DISPATCH_CONFIG_UPDATED,
|
||||
DOMAIN,
|
||||
KrakenResponse,
|
||||
)
|
||||
from .utils import get_tradable_asset_pairs
|
||||
|
||||
CALL_RATE_LIMIT_SLEEP = 1
|
||||
from .const import DISPATCH_CONFIG_UPDATED, DOMAIN
|
||||
from .coordinator import KrakenData
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up kraken from a config entry."""
|
||||
@@ -53,111 +34,6 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
|
||||
return unload_ok
|
||||
|
||||
|
||||
class KrakenData:
|
||||
"""Define an object to hold kraken data."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize."""
|
||||
self._hass = hass
|
||||
self._config_entry = config_entry
|
||||
self._api = pykrakenapi.KrakenAPI(krakenex.API(), retry=0, crl_sleep=0)
|
||||
self.tradable_asset_pairs: dict[str, str] = {}
|
||||
self.coordinator: DataUpdateCoordinator[KrakenResponse | None] | None = None
|
||||
|
||||
async def async_update(self) -> KrakenResponse | None:
|
||||
"""Get the latest data from the Kraken.com REST API.
|
||||
|
||||
All tradeable asset pairs are retrieved, not the tracked asset pairs
|
||||
selected by the user. This enables us to check for an unknown and
|
||||
thus likely removed asset pair in sensor.py and only log a warning
|
||||
once.
|
||||
"""
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
return await self._hass.async_add_executor_job(self._get_kraken_data)
|
||||
except pykrakenapi.pykrakenapi.KrakenAPIError as error:
|
||||
if "Unknown asset pair" in str(error):
|
||||
_LOGGER.warning(
|
||||
"Kraken.com reported an unknown asset pair. Refreshing list of"
|
||||
" tradable asset pairs"
|
||||
)
|
||||
await self._async_refresh_tradable_asset_pairs()
|
||||
else:
|
||||
raise UpdateFailed(
|
||||
f"Unable to fetch data from Kraken.com: {error}"
|
||||
) from error
|
||||
except pykrakenapi.pykrakenapi.CallRateLimitError:
|
||||
_LOGGER.warning(
|
||||
"Exceeded the Kraken.com call rate limit. Increase the update interval"
|
||||
" to prevent this error"
|
||||
)
|
||||
return None
|
||||
|
||||
def _get_kraken_data(self) -> KrakenResponse:
|
||||
websocket_name_pairs = self._get_websocket_name_asset_pairs()
|
||||
ticker_df = self._api.get_ticker_information(websocket_name_pairs)
|
||||
# Rename columns to their full name
|
||||
ticker_df = ticker_df.rename(
|
||||
columns={
|
||||
"a": "ask",
|
||||
"b": "bid",
|
||||
"c": "last_trade_closed",
|
||||
"v": "volume",
|
||||
"p": "volume_weighted_average",
|
||||
"t": "number_of_trades",
|
||||
"l": "low",
|
||||
"h": "high",
|
||||
"o": "opening_price",
|
||||
}
|
||||
)
|
||||
response_dict: KrakenResponse = ticker_df.transpose().to_dict()
|
||||
return response_dict
|
||||
|
||||
async def _async_refresh_tradable_asset_pairs(self) -> None:
|
||||
self.tradable_asset_pairs = await self._hass.async_add_executor_job(
|
||||
get_tradable_asset_pairs, self._api
|
||||
)
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
"""Set up the Kraken integration."""
|
||||
if not self._config_entry.options:
|
||||
options = {
|
||||
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
|
||||
CONF_TRACKED_ASSET_PAIRS: [DEFAULT_TRACKED_ASSET_PAIR],
|
||||
}
|
||||
self._hass.config_entries.async_update_entry(
|
||||
self._config_entry, options=options
|
||||
)
|
||||
await self._async_refresh_tradable_asset_pairs()
|
||||
# Wait 1 second to avoid triggering the KrakenAPI CallRateLimiter
|
||||
await asyncio.sleep(CALL_RATE_LIMIT_SLEEP)
|
||||
self.coordinator = DataUpdateCoordinator(
|
||||
self._hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
config_entry=self._config_entry,
|
||||
update_method=self.async_update,
|
||||
update_interval=timedelta(
|
||||
seconds=self._config_entry.options[CONF_SCAN_INTERVAL]
|
||||
),
|
||||
)
|
||||
await self.coordinator.async_config_entry_first_refresh()
|
||||
# Wait 1 second to avoid triggering the KrakenAPI CallRateLimiter
|
||||
await asyncio.sleep(CALL_RATE_LIMIT_SLEEP)
|
||||
|
||||
def _get_websocket_name_asset_pairs(self) -> str:
|
||||
return ",".join(
|
||||
pair
|
||||
for tracked_pair in self._config_entry.options[CONF_TRACKED_ASSET_PAIRS]
|
||||
if (pair := self.tradable_asset_pairs.get(tracked_pair)) is not None
|
||||
)
|
||||
|
||||
def set_update_interval(self, update_interval: int) -> None:
|
||||
"""Set the coordinator update_interval to the supplied update_interval."""
|
||||
if self.coordinator is not None:
|
||||
self.coordinator.update_interval = timedelta(seconds=update_interval)
|
||||
|
||||
|
||||
async def async_options_updated(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
"""Triggered by config entry options updates."""
|
||||
hass.data[DOMAIN].set_update_interval(config_entry.options[CONF_SCAN_INTERVAL])
|
||||
|
||||
133
homeassistant/components/kraken/coordinator.py
Normal file
133
homeassistant/components/kraken/coordinator.py
Normal file
@@ -0,0 +1,133 @@
|
||||
"""Coordinator for the kraken integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
import krakenex
|
||||
import pykrakenapi
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_SCAN_INTERVAL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import (
|
||||
CONF_TRACKED_ASSET_PAIRS,
|
||||
DEFAULT_SCAN_INTERVAL,
|
||||
DEFAULT_TRACKED_ASSET_PAIR,
|
||||
DOMAIN,
|
||||
KrakenResponse,
|
||||
)
|
||||
from .utils import get_tradable_asset_pairs
|
||||
|
||||
CALL_RATE_LIMIT_SLEEP = 1
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class KrakenData:
|
||||
"""Define an object to hold kraken data."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize."""
|
||||
self._hass = hass
|
||||
self._config_entry = config_entry
|
||||
self._api = pykrakenapi.KrakenAPI(krakenex.API(), retry=0, crl_sleep=0)
|
||||
self.tradable_asset_pairs: dict[str, str] = {}
|
||||
self.coordinator: DataUpdateCoordinator[KrakenResponse | None] | None = None
|
||||
|
||||
async def async_update(self) -> KrakenResponse | None:
|
||||
"""Get the latest data from the Kraken.com REST API.
|
||||
|
||||
All tradeable asset pairs are retrieved, not the tracked asset pairs
|
||||
selected by the user. This enables us to check for an unknown and
|
||||
thus likely removed asset pair in sensor.py and only log a warning
|
||||
once.
|
||||
"""
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
return await self._hass.async_add_executor_job(self._get_kraken_data)
|
||||
except pykrakenapi.pykrakenapi.KrakenAPIError as error:
|
||||
if "Unknown asset pair" in str(error):
|
||||
_LOGGER.warning(
|
||||
"Kraken.com reported an unknown asset pair. Refreshing list of"
|
||||
" tradable asset pairs"
|
||||
)
|
||||
await self._async_refresh_tradable_asset_pairs()
|
||||
else:
|
||||
raise UpdateFailed(
|
||||
f"Unable to fetch data from Kraken.com: {error}"
|
||||
) from error
|
||||
except pykrakenapi.pykrakenapi.CallRateLimitError:
|
||||
_LOGGER.warning(
|
||||
"Exceeded the Kraken.com call rate limit. Increase the update interval"
|
||||
" to prevent this error"
|
||||
)
|
||||
return None
|
||||
|
||||
def _get_kraken_data(self) -> KrakenResponse:
|
||||
websocket_name_pairs = self._get_websocket_name_asset_pairs()
|
||||
ticker_df = self._api.get_ticker_information(websocket_name_pairs)
|
||||
# Rename columns to their full name
|
||||
ticker_df = ticker_df.rename(
|
||||
columns={
|
||||
"a": "ask",
|
||||
"b": "bid",
|
||||
"c": "last_trade_closed",
|
||||
"v": "volume",
|
||||
"p": "volume_weighted_average",
|
||||
"t": "number_of_trades",
|
||||
"l": "low",
|
||||
"h": "high",
|
||||
"o": "opening_price",
|
||||
}
|
||||
)
|
||||
response_dict: KrakenResponse = ticker_df.transpose().to_dict()
|
||||
return response_dict
|
||||
|
||||
async def _async_refresh_tradable_asset_pairs(self) -> None:
|
||||
self.tradable_asset_pairs = await self._hass.async_add_executor_job(
|
||||
get_tradable_asset_pairs, self._api
|
||||
)
|
||||
|
||||
async def async_setup(self) -> None:
|
||||
"""Set up the Kraken integration."""
|
||||
if not self._config_entry.options:
|
||||
options = {
|
||||
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
|
||||
CONF_TRACKED_ASSET_PAIRS: [DEFAULT_TRACKED_ASSET_PAIR],
|
||||
}
|
||||
self._hass.config_entries.async_update_entry(
|
||||
self._config_entry, options=options
|
||||
)
|
||||
await self._async_refresh_tradable_asset_pairs()
|
||||
# Wait 1 second to avoid triggering the KrakenAPI CallRateLimiter
|
||||
await asyncio.sleep(CALL_RATE_LIMIT_SLEEP)
|
||||
self.coordinator = DataUpdateCoordinator(
|
||||
self._hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
config_entry=self._config_entry,
|
||||
update_method=self.async_update,
|
||||
update_interval=timedelta(
|
||||
seconds=self._config_entry.options[CONF_SCAN_INTERVAL]
|
||||
),
|
||||
)
|
||||
await self.coordinator.async_config_entry_first_refresh()
|
||||
# Wait 1 second to avoid triggering the KrakenAPI CallRateLimiter
|
||||
await asyncio.sleep(CALL_RATE_LIMIT_SLEEP)
|
||||
|
||||
def _get_websocket_name_asset_pairs(self) -> str:
|
||||
return ",".join(
|
||||
pair
|
||||
for tracked_pair in self._config_entry.options[CONF_TRACKED_ASSET_PAIRS]
|
||||
if (pair := self.tradable_asset_pairs.get(tracked_pair)) is not None
|
||||
)
|
||||
|
||||
def set_update_interval(self, update_interval: int) -> None:
|
||||
"""Set the coordinator update_interval to the supplied update_interval."""
|
||||
if self.coordinator is not None:
|
||||
self.coordinator.update_interval = timedelta(seconds=update_interval)
|
||||
@@ -22,13 +22,13 @@ from homeassistant.helpers.update_coordinator import (
|
||||
DataUpdateCoordinator,
|
||||
)
|
||||
|
||||
from . import KrakenData
|
||||
from .const import (
|
||||
CONF_TRACKED_ASSET_PAIRS,
|
||||
DISPATCH_CONFIG_UPDATED,
|
||||
DOMAIN,
|
||||
KrakenResponse,
|
||||
)
|
||||
from .coordinator import KrakenData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -2,61 +2,20 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import TypedDict
|
||||
|
||||
from pylaunches import PyLaunches, PyLaunchesError
|
||||
from pylaunches.types import Launch, StarshipResponse
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
from .coordinator import LaunchLibraryCoordinator
|
||||
|
||||
PLATFORMS = [Platform.SENSOR]
|
||||
|
||||
|
||||
class LaunchLibraryData(TypedDict):
|
||||
"""Typed dict representation of data returned from pylaunches."""
|
||||
|
||||
upcoming_launches: list[Launch]
|
||||
starship_events: StarshipResponse
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up this integration using UI."""
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
|
||||
session = async_get_clientsession(hass)
|
||||
launches = PyLaunches(session)
|
||||
|
||||
async def async_update() -> LaunchLibraryData:
|
||||
try:
|
||||
return LaunchLibraryData(
|
||||
upcoming_launches=await launches.launch_upcoming(
|
||||
filters={"limit": 1, "hide_recent_previous": "True"},
|
||||
),
|
||||
starship_events=await launches.dashboard_starship(),
|
||||
)
|
||||
except PyLaunchesError as ex:
|
||||
raise UpdateFailed(ex) from ex
|
||||
|
||||
coordinator = DataUpdateCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=DOMAIN,
|
||||
update_method=async_update,
|
||||
update_interval=timedelta(hours=1),
|
||||
)
|
||||
|
||||
coordinator = LaunchLibraryCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
hass.data[DOMAIN] = coordinator
|
||||
|
||||
60
homeassistant/components/launch_library/coordinator.py
Normal file
60
homeassistant/components/launch_library/coordinator.py
Normal file
@@ -0,0 +1,60 @@
|
||||
"""DataUpdateCoordinator for the launch_library integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import TypedDict
|
||||
|
||||
from pylaunches import PyLaunches, PyLaunchesError
|
||||
from pylaunches.types import Launch, StarshipResponse
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LaunchLibraryData(TypedDict):
|
||||
"""Typed dict representation of data returned from pylaunches."""
|
||||
|
||||
upcoming_launches: list[Launch]
|
||||
starship_events: StarshipResponse
|
||||
|
||||
|
||||
class LaunchLibraryCoordinator(DataUpdateCoordinator[LaunchLibraryData]):
|
||||
"""Class to manage fetching Launch Library data."""
|
||||
|
||||
config_entry: ConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=DOMAIN,
|
||||
update_interval=timedelta(hours=1),
|
||||
)
|
||||
session = async_get_clientsession(hass)
|
||||
self._launches = PyLaunches(session)
|
||||
|
||||
async def _async_update_data(self) -> LaunchLibraryData:
|
||||
"""Fetch data from Launch Library."""
|
||||
try:
|
||||
return LaunchLibraryData(
|
||||
upcoming_launches=await self._launches.launch_upcoming(
|
||||
filters={"limit": 1, "hide_recent_previous": "True"},
|
||||
),
|
||||
starship_events=await self._launches.dashboard_starship(),
|
||||
)
|
||||
except PyLaunchesError as ex:
|
||||
raise UpdateFailed(ex) from ex
|
||||
@@ -8,10 +8,9 @@ from pylaunches.types import Event, Launch
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from . import LaunchLibraryData
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LaunchLibraryCoordinator
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
@@ -20,7 +19,7 @@ async def async_get_config_entry_diagnostics(
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
|
||||
coordinator: DataUpdateCoordinator[LaunchLibraryData] = hass.data[DOMAIN]
|
||||
coordinator: LaunchLibraryCoordinator = hass.data[DOMAIN]
|
||||
if coordinator.data is None:
|
||||
return {}
|
||||
|
||||
|
||||
@@ -19,14 +19,11 @@ from homeassistant.const import CONF_NAME, PERCENTAGE
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
CoordinatorEntity,
|
||||
DataUpdateCoordinator,
|
||||
)
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
from homeassistant.util.dt import parse_datetime
|
||||
|
||||
from . import LaunchLibraryData
|
||||
from .const import DOMAIN
|
||||
from .coordinator import LaunchLibraryCoordinator
|
||||
|
||||
DEFAULT_NEXT_LAUNCH_NAME = "Next launch"
|
||||
|
||||
@@ -126,7 +123,7 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the sensor platform."""
|
||||
name = entry.data.get(CONF_NAME, DEFAULT_NEXT_LAUNCH_NAME)
|
||||
coordinator: DataUpdateCoordinator[LaunchLibraryData] = hass.data[DOMAIN]
|
||||
coordinator: LaunchLibraryCoordinator = hass.data[DOMAIN]
|
||||
|
||||
async_add_entities(
|
||||
LaunchLibrarySensor(
|
||||
@@ -139,9 +136,7 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
class LaunchLibrarySensor(
|
||||
CoordinatorEntity[DataUpdateCoordinator[LaunchLibraryData]], SensorEntity
|
||||
):
|
||||
class LaunchLibrarySensor(CoordinatorEntity[LaunchLibraryCoordinator], SensorEntity):
|
||||
"""Representation of the next launch sensors."""
|
||||
|
||||
_attr_attribution = "Data provided by Launch Library."
|
||||
@@ -151,7 +146,7 @@ class LaunchLibrarySensor(
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: DataUpdateCoordinator[LaunchLibraryData],
|
||||
coordinator: LaunchLibraryCoordinator,
|
||||
entry_id: str,
|
||||
description: LaunchLibrarySensorEntityDescription,
|
||||
name: str,
|
||||
|
||||
@@ -16,7 +16,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .const import DOMAIN, MANUFACTURER, MODELS
|
||||
from .coordinator import LaundrifyConfigEntry, LaundrifyUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -47,7 +47,14 @@ class LaundrifyBaseSensor(SensorEntity):
|
||||
def __init__(self, device: LaundrifyDevice) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self._device = device
|
||||
self._attr_device_info = DeviceInfo(identifiers={(DOMAIN, device.id)})
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, device.id)},
|
||||
name=device.name,
|
||||
manufacturer=MANUFACTURER,
|
||||
model=MODELS[device.model],
|
||||
sw_version=device.firmwareVersion,
|
||||
configuration_url=f"http://{device.internalIP}",
|
||||
)
|
||||
self._attr_unique_id = f"{device.id}_{self._attr_device_class}"
|
||||
|
||||
|
||||
|
||||
@@ -44,6 +44,9 @@
|
||||
},
|
||||
"started_mowing": {
|
||||
"trigger": "mdi:play"
|
||||
},
|
||||
"started_returning": {
|
||||
"trigger": "mdi:home-import-outline"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -139,6 +139,16 @@
|
||||
}
|
||||
},
|
||||
"name": "Lawn mower started mowing"
|
||||
},
|
||||
"started_returning": {
|
||||
"description": "Triggers after one or more lawn mowers start returning to dock.",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::lawn_mower::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::lawn_mower::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "Lawn mower started returning to dock"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,6 +12,9 @@ TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"started_mowing": make_entity_target_state_trigger(
|
||||
DOMAIN, LawnMowerActivity.MOWING
|
||||
),
|
||||
"started_returning": make_entity_target_state_trigger(
|
||||
DOMAIN, LawnMowerActivity.RETURNING
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -18,3 +18,4 @@ docked: *trigger_common
|
||||
errored: *trigger_common
|
||||
paused_mowing: *trigger_common
|
||||
started_mowing: *trigger_common
|
||||
started_returning: *trigger_common
|
||||
|
||||
@@ -3,25 +3,20 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from led_ble import BLEAK_EXCEPTIONS, LEDBLE
|
||||
from led_ble import LEDBLE
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
from homeassistant.components.bluetooth.match import ADDRESS, BluetoothCallbackMatcher
|
||||
from homeassistant.const import CONF_ADDRESS, EVENT_HOMEASSISTANT_STOP, Platform
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DEVICE_TIMEOUT, UPDATE_SECONDS
|
||||
from .models import LEDBLEConfigEntry, LEDBLEData
|
||||
from .const import DEVICE_TIMEOUT
|
||||
from .coordinator import LEDBLEConfigEntry, LEDBLECoordinator, LEDBLEData
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.LIGHT]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: LEDBLEConfigEntry) -> bool:
|
||||
"""Set up LED BLE from a config entry."""
|
||||
@@ -53,23 +48,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: LEDBLEConfigEntry) -> bo
|
||||
)
|
||||
)
|
||||
|
||||
async def _async_update() -> None:
|
||||
"""Update the device state."""
|
||||
try:
|
||||
await led_ble.update()
|
||||
except BLEAK_EXCEPTIONS as ex:
|
||||
raise UpdateFailed(str(ex)) from ex
|
||||
|
||||
startup_event = asyncio.Event()
|
||||
cancel_first_update = led_ble.register_callback(lambda *_: startup_event.set())
|
||||
coordinator = DataUpdateCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=led_ble.name,
|
||||
update_method=_async_update,
|
||||
update_interval=timedelta(seconds=UPDATE_SECONDS),
|
||||
)
|
||||
coordinator = LEDBLECoordinator(hass, entry, led_ble)
|
||||
|
||||
try:
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
58
homeassistant/components/led_ble/coordinator.py
Normal file
58
homeassistant/components/led_ble/coordinator.py
Normal file
@@ -0,0 +1,58 @@
|
||||
"""The LED BLE coordinator."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from led_ble import BLEAK_EXCEPTIONS, LEDBLE
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import UPDATE_SECONDS
|
||||
|
||||
type LEDBLEConfigEntry = ConfigEntry[LEDBLEData]
|
||||
|
||||
|
||||
@dataclass
|
||||
class LEDBLEData:
|
||||
"""Data for the led ble integration."""
|
||||
|
||||
title: str
|
||||
device: LEDBLE
|
||||
coordinator: LEDBLECoordinator
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class LEDBLECoordinator(DataUpdateCoordinator[None]):
|
||||
"""Class to manage fetching LED BLE data."""
|
||||
|
||||
config_entry: LEDBLEConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: LEDBLEConfigEntry,
|
||||
led_ble: LEDBLE,
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=led_ble.name,
|
||||
update_interval=timedelta(seconds=UPDATE_SECONDS),
|
||||
)
|
||||
self.led_ble = led_ble
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Update the device state."""
|
||||
try:
|
||||
await self.led_ble.update()
|
||||
except BLEAK_EXCEPTIONS as ex:
|
||||
raise UpdateFailed(str(ex)) from ex
|
||||
@@ -19,13 +19,10 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
CoordinatorEntity,
|
||||
DataUpdateCoordinator,
|
||||
)
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DEFAULT_EFFECT_SPEED
|
||||
from .models import LEDBLEConfigEntry
|
||||
from .coordinator import LEDBLEConfigEntry, LEDBLECoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -38,7 +35,7 @@ async def async_setup_entry(
|
||||
async_add_entities([LEDBLEEntity(data.coordinator, data.device, entry.title)])
|
||||
|
||||
|
||||
class LEDBLEEntity(CoordinatorEntity[DataUpdateCoordinator[None]], LightEntity):
|
||||
class LEDBLEEntity(CoordinatorEntity[LEDBLECoordinator], LightEntity):
|
||||
"""Representation of LEDBLE device."""
|
||||
|
||||
_attr_supported_color_modes = {ColorMode.RGB, ColorMode.WHITE}
|
||||
@@ -47,7 +44,7 @@ class LEDBLEEntity(CoordinatorEntity[DataUpdateCoordinator[None]], LightEntity):
|
||||
_attr_supported_features = LightEntityFeature.EFFECT
|
||||
|
||||
def __init__(
|
||||
self, coordinator: DataUpdateCoordinator[None], device: LEDBLE, name: str
|
||||
self, coordinator: LEDBLECoordinator, device: LEDBLE, name: str
|
||||
) -> None:
|
||||
"""Initialize an ledble light."""
|
||||
super().__init__(coordinator)
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
"""The led ble integration models."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from led_ble import LEDBLE
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
type LEDBLEConfigEntry = ConfigEntry[LEDBLEData]
|
||||
|
||||
|
||||
@dataclass
|
||||
class LEDBLEData:
|
||||
"""Data for the led ble integration."""
|
||||
|
||||
title: str
|
||||
device: LEDBLE
|
||||
coordinator: DataUpdateCoordinator[None]
|
||||
@@ -0,0 +1,38 @@
|
||||
"""Diagnostics support for Libre Hardware Monitor."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import asdict, replace
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import LibreHardwareMonitorConfigEntry, LibreHardwareMonitorData
|
||||
|
||||
TO_REDACT = {CONF_USERNAME, CONF_PASSWORD}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, config_entry: LibreHardwareMonitorConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for a config entry."""
|
||||
lhm_data: LibreHardwareMonitorData = config_entry.runtime_data.data
|
||||
|
||||
return {
|
||||
"config_entry_data": {
|
||||
**async_redact_data(dict(config_entry.data), TO_REDACT),
|
||||
},
|
||||
"lhm_data": _as_dict(lhm_data),
|
||||
}
|
||||
|
||||
|
||||
def _as_dict(data: LibreHardwareMonitorData) -> dict[str, Any]:
|
||||
return asdict(
|
||||
replace(
|
||||
data,
|
||||
main_device_ids_and_names=dict(data.main_device_ids_and_names), # type: ignore[arg-type]
|
||||
sensor_data=dict(data.sensor_data), # type: ignore[arg-type]
|
||||
)
|
||||
)
|
||||
@@ -49,7 +49,7 @@ rules:
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
diagnostics: done
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
|
||||
@@ -7,6 +7,6 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["mastodon"],
|
||||
"quality_scale": "silver",
|
||||
"quality_scale": "gold",
|
||||
"requirements": ["Mastodon.py==2.1.2"]
|
||||
}
|
||||
|
||||
@@ -49,11 +49,11 @@ rules:
|
||||
Web service does not support discovery.
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: todo
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
|
||||
@@ -67,12 +67,21 @@ class MaxCubeClimate(ClimateEntity):
|
||||
"""MAX! Cube ClimateEntity."""
|
||||
|
||||
_attr_hvac_modes = [HVACMode.OFF, HVACMode.AUTO, HVACMode.HEAT]
|
||||
_attr_preset_modes = [
|
||||
PRESET_NONE,
|
||||
PRESET_BOOST,
|
||||
PRESET_COMFORT,
|
||||
PRESET_ECO,
|
||||
PRESET_AWAY,
|
||||
PRESET_ON,
|
||||
]
|
||||
_attr_supported_features = (
|
||||
ClimateEntityFeature.TARGET_TEMPERATURE
|
||||
| ClimateEntityFeature.PRESET_MODE
|
||||
| ClimateEntityFeature.TURN_OFF
|
||||
| ClimateEntityFeature.TURN_ON
|
||||
)
|
||||
_attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
|
||||
def __init__(self, handler, device):
|
||||
"""Initialize MAX! Cube ClimateEntity."""
|
||||
@@ -80,17 +89,7 @@ class MaxCubeClimate(ClimateEntity):
|
||||
self._attr_name = f"{room.name} {device.name}"
|
||||
self._cubehandle = handler
|
||||
self._device = device
|
||||
self._attr_should_poll = True
|
||||
self._attr_unique_id = self._device.serial
|
||||
self._attr_temperature_unit = UnitOfTemperature.CELSIUS
|
||||
self._attr_preset_modes = [
|
||||
PRESET_NONE,
|
||||
PRESET_BOOST,
|
||||
PRESET_COMFORT,
|
||||
PRESET_ECO,
|
||||
PRESET_AWAY,
|
||||
PRESET_ON,
|
||||
]
|
||||
|
||||
@property
|
||||
def min_temp(self) -> float:
|
||||
@@ -106,7 +105,7 @@ class MaxCubeClimate(ClimateEntity):
|
||||
return self._device.max_temperature or MAX_TEMPERATURE
|
||||
|
||||
@property
|
||||
def current_temperature(self):
|
||||
def current_temperature(self) -> float:
|
||||
"""Return the current temperature."""
|
||||
return self._device.actual_temperature
|
||||
|
||||
@@ -176,7 +175,7 @@ class MaxCubeClimate(ClimateEntity):
|
||||
return HVACAction.OFF if self.hvac_mode == HVACMode.OFF else HVACAction.IDLE
|
||||
|
||||
@property
|
||||
def target_temperature(self):
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the temperature we try to reach."""
|
||||
temp = self._device.target_temperature
|
||||
if temp is None or temp < self.min_temp or temp > self.max_temp:
|
||||
|
||||
@@ -617,11 +617,11 @@ class OvenProgramId(MieleEnum, missing_to_none=True):
|
||||
evaporate_water = 327
|
||||
shabbat_program = 335
|
||||
yom_tov = 336
|
||||
drying = 357
|
||||
drying = 357, 2028
|
||||
heat_crockery = 358
|
||||
prove_dough = 359
|
||||
prove_dough = 359, 2023
|
||||
low_temperature_cooking = 360
|
||||
steam_cooking = 361
|
||||
steam_cooking = 8, 361
|
||||
keeping_warm = 362
|
||||
apple_sponge = 364
|
||||
apple_pie = 365
|
||||
@@ -668,9 +668,9 @@ class OvenProgramId(MieleEnum, missing_to_none=True):
|
||||
saddle_of_roebuck = 456
|
||||
salmon_fillet = 461
|
||||
potato_cheese_gratin = 464
|
||||
trout = 486
|
||||
carp = 491
|
||||
salmon_trout = 492
|
||||
trout = 486, 2224
|
||||
carp = 491, 2233
|
||||
salmon_trout = 492, 2241
|
||||
springform_tin_15cm = 496
|
||||
springform_tin_20cm = 497
|
||||
springform_tin_25cm = 498
|
||||
@@ -736,137 +736,15 @@ class OvenProgramId(MieleEnum, missing_to_none=True):
|
||||
pork_belly = 701
|
||||
pikeperch_fillet_with_vegetables = 702
|
||||
steam_bake = 99001
|
||||
|
||||
|
||||
class DishWarmerProgramId(MieleEnum, missing_to_none=True):
|
||||
"""Program Id codes for dish warmers."""
|
||||
|
||||
no_program = 0, -1
|
||||
warm_cups_glasses = 1
|
||||
warm_dishes_plates = 2
|
||||
keep_warm = 3
|
||||
slow_roasting = 4
|
||||
|
||||
|
||||
class RobotVacuumCleanerProgramId(MieleEnum, missing_to_none=True):
|
||||
"""Program Id codes for robot vacuum cleaners."""
|
||||
|
||||
no_program = 0, -1
|
||||
auto = 1
|
||||
spot = 2
|
||||
turbo = 3
|
||||
silent = 4
|
||||
|
||||
|
||||
class CoffeeSystemProgramId(MieleEnum, missing_to_none=True):
|
||||
"""Program Id codes for coffee systems."""
|
||||
|
||||
no_program = 0, -1
|
||||
|
||||
check_appliance = 17004
|
||||
|
||||
# profile 1
|
||||
ristretto = 24000, 24032, 24064, 24096, 24128
|
||||
espresso = 24001, 24033, 24065, 24097, 24129
|
||||
coffee = 24002, 24034, 24066, 24098, 24130
|
||||
long_coffee = 24003, 24035, 24067, 24099, 24131
|
||||
cappuccino = 24004, 24036, 24068, 24100, 24132
|
||||
cappuccino_italiano = 24005, 24037, 24069, 24101, 24133
|
||||
latte_macchiato = 24006, 24038, 24070, 24102, 24134
|
||||
espresso_macchiato = 24007, 24039, 24071, 24135
|
||||
cafe_au_lait = 24008, 24040, 24072, 24104, 24136
|
||||
caffe_latte = 24009, 24041, 24073, 24105, 24137
|
||||
flat_white = 24012, 24044, 24076, 24108, 24140
|
||||
very_hot_water = 24013, 24045, 24077, 24109, 24141
|
||||
hot_water = 24014, 24046, 24078, 24110, 24142
|
||||
hot_milk = 24015, 24047, 24079, 24111, 24143
|
||||
milk_foam = 24016, 24048, 24080, 24112, 24144
|
||||
black_tea = 24017, 24049, 24081, 24113, 24145
|
||||
herbal_tea = 24018, 24050, 24082, 24114, 24146
|
||||
fruit_tea = 24019, 24051, 24083, 24115, 24147
|
||||
green_tea = 24020, 24052, 24084, 24116, 24148
|
||||
white_tea = 24021, 24053, 24085, 24117, 24149
|
||||
japanese_tea = 24022, 29054, 24086, 24118, 24150
|
||||
# special programs
|
||||
coffee_pot = 24400
|
||||
barista_assistant = 24407
|
||||
# machine settings menu
|
||||
appliance_settings = (
|
||||
16016, # display brightness
|
||||
16018, # volume
|
||||
16019, # buttons volume
|
||||
16020, # child lock
|
||||
16021, # water hardness
|
||||
16027, # welcome sound
|
||||
16033, # connection status
|
||||
16035, # remote control
|
||||
16037, # remote update
|
||||
24500, # total dispensed
|
||||
24502, # lights appliance on
|
||||
24503, # lights appliance off
|
||||
24504, # turn off lights after
|
||||
24506, # altitude
|
||||
24513, # performance mode
|
||||
24516, # turn off after
|
||||
24537, # advanced mode
|
||||
24542, # tea timer
|
||||
24549, # total coffee dispensed
|
||||
24550, # total tea dispensed
|
||||
24551, # total ristretto
|
||||
24552, # total cappuccino
|
||||
24553, # total espresso
|
||||
24554, # total coffee
|
||||
24555, # total long coffee
|
||||
24556, # total italian cappuccino
|
||||
24557, # total latte macchiato
|
||||
24558, # total caffe latte
|
||||
24560, # total espresso macchiato
|
||||
24562, # total flat white
|
||||
24563, # total coffee with milk
|
||||
24564, # total black tea
|
||||
24565, # total herbal tea
|
||||
24566, # total fruit tea
|
||||
24567, # total green tea
|
||||
24568, # total white tea
|
||||
24569, # total japanese tea
|
||||
24571, # total milk foam
|
||||
24572, # total hot milk
|
||||
24573, # total hot water
|
||||
24574, # total very hot water
|
||||
24575, # counter to descaling
|
||||
24576, # counter to brewing unit degreasing
|
||||
24800, # maintenance
|
||||
24801, # profiles settings menu
|
||||
24813, # add profile
|
||||
)
|
||||
appliance_rinse = 24750, 24759, 24773, 24787, 24788
|
||||
intermediate_rinsing = 24758
|
||||
automatic_maintenance = 24778
|
||||
descaling = 24751
|
||||
brewing_unit_degrease = 24753
|
||||
milk_pipework_rinse = 24754
|
||||
milk_pipework_clean = 24789
|
||||
|
||||
|
||||
class SteamOvenMicroProgramId(MieleEnum, missing_to_none=True):
|
||||
"""Program Id codes for steam oven micro combo."""
|
||||
|
||||
no_program = 0, -1
|
||||
steam_cooking = 8
|
||||
microwave = 19
|
||||
popcorn = 53
|
||||
quick_mw = 54
|
||||
sous_vide = 72
|
||||
eco_steam_cooking = 75
|
||||
rapid_steam_cooking = 77
|
||||
descale = 326
|
||||
menu_cooking = 330
|
||||
reheating_with_steam = 2018
|
||||
defrosting_with_steam = 2019
|
||||
blanching = 2020
|
||||
bottling = 2021
|
||||
sterilize_crockery = 2022
|
||||
prove_dough = 2023
|
||||
soak = 2027
|
||||
reheating_with_microwave = 2029
|
||||
defrosting_with_microwave = 2030
|
||||
@@ -1020,18 +898,15 @@ class SteamOvenMicroProgramId(MieleEnum, missing_to_none=True):
|
||||
gilt_head_bream_fillet = 2220
|
||||
codfish_piece = 2221, 2232
|
||||
codfish_fillet = 2222, 2231
|
||||
trout = 2224
|
||||
pike_fillet = 2225
|
||||
pike_piece = 2226
|
||||
halibut_fillet_2_cm = 2227
|
||||
halibut_fillet_3_cm = 2230
|
||||
carp = 2233
|
||||
salmon_fillet_2_cm = 2234
|
||||
salmon_fillet_3_cm = 2235
|
||||
salmon_steak_2_cm = 2238
|
||||
salmon_steak_3_cm = 2239
|
||||
salmon_piece = 2240
|
||||
salmon_trout = 2241
|
||||
iridescent_shark_fillet = 2244
|
||||
red_snapper_fillet_2_cm = 2245
|
||||
red_snapper_fillet_3_cm = 2248
|
||||
@@ -1268,6 +1143,116 @@ class SteamOvenMicroProgramId(MieleEnum, missing_to_none=True):
|
||||
round_grain_rice_general_rapid_steam_cooking = 3411
|
||||
|
||||
|
||||
class DishWarmerProgramId(MieleEnum, missing_to_none=True):
|
||||
"""Program Id codes for dish warmers."""
|
||||
|
||||
no_program = 0, -1
|
||||
warm_cups_glasses = 1
|
||||
warm_dishes_plates = 2
|
||||
keep_warm = 3
|
||||
slow_roasting = 4
|
||||
|
||||
|
||||
class RobotVacuumCleanerProgramId(MieleEnum, missing_to_none=True):
|
||||
"""Program Id codes for robot vacuum cleaners."""
|
||||
|
||||
no_program = 0, -1
|
||||
auto = 1
|
||||
spot = 2
|
||||
turbo = 3
|
||||
silent = 4
|
||||
|
||||
|
||||
class CoffeeSystemProgramId(MieleEnum, missing_to_none=True):
|
||||
"""Program Id codes for coffee systems."""
|
||||
|
||||
no_program = 0, -1
|
||||
|
||||
check_appliance = 17004
|
||||
|
||||
# profile 1
|
||||
ristretto = 24000, 24032, 24064, 24096, 24128
|
||||
espresso = 24001, 24033, 24065, 24097, 24129
|
||||
coffee = 24002, 24034, 24066, 24098, 24130
|
||||
long_coffee = 24003, 24035, 24067, 24099, 24131
|
||||
cappuccino = 24004, 24036, 24068, 24100, 24132
|
||||
cappuccino_italiano = 24005, 24037, 24069, 24101, 24133
|
||||
latte_macchiato = 24006, 24038, 24070, 24102, 24134
|
||||
espresso_macchiato = 24007, 24039, 24071, 24135
|
||||
cafe_au_lait = 24008, 24040, 24072, 24104, 24136
|
||||
caffe_latte = 24009, 24041, 24073, 24105, 24137
|
||||
flat_white = 24012, 24044, 24076, 24108, 24140
|
||||
very_hot_water = 24013, 24045, 24077, 24109, 24141
|
||||
hot_water = 24014, 24046, 24078, 24110, 24142
|
||||
hot_milk = 24015, 24047, 24079, 24111, 24143
|
||||
milk_foam = 24016, 24048, 24080, 24112, 24144
|
||||
black_tea = 24017, 24049, 24081, 24113, 24145
|
||||
herbal_tea = 24018, 24050, 24082, 24114, 24146
|
||||
fruit_tea = 24019, 24051, 24083, 24115, 24147
|
||||
green_tea = 24020, 24052, 24084, 24116, 24148
|
||||
white_tea = 24021, 24053, 24085, 24117, 24149
|
||||
japanese_tea = 24022, 29054, 24086, 24118, 24150
|
||||
# special programs
|
||||
coffee_pot = 24400
|
||||
barista_assistant = 24407
|
||||
# machine settings menu
|
||||
appliance_settings = (
|
||||
16016, # display brightness
|
||||
16018, # volume
|
||||
16019, # buttons volume
|
||||
16020, # child lock
|
||||
16021, # water hardness
|
||||
16027, # welcome sound
|
||||
16033, # connection status
|
||||
16035, # remote control
|
||||
16037, # remote update
|
||||
24500, # total dispensed
|
||||
24502, # lights appliance on
|
||||
24503, # lights appliance off
|
||||
24504, # turn off lights after
|
||||
24506, # altitude
|
||||
24513, # performance mode
|
||||
24516, # turn off after
|
||||
24537, # advanced mode
|
||||
24542, # tea timer
|
||||
24549, # total coffee dispensed
|
||||
24550, # total tea dispensed
|
||||
24551, # total ristretto
|
||||
24552, # total cappuccino
|
||||
24553, # total espresso
|
||||
24554, # total coffee
|
||||
24555, # total long coffee
|
||||
24556, # total italian cappuccino
|
||||
24557, # total latte macchiato
|
||||
24558, # total caffe latte
|
||||
24560, # total espresso macchiato
|
||||
24562, # total flat white
|
||||
24563, # total coffee with milk
|
||||
24564, # total black tea
|
||||
24565, # total herbal tea
|
||||
24566, # total fruit tea
|
||||
24567, # total green tea
|
||||
24568, # total white tea
|
||||
24569, # total japanese tea
|
||||
24571, # total milk foam
|
||||
24572, # total hot milk
|
||||
24573, # total hot water
|
||||
24574, # total very hot water
|
||||
24575, # counter to descaling
|
||||
24576, # counter to brewing unit degreasing
|
||||
24800, # maintenance
|
||||
24801, # profiles settings menu
|
||||
24813, # add profile
|
||||
)
|
||||
appliance_rinse = 24750, 24759, 24773, 24787, 24788
|
||||
intermediate_rinsing = 24758
|
||||
automatic_maintenance = 24778
|
||||
descaling = 24751
|
||||
brewing_unit_degrease = 24753
|
||||
milk_pipework_rinse = 24754
|
||||
milk_pipework_clean = 24789
|
||||
|
||||
|
||||
PROGRAM_IDS: dict[int, type[MieleEnum]] = {
|
||||
MieleAppliance.WASHING_MACHINE: WashingMachineProgramId,
|
||||
MieleAppliance.TUMBLE_DRYER: TumbleDryerProgramId,
|
||||
@@ -1278,7 +1263,7 @@ PROGRAM_IDS: dict[int, type[MieleEnum]] = {
|
||||
MieleAppliance.STEAM_OVEN_MK2: OvenProgramId,
|
||||
MieleAppliance.STEAM_OVEN: OvenProgramId,
|
||||
MieleAppliance.STEAM_OVEN_COMBI: OvenProgramId,
|
||||
MieleAppliance.STEAM_OVEN_MICRO: SteamOvenMicroProgramId,
|
||||
MieleAppliance.STEAM_OVEN_MICRO: OvenProgramId,
|
||||
MieleAppliance.WASHER_DRYER: WashingMachineProgramId,
|
||||
MieleAppliance.ROBOT_VACUUM_CLEANER: RobotVacuumCleanerProgramId,
|
||||
MieleAppliance.COFFEE_SYSTEM: CoffeeSystemProgramId,
|
||||
|
||||
@@ -474,6 +474,7 @@
|
||||
"drain_spin": "Drain/spin",
|
||||
"drop_cookies_1_tray": "Drop cookies (1 tray)",
|
||||
"drop_cookies_2_trays": "Drop cookies (2 trays)",
|
||||
"drying": "Drying",
|
||||
"duck": "Duck",
|
||||
"dutch_hash": "Dutch hash",
|
||||
"easy_care": "Easy care",
|
||||
|
||||
@@ -1,37 +1,18 @@
|
||||
"""The Mullvad VPN integration."""
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from mullvad_api import MullvadAPI
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import MullvadCoordinator
|
||||
|
||||
PLATFORMS = [Platform.BINARY_SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up Mullvad VPN integration."""
|
||||
|
||||
async def async_get_mullvad_api_data():
|
||||
async with asyncio.timeout(10):
|
||||
api = await hass.async_add_executor_job(MullvadAPI)
|
||||
return api.data
|
||||
|
||||
coordinator = DataUpdateCoordinator(
|
||||
hass,
|
||||
logging.getLogger(__name__),
|
||||
config_entry=entry,
|
||||
name=DOMAIN,
|
||||
update_method=async_get_mullvad_api_data,
|
||||
update_interval=timedelta(minutes=1),
|
||||
)
|
||||
coordinator = MullvadCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
hass.data[DOMAIN] = coordinator
|
||||
|
||||
@@ -9,12 +9,10 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
CoordinatorEntity,
|
||||
DataUpdateCoordinator,
|
||||
)
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import MullvadCoordinator
|
||||
|
||||
BINARY_SENSORS = (
|
||||
BinarySensorEntityDescription(
|
||||
@@ -39,14 +37,14 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
class MullvadBinarySensor(CoordinatorEntity, BinarySensorEntity):
|
||||
class MullvadBinarySensor(CoordinatorEntity[MullvadCoordinator], BinarySensorEntity):
|
||||
"""Represents a Mullvad binary sensor."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: DataUpdateCoordinator,
|
||||
coordinator: MullvadCoordinator,
|
||||
entity_description: BinarySensorEntityDescription,
|
||||
config_entry: ConfigEntry,
|
||||
) -> None:
|
||||
|
||||
38
homeassistant/components/mullvad/coordinator.py
Normal file
38
homeassistant/components/mullvad/coordinator.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""The Mullvad VPN coordinator."""
|
||||
|
||||
import asyncio
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from mullvad_api import MullvadAPI
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MullvadCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
"""Mullvad VPN data update coordinator."""
|
||||
|
||||
config_entry: ConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
"""Initialize the Mullvad coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=DOMAIN,
|
||||
update_interval=timedelta(minutes=1),
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> dict[str, Any]:
|
||||
"""Fetch data from Mullvad API."""
|
||||
async with asyncio.timeout(10):
|
||||
api = await self.hass.async_add_executor_job(MullvadAPI)
|
||||
return api.data
|
||||
@@ -24,7 +24,7 @@ SUBENTRY_TYPE_ZONE = "zone"
|
||||
|
||||
# Defaults
|
||||
DEFAULT_PORT = 4999
|
||||
DEFAULT_SCAN_INTERVAL = timedelta(minutes=1)
|
||||
DEFAULT_SCAN_INTERVAL = timedelta(seconds=5)
|
||||
DEFAULT_INFER_ARMING_STATE = False
|
||||
DEFAULT_ZONE_TYPE = BinarySensorDeviceClass.MOTION
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user