Compare commits

..

2 Commits

SHA1 Message Date
57263bb65a goe_charger cleanups and improvements 2021-09-15 21:25:23 +02:00
86533e3599 Added goe_charger 2021-09-13 12:27:15 +02:00
26443 changed files with 760878 additions and 1906981 deletions


@@ -1,158 +0,0 @@
# Defines a list of files that are part of main core of Home Assistant.
# Changes to these files/filters define how our CI test suite is ran.
core: &core
- homeassistant/*.py
- homeassistant/auth/**
- homeassistant/helpers/**
- homeassistant/package_constraints.txt
- homeassistant/util/**
- pyproject.toml
- requirements.txt
- setup.cfg
# Our base platforms, that are used by other integrations
base_platforms: &base_platforms
- homeassistant/components/air_quality/**
- homeassistant/components/alarm_control_panel/**
- homeassistant/components/binary_sensor/**
- homeassistant/components/button/**
- homeassistant/components/calendar/**
- homeassistant/components/camera/**
- homeassistant/components/climate/**
- homeassistant/components/cover/**
- homeassistant/components/date/**
- homeassistant/components/datetime/**
- homeassistant/components/device_tracker/**
- homeassistant/components/diagnostics/**
- homeassistant/components/event/**
- homeassistant/components/fan/**
- homeassistant/components/geo_location/**
- homeassistant/components/humidifier/**
- homeassistant/components/image/**
- homeassistant/components/image_processing/**
- homeassistant/components/lawn_mower/**
- homeassistant/components/light/**
- homeassistant/components/lock/**
- homeassistant/components/media_player/**
- homeassistant/components/notify/**
- homeassistant/components/number/**
- homeassistant/components/remote/**
- homeassistant/components/scene/**
- homeassistant/components/select/**
- homeassistant/components/sensor/**
- homeassistant/components/siren/**
- homeassistant/components/stt/**
- homeassistant/components/switch/**
- homeassistant/components/text/**
- homeassistant/components/time/**
- homeassistant/components/todo/**
- homeassistant/components/tts/**
- homeassistant/components/update/**
- homeassistant/components/vacuum/**
- homeassistant/components/water_heater/**
- homeassistant/components/weather/**
# Extra components that trigger the full suite
components: &components
- homeassistant/components/alexa/**
- homeassistant/components/application_credentials/**
- homeassistant/components/assist_pipeline/**
- homeassistant/components/auth/**
- homeassistant/components/automation/**
- homeassistant/components/backup/**
- homeassistant/components/bluetooth/**
- homeassistant/components/cloud/**
- homeassistant/components/config/**
- homeassistant/components/configurator/**
- homeassistant/components/conversation/**
- homeassistant/components/demo/**
- homeassistant/components/device_automation/**
- homeassistant/components/dhcp/**
- homeassistant/components/discovery/**
- homeassistant/components/energy/**
- homeassistant/components/ffmpeg/**
- homeassistant/components/frontend/**
- homeassistant/components/google_assistant/**
- homeassistant/components/group/**
- homeassistant/components/hassio/**
- homeassistant/components/homeassistant/**
- homeassistant/components/http/**
- homeassistant/components/image/**
- homeassistant/components/input_boolean/**
- homeassistant/components/input_button/**
- homeassistant/components/input_datetime/**
- homeassistant/components/input_number/**
- homeassistant/components/input_select/**
- homeassistant/components/input_text/**
- homeassistant/components/logbook/**
- homeassistant/components/logger/**
- homeassistant/components/lovelace/**
- homeassistant/components/media_source/**
- homeassistant/components/mjpeg/**
- homeassistant/components/modbus/**
- homeassistant/components/mqtt/**
- homeassistant/components/network/**
- homeassistant/components/onboarding/**
- homeassistant/components/otp/**
- homeassistant/components/persistent_notification/**
- homeassistant/components/person/**
- homeassistant/components/recorder/**
- homeassistant/components/recovery_mode/**
- homeassistant/components/repairs/**
- homeassistant/components/script/**
- homeassistant/components/shopping_list/**
- homeassistant/components/ssdp/**
- homeassistant/components/stream/**
- homeassistant/components/sun/**
- homeassistant/components/system_health/**
- homeassistant/components/tag/**
- homeassistant/components/template/**
- homeassistant/components/timer/**
- homeassistant/components/usb/**
- homeassistant/components/webhook/**
- homeassistant/components/websocket_api/**
- homeassistant/components/zeroconf/**
- homeassistant/components/zone/**
# Testing related files that affect the whole test/linting suite
tests: &tests
- codecov.yaml
- pylint/**
- requirements_test_pre_commit.txt
- requirements_test.txt
- tests/auth/**
- tests/backports/**
- tests/common.py
- tests/components/history/**
- tests/components/logbook/**
- tests/components/recorder/**
- tests/components/sensor/**
- tests/conftest.py
- tests/hassfest/**
- tests/helpers/**
- tests/ignore_uncaught_exceptions.py
- tests/mock/**
- tests/pylint/**
- tests/scripts/**
- tests/syrupy.py
- tests/test_util/**
- tests/testing_config/**
- tests/util/**
other: &other
- .github/workflows/**
- homeassistant/scripts/**
requirements: &requirements
- .github/workflows/**
- homeassistant/package_constraints.txt
- requirements*.txt
- pyproject.toml
any:
- *base_platforms
- *components
- *core
- *other
- *requirements
- *tests
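
For orientation: a filter file like the one above is typically consumed by a paths-filter step in a GitHub Actions workflow, which checks the files touched by a push or pull request against each named group and exposes a true/false output per group; the YAML anchors (&core, *core, ...) resolve to plain glob lists before the action reads them. A minimal, hypothetical wiring — the action choice, job names and the filter file name are assumptions, since the diff header omits the file name — could look like:

jobs:
  changes:
    runs-on: ubuntu-latest
    outputs:
      core: ${{ steps.filter.outputs.core }}
      any: ${{ steps.filter.outputs.any }}
    steps:
      - uses: actions/checkout@v4
      - id: filter
        uses: dorny/paths-filter@v2
        with:
          # Path to a filter file with the groups shown above (file name assumed)
          filters: .core_files.yaml
  full-suite:
    needs: changes
    # Only run the expensive jobs when core files changed
    if: needs.changes.outputs.core == 'true'
    runs-on: ubuntu-latest
    steps:
      - run: echo "core files changed - run the full test suite"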

File diff suppressed because it is too large.


@@ -5,46 +5,38 @@
"postCreateCommand": "script/setup",
"postStartCommand": "script/bootstrap",
"containerEnv": { "DEVCONTAINER": "1" },
// Port 5683 udp is used by Shelly integration
"appPort": ["8123:8123", "5683:5683/udp"],
"appPort": 8123,
"runArgs": ["-e", "GIT_EDITOR=code --wait"],
"customizations": {
"vscode": {
"extensions": [
"charliermarsh.ruff",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github"
],
// Please keep this file in sync with settings in home-assistant/.vscode/settings.default.json
"settings": {
"python.pythonPath": "/usr/local/bin/python",
"python.testing.pytestArgs": ["--no-cov"],
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"yaml.customTags": [
"!input scalar",
"!secret scalar",
"!include_dir_named scalar",
"!include_dir_list scalar",
"!include_dir_merge_list scalar",
"!include_dir_merge_named scalar"
],
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}
"extensions": [
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode"
],
// Please keep this file in sync with settings in home-assistant/.vscode/settings.default.json
"settings": {
"python.pythonPath": "/usr/local/bin/python",
"python.linting.pylintEnabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "black",
"python.testing.pytestArgs": ["--no-cov"],
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"yaml.customTags": [
"!input scalar",
"!secret scalar",
"!include_dir_named scalar",
"!include_dir_list scalar",
"!include_dir_merge_list scalar",
"!include_dir_merge_named scalar"
]
}
}
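
A note on the yaml.customTags block above: the listed tags are Home Assistant's YAML extensions, and registering them keeps the redhat.vscode-yaml extension from flagging them as unknown tags. An illustrative configuration using two of them — the file contents below are hypothetical, not taken from this diff — might be:

# configuration.yaml (illustrative)
mqtt:
  broker: 192.168.1.10
  username: homeassistant
  password: !secret mqtt_password        # looked up in secrets.yaml at load time
automation: !include_dir_merge_list automations/

# secrets.yaml (illustrative)
mqtt_password: "super-secret"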


@@ -9,6 +9,7 @@ docs
.vscode
# Test related files
.tox
tests
# Other virtualization methods

.gitattributes vendored

@@ -8,6 +8,5 @@
*.png binary
*.zip binary
*.mp3 binary
*.pcm binary
Dockerfile.dev linguist-language=Dockerfile


@@ -16,7 +16,7 @@
<!--
Provide details about the versions you are using, which helps us to reproduce
and find the issue quicker. Version information is found in the
Home Assistant frontend: Settings -> About.
Home Assistant frontend: Configuration -> Info.
-->
- Home Assistant Core release with the issue:


@@ -15,7 +15,7 @@ body:
attributes:
label: The problem
description: >-
Describe the issue you are experiencing here, to communicate to the
Describe the issue you are experiencing here to communicate to the
maintainers. Tell us what you were trying to do and what happened.
Provide a clear and concise description of what the problem is.
@@ -28,12 +28,10 @@ body:
validations:
required: true
attributes:
label: What version of Home Assistant Core has the issue?
label: What is version of Home Assistant Core has the issue?
placeholder: core-
description: >
Can be found in: [Settings ⇒ System ⇒ Repairs ⇒ Three Dots in Upper Right ⇒ System information](https://my.home-assistant.io/redirect/system_health/).
[![Open your Home Assistant instance and show the system information.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
Can be found in the Configuration panel -> Info.
- type: input
attributes:
label: What was the last working version of Home Assistant Core?
@@ -46,9 +44,7 @@ body:
attributes:
label: What type of installation are you running?
description: >
Can be found in: [Settings ⇒ System ⇒ Repairs ⇒ Three Dots in Upper Right ⇒ System information](https://my.home-assistant.io/redirect/system_health/).
[![Open your Home Assistant instance and show the system information.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
If you don't know, you can find it in: Configuration panel -> Info.
options:
- Home Assistant OS
- Home Assistant Container
@@ -59,15 +55,15 @@ body:
attributes:
label: Integration causing the issue
description: >
The name of the integration, for example Automation or Philips Hue.
The name of the integration, for example, Automation or Philips Hue.
- type: input
id: integration_link
attributes:
label: Link to integration documentation on our website
placeholder: "https://www.home-assistant.io/integrations/..."
description: |
Providing a link [to the documentation][docs] helps us categorize the issue and might speed up the
investigation by automatically informing a contributor, while also providing a useful reference for others.
Providing a link [to the documentation][docs] help us categorizing the
issue, while providing a useful reference at the same time.
[docs]: https://www.home-assistant.io/integrations
@@ -75,23 +71,12 @@ body:
attributes:
value: |
# Details
- type: textarea
attributes:
label: Diagnostics information
placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
description: >-
Many integrations provide the ability to [download diagnostic data](https://www.home-assistant.io/docs/configuration/troubleshooting/#debug-logs-and-diagnostics).
**It would really help if you could download the diagnostics data for the device you are having issues with,
and <ins>drag-and-drop that file into the textbox below.</ins>**
It generally allows pinpointing defects and thus resolving issues faster.
- type: textarea
attributes:
label: Example YAML snippet
description: |
If applicable, please provide an example piece of YAML that can help reproduce this problem.
This can be from an automation, script, scene or configuration.
If this issue has an example piece of YAML that can help reproducing this problem, please provide.
This can be an piece of YAML from, e.g., an automation, script, scene or configuration.
render: yaml
- type: textarea
attributes:
@@ -103,3 +88,5 @@ body:
label: Additional information
description: >
If you have any additional information for us, use the field below.
Please note, you can attach screenshots or screen recordings here, by
dragging and dropping files in the field below.


@@ -1,6 +1,6 @@
blank_issues_enabled: false
contact_links:
- name: Report a bug with the UI, Frontend or Dashboards
- name: Report a bug with the UI, Frontend or Lovelace
url: https://github.com/home-assistant/frontend/issues
about: This is the issue tracker for our backend. Please report issues with the UI in the frontend repository.
- name: Report incorrect or missing information on our website


@@ -33,7 +33,6 @@
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New integration (thank you!)
- [ ] New feature (which adds functionality to an existing integration)
- [ ] Deprecation (breaking change to happen in the future)
- [ ] Breaking change (fix/feature causing existing functionality to break)
- [ ] Code quality improvements to existing code or addition of tests
@@ -59,8 +58,7 @@
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] I have followed the [perfect PR recommendations][perfect-pr]
- [ ] The code has been formatted using Ruff (`ruff format homeassistant tests`)
- [ ] The code has been formatted using Black (`black --fast homeassistant tests`)
- [ ] Tests have been added to verify that the new code works.
If user exposed functionality or configuration variables are added/changed:
@@ -76,6 +74,18 @@ If the code communicates with devices, web services, or third-party tools:
- [ ] For the updated dependencies - a link to the changelog, or at minimum a diff between library versions is added to the PR description.
- [ ] Untested files have been added to `.coveragerc`.
The integration reached or maintains the following [Integration Quality Scale][quality-scale]:
<!--
The Integration Quality Scale scores an integration on the code quality
and user experience. Each level of the quality scale consists of a list
of requirements. We highly recommend getting your integration scored!
-->
- [ ] No score or internal
- [ ] 🥈 Silver
- [ ] 🥇 Gold
- [ ] 🏆 Platinum
<!--
This project is very active and we have a high turnover of pull requests.
@@ -97,15 +107,14 @@ To help with the load of incoming pull requests:
- [ ] I have reviewed two other [open pull requests][prs] in this repository.
[prs]: https://github.com/home-assistant/core/pulls?q=is%3Aopen+is%3Apr+-author%3A%40me+-draft%3Atrue+-label%3Awaiting-for-upstream+sort%3Acreated-desc+review%3Anone+-status%3Afailure
[prs]: https://github.com/home-assistant/core/pulls?q=is%3Aopen+is%3Apr+-author%3A%40me+-draft%3Atrue+-label%3Awaiting-for-upstream+sort%3Acreated-desc+review%3Anone
<!--
Thank you for contributing <3
Below, some useful links you could explore:
-->
[dev-checklist]: https://developers.home-assistant.io/docs/development_checklist/
[manifest-docs]: https://developers.home-assistant.io/docs/creating_integration_manifest/
[quality-scale]: https://developers.home-assistant.io/docs/integration_quality_scale_index/
[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[manifest-docs]: https://developers.home-assistant.io/docs/en/creating_integration_manifest.html
[quality-scale]: https://developers.home-assistant.io/docs/en/next/integration_quality_scale_index.html
[docs-repository]: https://github.com/home-assistant/home-assistant.io
[perfect-pr]: https://developers.home-assistant.io/docs/review-process/#creating-the-perfect-pr

Two binary image files removed in this compare (contents not shown); previous sizes 65 KiB and 115 KiB.

.github/move.yml vendored Normal file

@@ -0,0 +1,13 @@
# Configuration for move-issues - https://github.com/dessant/move-issues
# Delete the command comment. Ignored when the comment also contains other content
deleteCommand: true
# Close the source issue after moving
closeSourceIssue: true
# Lock the source issue after moving
lockSourceIssue: false
# Set custom aliases for targets
# aliases:
# r: repo
# or: owner/repo
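
For context on the options above (summarised from the move-issues documentation, not stated in this diff): the bot is triggered by a maintainer comment of the form `/move to owner/repo`; deleteCommand: true removes that trigger comment after the move (unless it contains other content), closeSourceIssue: true closes the issue in this repository once it has been copied to the target, and the commented-out aliases would allow shorthand targets such as `/move to r`.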


@@ -10,13 +10,11 @@ on:
env:
BUILD_TYPE: core
DEFAULT_PYTHON: "3.12"
PIP_TIMEOUT: 60
DEFAULT_PYTHON: 3.8
jobs:
init:
name: Initialize build
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
outputs:
architectures: ${{ steps.info.outputs.architectures }}
@@ -25,12 +23,12 @@ jobs:
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v2.3.4
with:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v2.2.2
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -49,33 +47,38 @@ jobs:
with:
ignore-dev: true
- name: Generate meta info
shell: bash
run: |
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > OFFICIAL_IMAGE
- name: Signing meta info file
uses: home-assistant/actions/helpers/codenotary@master
with:
source: file://${{ github.workspace }}/OFFICIAL_IMAGE
user: ${{ secrets.VCN_USER }}
password: ${{ secrets.VCN_PASSWORD }}
organisation: home-assistant.io
build_python:
name: Build PyPi package
environment: ${{ needs.init.outputs.channel }}
needs: ["init", "build_base"]
needs: init
runs-on: ubuntu-latest
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
if: needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v2.3.4
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v2.2.2
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download Translations
run: python3 -m script.translations download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
- name: Build package
shell: bash
run: |
# Remove dist, build, and homeassistant.egg-info
# when build locally for testing!
pip install twine build
python -m build
pip install twine wheel
python setup.py sdist bdist_wheel
- name: Upload package
shell: bash
@@ -87,45 +90,18 @@ jobs:
build_base:
name: Build ${{ matrix.arch }} base core image
if: github.repository_owner == 'home-assistant'
needs: init
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
strategy:
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v3.1.2
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/frontend
branch: dev
workflow: nightly.yaml
workflow_conclusion: success
name: wheels
- name: Download nightly wheels of intents
if: needs.init.outputs.channel == 'dev'
uses: dawidd6/action-download-artifact@v3.1.2
with:
github_token: ${{secrets.GITHUB_TOKEN}}
repo: home-assistant/intents-package
branch: main
workflow: nightly.yaml
workflow_conclusion: success
name: package
uses: actions/checkout@v2.3.4
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@v5.0.0
uses: actions/setup-python@v2.2.2
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -133,109 +109,44 @@ jobs:
if: needs.init.outputs.channel == 'dev'
shell: bash
run: |
python3 -m pip install packaging tomli
python3 -m pip install packaging
python3 -m pip install .
version="$(python3 script/version_bump.py nightly)"
if [[ "$(ls home_assistant_frontend*.whl)" =~ ^home_assistant_frontend-(.*)-py3-none-any.whl$ ]]; then
echo "Found frontend wheel, setting version to: ${BASH_REMATCH[1]}"
frontend_version="${BASH_REMATCH[1]}" yq \
--inplace e -o json \
'.requirements = ["home-assistant-frontend=="+env(frontend_version)]' \
homeassistant/components/frontend/manifest.json
sed -i "s|home-assistant-frontend==.*|home-assistant-frontend==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt
python -m script.gen_requirements_all
fi
if [[ "$(ls home_assistant_intents*.whl)" =~ ^home_assistant_intents-(.*)-py3-none-any.whl$ ]]; then
echo "Found intents wheel, setting version to: ${BASH_REMATCH[1]}"
yq \
--inplace e -o json \
'del(.requirements[] | select(contains("home-assistant-intents")))' \
homeassistant/components/conversation/manifest.json
intents_version="${BASH_REMATCH[1]}" yq \
--inplace e -o json \
'.requirements += ["home-assistant-intents=="+env(intents_version)]' \
homeassistant/components/conversation/manifest.json
sed -i "s|home-assistant-intents==.*|home-assistant-intents==${BASH_REMATCH[1]}|" \
homeassistant/package_constraints.txt
python -m script.gen_requirements_all
fi
- name: Adjustments for armhf
if: matrix.arch == 'armhf'
run: |
# Pandas has issues building on armhf, it is expected they
# will drop the platform in the near future (they consider it
# "flimsy" on 386). The following packages depend on pandas,
# so we comment them out.
sed -i "s|env-canada|# env-canada|g" requirements_all.txt
sed -i "s|noaa-coops|# noaa-coops|g" requirements_all.txt
sed -i "s|pyezviz|# pyezviz|g" requirements_all.txt
sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
- name: Adjustments for 64-bit
if: matrix.arch == 'amd64' || matrix.arch == 'aarch64'
run: |
# Some speedups are only available on 64-bit, and since
# we build 32bit images on 64bit hosts, we only enable
# the speed ups on 64bit since the wheels for 32bit
# are not available.
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" requirements_all.txt
- name: Download Translations
run: python3 -m script.translations download
env:
LOKALISE_TOKEN: ${{ secrets.LOKALISE_TOKEN }}
python3 script/version_bump.py nightly
version="$(python setup.py -V)"
- name: Write meta info file
shell: bash
run: |
echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
- name: Login to DockerHub
uses: docker/login-action@v1.10.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.0.0
uses: docker/login-action@v1.10.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2024.01.0
uses: home-assistant/builder@2021.07.0
with:
args: |
$BUILD_ARGS \
--${{ matrix.arch }} \
--cosign \
--target /data \
--with-codenotary "${{ secrets.VCN_USER }}" "${{ secrets.VCN_PASSWORD }}" "${{ secrets.VCN_ORG }}" \
--validate-from "${{ secrets.VCN_ORG }}" \
--generic ${{ needs.init.outputs.version }}
- name: Archive translations
shell: bash
run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -
- name: Upload translations
uses: actions/upload-artifact@v3
with:
name: translations
path: translations.tar.gz
if-no-files-found: error
build_machine:
name: Build ${{ matrix.machine }} machine core image
if: github.repository_owner == 'home-assistant'
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
strategy:
matrix:
machine:
@@ -244,7 +155,6 @@ jobs:
- khadas-vim3
- odroid-c2
- odroid-c4
- odroid-m1
- odroid-n2
- odroid-xu
- qemuarm
@@ -257,50 +167,41 @@ jobs:
- raspberrypi3-64
- raspberrypi4
- raspberrypi4-64
- raspberrypi5-64
- tinker
- yellow
- green
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v2.3.4
- name: Set build additional args
run: |
# Create general tags
if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
echo "BUILD_ARGS=--additional-tag dev" >> $GITHUB_ENV
elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
echo "BUILD_ARGS=--additional-tag beta" >> $GITHUB_ENV
else
echo "BUILD_ARGS=--additional-tag stable" >> $GITHUB_ENV
fi
- name: Login to DockerHub
uses: docker/login-action@v1.10.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.0.0
uses: docker/login-action@v1.10.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build base image
uses: home-assistant/builder@2024.01.0
uses: home-assistant/builder@2021.07.0
with:
args: |
$BUILD_ARGS \
--target /data/machine \
--cosign \
--with-codenotary "${{ secrets.VCN_USER }}" "${{ secrets.VCN_PASSWORD }}" "${{ secrets.VCN_ORG }}" \
--validate-from "${{ secrets.VCN_ORG }}" \
--machine "${{ needs.init.outputs.version }}=${{ matrix.machine }}"
publish_ha:
name: Publish version files
environment: ${{ needs.init.outputs.channel }}
if: github.repository_owner == 'home-assistant'
needs: ["init", "build_machine"]
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v2.3.4
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -327,135 +228,108 @@ jobs:
channel: beta
publish_container:
name: Publish meta container for ${{ matrix.registry }}
environment: ${{ needs.init.outputs.channel }}
if: github.repository_owner == 'home-assistant'
name: Publish meta container
needs: ["init", "build_base"]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
id-token: write
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
- name: Install Cosign
uses: sigstore/cosign-installer@v3.4.0
with:
cosign-release: "v2.0.2"
uses: actions/checkout@v2.3.4
- name: Login to DockerHub
uses: docker/login-action@v3.0.0
uses: docker/login-action@v1.10.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
uses: docker/login-action@v3.0.0
uses: docker/login-action@v1.10.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Install VCN tools
uses: home-assistant/actions/helpers/vcn@master
- name: Build Meta Image
shell: bash
run: |
export DOCKER_CLI_EXPERIMENTAL=enabled
function create_manifest() {
local tag_l=${1}
local tag_r=${2}
local docker_reg=${1}
local tag_l=${2}
local tag_r=${3}
for registry in "ghcr.io/home-assistant" "docker.io/homeassistant"
do
docker manifest create "${docker_reg}/home-assistant:${tag_l}" \
"${docker_reg}/amd64-homeassistant:${tag_r}" \
"${docker_reg}/i386-homeassistant:${tag_r}" \
"${docker_reg}/armhf-homeassistant:${tag_r}" \
"${docker_reg}/armv7-homeassistant:${tag_r}" \
"${docker_reg}/aarch64-homeassistant:${tag_r}"
docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/i386-homeassistant:${tag_r}" \
"${registry}/armhf-homeassistant:${tag_r}" \
"${registry}/armv7-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"
docker manifest annotate "${docker_reg}/home-assistant:${tag_l}" \
"${docker_reg}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
docker manifest annotate "${docker_reg}/home-assistant:${tag_l}" \
"${docker_reg}/i386-homeassistant:${tag_r}" \
--os linux --arch 386
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/i386-homeassistant:${tag_r}" \
--os linux --arch 386
docker manifest annotate "${docker_reg}/home-assistant:${tag_l}" \
"${docker_reg}/armhf-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v6
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armhf-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v6
docker manifest annotate "${docker_reg}/home-assistant:${tag_l}" \
"${docker_reg}/armv7-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v7
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/armv7-homeassistant:${tag_r}" \
--os linux --arch arm --variant=v7
docker manifest annotate "${docker_reg}/home-assistant:${tag_l}" \
"${docker_reg}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
docker manifest push --purge "${registry}/home-assistant:${tag_l}"
cosign sign --yes "${registry}/home-assistant:${tag_l}"
done
docker manifest push --purge "${docker_reg}/home-assistant:${tag_l}"
}
function validate_image() {
local image=${1}
if ! cosign verify --certificate-oidc-issuer https://token.actions.githubusercontent.com --certificate-identity-regexp https://github.com/home-assistant/core/.* "${image}"; then
state="$(vcn authenticate --org home-assistant.io --output json docker://${image} | jq '.verification.status // 2')"
if [[ "${state}" != "0" ]]; then
echo "Invalid signature!"
exit 1
fi
}
function push_dockerhub() {
local image=${1}
local tag=${2}
for docker_reg in "homeassistant" "ghcr.io/home-assistant"; do
docker pull "${docker_reg}/amd64-homeassistant:${{ needs.init.outputs.version }}"
docker pull "${docker_reg}/i386-homeassistant:${{ needs.init.outputs.version }}"
docker pull "${docker_reg}/armhf-homeassistant:${{ needs.init.outputs.version }}"
docker pull "${docker_reg}/armv7-homeassistant:${{ needs.init.outputs.version }}"
docker pull "${docker_reg}/aarch64-homeassistant:${{ needs.init.outputs.version }}"
docker tag "ghcr.io/home-assistant/${image}:${tag}" "docker.io/homeassistant/${image}:${tag}"
docker push "docker.io/homeassistant/${image}:${tag}"
cosign sign --yes "docker.io/homeassistant/${image}:${tag}"
}
validate_image "${docker_reg}/amd64-homeassistant:${{ needs.init.outputs.version }}"
validate_image "${docker_reg}/i386-homeassistant:${{ needs.init.outputs.version }}"
validate_image "${docker_reg}/armhf-homeassistant:${{ needs.init.outputs.version }}"
validate_image "${docker_reg}/armv7-homeassistant:${{ needs.init.outputs.version }}"
validate_image "${docker_reg}/aarch64-homeassistant:${{ needs.init.outputs.version }}"
# Pull images from github container registry and verify signature
docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
# Create version tag
create_manifest "${docker_reg}" "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
# Create general tags
if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
create_manifest "${docker_reg}" "dev" "${{ needs.init.outputs.version }}"
elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
create_manifest "${docker_reg}" "beta" "${{ needs.init.outputs.version }}"
create_manifest "${docker_reg}" "rc" "${{ needs.init.outputs.version }}"
else
create_manifest "${docker_reg}" "stable" "${{ needs.init.outputs.version }}"
create_manifest "${docker_reg}" "latest" "${{ needs.init.outputs.version }}"
create_manifest "${docker_reg}" "beta" "${{ needs.init.outputs.version }}"
create_manifest "${docker_reg}" "rc" "${{ needs.init.outputs.version }}"
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
# Create version tag
create_manifest "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"
# Create general tags
if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
create_manifest "dev" "${{ needs.init.outputs.version }}"
elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
create_manifest "beta" "${{ needs.init.outputs.version }}"
create_manifest "rc" "${{ needs.init.outputs.version }}"
else
create_manifest "stable" "${{ needs.init.outputs.version }}"
create_manifest "latest" "${{ needs.init.outputs.version }}"
create_manifest "beta" "${{ needs.init.outputs.version }}"
create_manifest "rc" "${{ needs.init.outputs.version }}"
# Create series version tag (e.g. 2021.6)
v="${{ needs.init.outputs.version }}"
create_manifest "${v%.*}" "${{ needs.init.outputs.version }}"
fi
# Create series version tag (e.g. 2021.6)
v="${{ needs.init.outputs.version }}"
create_manifest "${docker_reg}" "${v%.*}" "${{ needs.init.outputs.version }}"
fi
done
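
A worked example of the series-tag logic above: shell's `${v%.*}` strips the shortest suffix matching `.*`, so for v=2021.9.1 it yields 2021.9; the final create_manifest call therefore publishes a floating 2021.9 manifest alongside the exact 2021.9.1 tag, both pointing at the same per-architecture images. The version number is illustrative, not taken from this diff.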

File diff suppressed because it is too large.


@@ -1,34 +0,0 @@
name: "CodeQL"
# yamllint disable-line rule:truthy
on:
schedule:
- cron: "30 18 * * 4"
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
timeout-minutes: 360
permissions:
actions: read
contents: read
security-events: write
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.2
- name: Initialize CodeQL
uses: github/codeql-action/init@v3.24.7
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.24.7
with:
category: "/language:python"


@@ -7,15 +7,14 @@ on:
jobs:
lock:
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5.0.1
- uses: dessant/lock-threads@v2.1.2
with:
github-token: ${{ github.token }}
issue-inactive-days: "30"
exclude-issue-created-before: "2020-10-01T00:00:00Z"
issue-lock-inactive-days: "30"
issue-exclude-created-before: "2020-10-01T00:00:00Z"
issue-lock-reason: ""
pr-inactive-days: "1"
exclude-pr-created-before: "2020-11-01T00:00:00Z"
pr-lock-inactive-days: "1"
pr-exclude-created-before: "2020-11-01T00:00:00Z"
pr-lock-reason: ""

.github/workflows/matchers/flake8.json vendored Normal file

@@ -0,0 +1,30 @@
{
"problemMatcher": [
{
"owner": "flake8-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([EF]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
},
{
"owner": "flake8-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDNW]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
}
]
}
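
For reference — the sample path and message here are invented for illustration — a flake8 diagnostic such as homeassistant/core.py:120:1: E302 expected 2 blank lines, found 1 is captured by the first pattern above as an error (file, line, column, then the E/F-coded message), while codes beginning with C, D, N or W fall through to the second pattern and surface as warnings.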


@@ -1,18 +0,0 @@
{
"problemMatcher": [
{
"owner": "python",
"pattern": [
{
"regexp": "^=+ slowest durations =+$"
},
{
"regexp": "^((.*s)\\s(call|setup|teardown)\\s+(.*)::(.*))$",
"message": 1,
"file": 2,
"loop": true
}
]
}
]
}
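
For reference — the sample line here is invented for illustration — this matcher, removed in this compare, keyed off pytest's slowest-durations report: after a banner like ==== slowest durations ====, lines such as 0.52s call     tests/components/demo/test_init.py::test_setup were matched in a loop, with the whole line reported as the message.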


@@ -8,94 +8,84 @@ on:
jobs:
stale:
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
steps:
# The 60 day stale policy for PRs
# The 90 day stale policy
# Used for:
# - PRs
# - Issues & PRs
# - No PRs marked as no-stale
# - No issues (-1)
- name: 60 days stale PRs policy
uses: actions/stale@v9.0.0
# - No issues marked as no-stale or help-wanted
- name: 90 days stale issues & PRs policy
uses: actions/stale@v4
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60
days-before-close: 7
days-before-issue-stale: -1
days-before-issue-close: -1
operations-per-run: 150
remove-stale-when-updated: true
stale-pr-label: "stale"
exempt-pr-labels: "no-stale"
stale-pr-message: >
There hasn't been any activity on this pull request recently. This
pull request has been automatically marked as stale because of that
and will be closed if no further activity occurs within 7 days.
If you are the author of this PR, please leave a comment if you want
to keep it open. Also, please rebase your PR onto the latest dev
branch to ensure that it's up to date with the latest changes.
Thank you for your contribution!
# Generate a token for the GitHub App, we use this method to avoid
# hitting API limits for our GitHub actions + have a higher rate limit.
# This is only used for issues.
- name: Generate app token
id: token
# Pinned to a specific version of the action for security reasons
# v1.7.0
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
with:
app_id: ${{ secrets.ISSUE_TRIAGE_APP_ID }}
private_key: ${{ secrets.ISSUE_TRIAGE_APP_PEM }}
# The 90 day stale policy for issues
# Used for:
# - Issues
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@v9.0.0
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90
days-before-close: 7
days-before-pr-stale: -1
days-before-pr-close: -1
operations-per-run: 250
remove-stale-when-updated: true
stale-issue-label: "stale"
exempt-issue-labels: "no-stale,help-wanted,needs-more-information"
stale-issue-message: >
There hasn't been any activity on this issue recently. Due to the
high number of incoming GitHub notifications, we have to clean some
of the old issues, as many of them have already been resolved with
the latest updates.
Please make sure to update to the latest Home Assistant version and
check if that solves the issue. Let us know if that works for you by
adding a comment 👍
This issue has now been marked as stale and will be closed if no
further activity occurs. Thank you for your contributions.
# The 30 day stale policy for issues
# Used for:
# - Issues that are pending more information (incomplete issues)
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@v9.0.0
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"
days-before-stale: 14
days-before-close: 7
days-before-pr-stale: -1
days-before-pr-close: -1
operations-per-run: 250
operations-per-run: 150
remove-stale-when-updated: true
stale-issue-label: "stale"
exempt-issue-labels: "no-stale,help-wanted"
stale-issue-message: >
There hasn't been any activity on this issue recently. Due to the
high number of incoming GitHub notifications, we have to clean some
of the old issues, as many of them have already been resolved with
the latest updates.
Please make sure to update to the latest Home Assistant version and
check if that solves the issue. Let us know if that works for you by
adding a comment 👍
This issue has now been marked as stale and will be closed if no
further activity occurs. Thank you for your contributions.
stale-pr-label: "stale"
exempt-pr-labels: "no-stale"
stale-pr-message: >
There hasn't been any activity on this pull request recently. This
pull request has been automatically marked as stale because of that
and will be closed if no further activity occurs within 7 days.
Thank you for your contributions.
# The 30 day stale policy for PRS
# Used for:
# - PRs
# - No PRs marked as no-stale or new-integrations
# - No issues (-1)
- name: 30 days stale PRs policy
uses: actions/stale@v4
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30
days-before-close: 7
days-before-issue-close: -1
operations-per-run: 50
remove-stale-when-updated: true
stale-pr-label: "stale"
# Exempt new integrations, these often take more time.
# They will automatically be handled by the 90 day version above.
exempt-pr-labels: "no-stale,new-integration"
stale-pr-message: >
There hasn't been any activity on this pull request recently. This
pull request has been automatically marked as stale because of that
and will be closed if no further activity occurs within 7 days.
Thank you for your contributions.
# The 30 day stale policy for issues
# Used for:
# - Issues that are pending more information (incomplete issues)
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@v4
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
only-labels: "needs-more-information"
days-before-stale: 14
days-before-close: 7
days-before-pr-close: -1
operations-per-run: 50
remove-stale-when-updated: true
stale-issue-label: "stale"
exempt-issue-labels: "no-stale,help-wanted"
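
To make the stale windows above concrete: with days-before-stale: 90 and days-before-close: 7, an issue with no activity is labelled stale after 90 days and closed 7 days later; because remove-stale-when-updated is true, any comment in that window clears the label and restarts the countdown from the new activity. The same reading applies to the other day counts used in this file.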

.github/workflows/translations.yaml vendored Normal file

@@ -0,0 +1,64 @@
name: Translations
# yamllint disable-line rule:truthy
on:
workflow_dispatch:
schedule:
- cron: "0 0 * * *"
push:
branches:
- dev
paths:
- "**strings.json"
env:
DEFAULT_PYTHON: 3.8
jobs:
upload:
name: Upload
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Upload Translations
run: |
export LOKALISE_TOKEN="${{ secrets.LOKALISE_TOKEN }}"
python3 -m script.translations upload
download:
name: Download
needs: upload
if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Download Translations
run: |
export LOKALISE_TOKEN="${{ secrets.LOKALISE_TOKEN }}"
python3 -m script.translations download
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
with:
name: GitHub Action
email: github-action@users.noreply.github.com
- name: Update translation
run: |
git add homeassistant
git commit -am "[ci skip] Translation update"
git push


@@ -1,32 +0,0 @@
name: Translations
# yamllint disable-line rule:truthy
on:
workflow_dispatch:
push:
branches:
- dev
paths:
- "**strings.json"
env:
DEFAULT_PYTHON: "3.11"
jobs:
upload:
name: Upload
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Upload Translations
run: |
export LOKALISE_TOKEN="${{ secrets.LOKALISE_TOKEN }}"
python3 -m script.translations upload


@@ -10,25 +10,18 @@ on:
- dev
- rc
paths:
- ".github/workflows/wheels.yml"
- "homeassistant/package_constraints.txt"
- "requirements_all.txt"
- "requirements.txt"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name}}
cancel-in-progress: true
- "requirements_all.txt"
jobs:
init:
name: Initialize wheels builder
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
outputs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v2.3.4
- name: Get information
id: info
@@ -49,209 +42,125 @@ jobs:
echo "GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=true"
echo "GRPC_PYTHON_BUILD_WITH_CYTHON=true"
echo "GRPC_PYTHON_DISABLE_LIBC_COMPATIBILITY=true"
echo "GRPC_PYTHON_LDFLAGS=-lpthread -Wl,-wrap,memcpy -static-libgcc"
# Fix out of memory issues with rust
echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
# OpenCV headless installation
echo "CI_BUILD=1"
echo "ENABLE_HEADLESS=1"
# Use C-Extension for SQLAlchemy
echo "REQUIRE_SQLALCHEMY_CEXT=1"
) > .env_file
- name: Upload env_file
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v2.2.4
with:
name: env_file
path: ./.env_file
overwrite: true
- name: Upload requirements_diff
uses: actions/upload-artifact@v4.3.1
uses: actions/upload-artifact@v2.2.4
with:
name: requirements_diff
path: ./requirements_diff.txt
overwrite: true
core:
name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
if: github.repository_owner == 'home-assistant'
name: Build wheels with ${{ matrix.tag }} (${{ matrix.arch }}) for core
needs: init
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
abi: ["cp312"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
tag:
- "3.9-alpine3.14"
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v2.3.4
- name: Download env_file
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v2
with:
name: env_file
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v2
with:
name: requirements_diff
- name: Build wheels
uses: home-assistant/wheels@2024.01.0
uses: home-assistant/wheels@2021.07.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
tag: ${{ matrix.tag }}
arch: ${{ matrix.arch }}
wheels-host: wheels.hass.io
wheels-key: ${{ secrets.WHEELS_KEY }}
wheels-user: wheels
env-file: true
apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
apk: "build-base;cmake;git;linux-headers;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev"
pip: "Cython;numpy"
skip-binary: aiohttp
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements-diff: 'requirements_diff.txt'
requirements: "requirements.txt"
integrations:
name: Build wheels ${{ matrix.abi }} for ${{ matrix.arch }}
if: github.repository_owner == 'home-assistant'
name: Build wheels with ${{ matrix.tag }} (${{ matrix.arch }}) for integrations
needs: init
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
abi: ["cp312"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
tag:
- "3.9-alpine3.14"
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.2
uses: actions/checkout@v2.3.4
- name: Download env_file
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v2
with:
name: env_file
- name: Download requirements_diff
uses: actions/download-artifact@v4.1.4
uses: actions/download-artifact@v2
with:
name: requirements_diff
- name: (Un)comment packages
- name: Uncomment packages
run: |
requirement_files="requirements_all.txt requirements_diff.txt"
for requirement_file in ${requirement_files}; do
sed -i "s|# pybluez|pybluez|g" ${requirement_file}
sed -i "s|# bluepy|bluepy|g" ${requirement_file}
sed -i "s|# beacontools|beacontools|g" ${requirement_file}
sed -i "s|# RPi.GPIO|RPi.GPIO|g" ${requirement_file}
sed -i "s|# raspihats|raspihats|g" ${requirement_file}
sed -i "s|# rpi-rf|rpi-rf|g" ${requirement_file}
sed -i "s|# blinkt|blinkt|g" ${requirement_file}
sed -i "s|# fritzconnection|fritzconnection|g" ${requirement_file}
sed -i "s|# pyuserinput|pyuserinput|g" ${requirement_file}
sed -i "s|# evdev|evdev|g" ${requirement_file}
sed -i "s|# smbus-cffi|smbus-cffi|g" ${requirement_file}
sed -i "s|# i2csense|i2csense|g" ${requirement_file}
sed -i "s|# python-eq3bt|python-eq3bt|g" ${requirement_file}
sed -i "s|# pycups|pycups|g" ${requirement_file}
sed -i "s|# homekit|homekit|g" ${requirement_file}
sed -i "s|# decora-wifi|decora-wifi|g" ${requirement_file}
sed -i "s|# decora_wifi|decora_wifi|g" ${requirement_file}
sed -i "s|# decora|decora|g" ${requirement_file}
sed -i "s|# avion|avion|g" ${requirement_file}
sed -i "s|# PySwitchbot|PySwitchbot|g" ${requirement_file}
sed -i "s|# pySwitchmate|pySwitchmate|g" ${requirement_file}
sed -i "s|# face_recognition|face_recognition|g" ${requirement_file}
sed -i "s|# bme680|bme680|g" ${requirement_file}
sed -i "s|# python-gammu|python-gammu|g" ${requirement_file}
# Some packages are not buildable on armhf anymore
if [ "${{ matrix.arch }}" = "armhf" ]; then
# Pandas has issues building on armhf, it is expected they
# will drop the platform in the near future (they consider it
# "flimsy" on 386). The following packages depend on pandas,
# so we comment them out.
sed -i "s|env-canada|# env-canada|g" ${requirement_file}
sed -i "s|noaa-coops|# noaa-coops|g" ${requirement_file}
sed -i "s|pyezviz|# pyezviz|g" ${requirement_file}
sed -i "s|pykrakenapi|# pykrakenapi|g" ${requirement_file}
fi
# Some speedups are only for 64-bit
if [ "${{ matrix.arch }}" = "amd64" ] || [ "${{ matrix.arch }}" = "aarch64" ]; then
sed -i "s|aiohttp-zlib-ng|aiohttp-zlib-ng\[isal\]|g" ${requirement_file}
fi
done
- name: Split requirements all
run: |
# We split requirements all into two different files.
# This is to prevent the build from running out of memory when
# resolving packages on 32-bits systems (like armhf, armv7).
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all.txt requirements_all.txt
- name: Create requirements for cython<3
run: |
# Some dependencies still require 'cython<3'
# and don't yet use isolated build environments.
# Build these first.
# grpcio: https://github.com/grpc/grpc/issues/33918
# pydantic: https://github.com/pydantic/pydantic/issues/7689
touch requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'grpcio==' >> requirements_old-cython.txt
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
- name: Adjust build env
run: |
if [ "${{ matrix.arch }}" = "i386" ]; then
echo "NPY_DISABLE_SVML=1" >> .env_file
fi
# Do not pin numpy in wheels building
sed -i "/numpy/d" homeassistant/package_constraints.txt
- name: Build wheels (old cython)
uses: home-assistant/wheels@2024.01.0
- name: Build wheels
uses: home-assistant/wheels@2021.07.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
tag: ${{ matrix.tag }}
arch: ${{ matrix.arch }}
wheels-host: wheels.hass.io
wheels-key: ${{ secrets.WHEELS_KEY }}
wheels-user: wheels
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
apk: "build-base;cmake;git;linux-headers;libexecinfo-dev;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;autoconf;automake;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev"
pip: "Cython;numpy;scikit-build"
skip-binary: aiohttp
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_old-cython.txt"
pip: "'cython<3'"
- name: Build wheels (part 1)
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtaa"
- name: Build wheels (part 2)
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"
- name: Build wheels (part 3)
uses: home-assistant/wheels@2024.01.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"
requirements-diff: 'requirements_diff.txt'
requirements: "requirements_all.txt"
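
On the requirements split above: split's default output naming appends aa, ab, ac to the given prefix, so `split -l N requirements_all.txt requirements_all.txt` produces requirements_all.txtaa, requirements_all.txtab and requirements_all.txtac — exactly the files the three "Build wheels (part 1/2/3)" steps consume. N is computed as roughly a third of the file's line count, so each chunk resolves a smaller dependency set and keeps the 32-bit builders within memory limits, as the comment in the step notes.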

.gitignore vendored

@@ -1,4 +1,4 @@
/config
config/*
config2/*
tests/testing_config/deps
@@ -8,9 +8,6 @@ tests/testing_config/home-assistant.log*
data/
.token
# Translations
homeassistant/components/*/translations
# Hide sublime text stuff
*.sublime-project
*.sublime-workspace
@@ -61,13 +58,13 @@ pip-log.txt
# Unit test / coverage reports
.coverage
.tox
coverage.xml
nosetests.xml
htmlcov/
test-reports/
test-results.xml
test-output.xml
pytest-*.txt
# Translations
*.mo
@@ -111,6 +108,9 @@ virtualization/vagrant/config
!.vscode/tasks.json
.env
# Built docs
docs/build
# Windows Explorer
desktop.ini
/home-assistant.pyproj


@@ -3,4 +3,3 @@ ignored:
- DL3008
- DL3013
- DL3018
- DL3042

.ignore Normal file

@@ -0,0 +1,6 @@
# Patterns matched in this file will be ignored by supported search utilities
# Ignore generated html and javascript files
/homeassistant/components/frontend/www_static/*.html
/homeassistant/components/frontend/www_static/*.js
/homeassistant/components/frontend/www_static/panels/*.html


@@ -1,24 +1,55 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.1
- repo: https://github.com/asottile/pyupgrade
rev: v2.23.3
hooks:
- id: ruff
- id: pyupgrade
args: [--py38-plus]
- repo: https://github.com/psf/black
rev: 21.7b0
hooks:
- id: black
args:
- --fix
- id: ruff-format
files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$
- --safe
- --quiet
files: ^((homeassistant|script|tests)/.+)?[^/]+\.py$
- repo: https://github.com/codespell-project/codespell
rev: v2.2.2
rev: v2.0.0
hooks:
- id: codespell
args:
- --ignore-words-list=additionals,alle,alot,bund,currenty,datas,farenheit,falsy,fo,haa,hass,iif,incomfort,ines,ist,nam,nd,pres,pullrequests,resset,rime,ser,serie,te,technik,ue,unsecure,withing,zar
- --skip="./.*,*.csv,*.json,*.ambr"
- --ignore-words-list=hass,alot,datas,dof,dur,ether,farenheit,hist,iff,ines,ist,lightsensor,mut,nd,pres,referer,ser,serie,te,technik,ue,uint,visability,wan,wanna,withing,iam,incomfort,ba
- --skip="./.*,*.csv,*.json"
- --quiet-level=2
exclude_types: [csv, json]
exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
exclude: ^tests/fixtures/
- repo: https://gitlab.com/pycqa/flake8
rev: 3.9.2
hooks:
- id: flake8
additional_dependencies:
- pycodestyle==2.7.0
- pyflakes==2.3.1
- flake8-docstrings==1.6.0
- pydocstyle==6.0.0
- flake8-comprehensions==3.5.0
- flake8-noqa==1.1.0
- mccabe==0.6.1
files: ^(homeassistant|script|tests)/.+\.py$
- repo: https://github.com/PyCQA/bandit
rev: 1.7.0
hooks:
- id: bandit
args:
- --quiet
- --format=custom
- --configfile=tests/bandit.yaml
files: ^(homeassistant|script|tests)/.+\.py$
- repo: https://github.com/PyCQA/isort
rev: 5.9.3
hooks:
- id: isort
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v3.2.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]
@@ -30,24 +61,24 @@ repos:
- --branch=master
- --branch=rc
- repo: https://github.com/adrienverge/yamllint.git
rev: v1.32.0
rev: v1.26.1
hooks:
- id: yamllint
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.0.3
rev: v2.2.1
hooks:
- id: prettier
stages: [manual]
- repo: https://github.com/cdce8p/python-typing-update
rev: v0.6.0
rev: v0.3.5
hooks:
# Run `python-typing-update` hook manually from time to time
# to update python typing syntax.
# Will require manual work, before submitting changes!
# pre-commit run --hook-stage manual python-typing-update --all-files
- id: python-typing-update
stages: [manual]
args:
- --py311-plus
- --py38-plus
- --force
- --keep-updates
files: ^(homeassistant|tests|script)/.+\.py$
@@ -63,12 +94,6 @@ repos:
language: script
types: [python]
require_serial: true
files: ^(homeassistant|pylint)/.+\.py$
- id: pylint
name: pylint
entry: script/run-in-env.sh pylint -j 0 --ignore-missing-annotations=y
language: script
types: [python]
files: ^homeassistant/.+\.py$
- id: gen_requirements_all
name: gen_requirements_all
@@ -76,25 +101,11 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/manifest\.json|homeassistant/brands/.+\.json|pyproject\.toml|\.pre-commit-config\.yaml|script/gen_requirements_all\.py)$
files: ^(homeassistant/.+/manifest\.json|\.pre-commit-config\.yaml)$
- id: hassfest
name: hassfest
entry: script/run-in-env.sh python3 -m script.hassfest
pass_filenames: false
language: script
types: [text]
files: ^(homeassistant/.+/(icons|manifest|strings)\.json|homeassistant/brands/.*\.json|\.coveragerc|homeassistant/.+/services\.yaml|script/hassfest/(?!metadata|mypy_config).+\.py)$
- id: hassfest-metadata
name: hassfest-metadata
entry: script/run-in-env.sh python3 -m script.hassfest -p metadata
pass_filenames: false
language: script
types: [text]
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml)$
- id: hassfest-mypy-config
name: hassfest-mypy-config
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
pass_filenames: false
language: script
types: [text]
files: ^(script/hassfest/mypy_config\.py|\.strict-typing|mypy\.ini)$
files: ^(homeassistant/.+/(manifest|strings)\.json|\.coveragerc|homeassistant/.+/services\.yaml)$

.prettierignore

@@ -1,7 +1,5 @@
*.md
.strict-typing
azure-*.yml
docs/source/_templates/*
homeassistant/components/*/translations/*.json
homeassistant/generated/*
tests/components/lidarr/fixtures/initialize.js
tests/components/lidarr/fixtures/initialize-wrong.js
tests/fixtures/core/config/yaml_errors/
tests/fixtures/*

.readthedocs.yml Normal file

@@ -0,0 +1,10 @@
# .readthedocs.yml
build:
image: latest
python:
version: 3.8
setup_py_install: true
requirements_file: requirements_docs.txt

.strict-typing

@@ -2,481 +2,115 @@
# If component is fully covered with type annotations, please add it here
# to enable strict mypy checks.
# Strict typing is enabled by default for core files.
# Add it here to add 'disallow_any_generics'.
# --- Only for core file! ---
homeassistant.auth.auth_store
homeassistant.auth.providers.*
homeassistant.core
homeassistant.exceptions
homeassistant.helpers.area_registry
homeassistant.helpers.condition
homeassistant.helpers.debounce
homeassistant.helpers.deprecation
homeassistant.helpers.device_registry
homeassistant.helpers.discovery
homeassistant.helpers.dispatcher
homeassistant.helpers.entity
homeassistant.helpers.entity_platform
homeassistant.helpers.entity_values
homeassistant.helpers.event
homeassistant.helpers.reload
homeassistant.helpers.script_variables
homeassistant.helpers.singleton
homeassistant.helpers.sun
homeassistant.helpers.translation
homeassistant.loader
homeassistant.requirements
homeassistant.runner
homeassistant.setup
homeassistant.util.async_
homeassistant.util.color
homeassistant.util.decorator
homeassistant.util.location
homeassistant.util.logging
homeassistant.util.process
homeassistant.util.unit_system
# --- Add components below this line ---
homeassistant.components
homeassistant.components.abode.*
homeassistant.components.accuweather.*
homeassistant.components.acer_projector.*
homeassistant.components.acmeda.*
homeassistant.components.accuweather.*
homeassistant.components.actiontec.*
homeassistant.components.adax.*
homeassistant.components.adguard.*
homeassistant.components.aftership.*
homeassistant.components.air_quality.*
homeassistant.components.airly.*
homeassistant.components.airnow.*
homeassistant.components.airq.*
homeassistant.components.airthings.*
homeassistant.components.airthings_ble.*
homeassistant.components.airtouch5.*
homeassistant.components.airvisual.*
homeassistant.components.airvisual_pro.*
homeassistant.components.airzone.*
homeassistant.components.airzone_cloud.*
homeassistant.components.aladdin_connect.*
homeassistant.components.alarm_control_panel.*
homeassistant.components.alert.*
homeassistant.components.alexa.*
homeassistant.components.alpha_vantage.*
homeassistant.components.amazon_polly.*
homeassistant.components.amberelectric.*
homeassistant.components.ambiclimate.*
homeassistant.components.ambee.*
homeassistant.components.ambient_station.*
homeassistant.components.amcrest.*
homeassistant.components.ampio.*
homeassistant.components.analytics.*
homeassistant.components.analytics_insights.*
homeassistant.components.android_ip_webcam.*
homeassistant.components.androidtv.*
homeassistant.components.androidtv_remote.*
homeassistant.components.anel_pwrctrl.*
homeassistant.components.anova.*
homeassistant.components.anthemav.*
homeassistant.components.apache_kafka.*
homeassistant.components.apcupsd.*
homeassistant.components.api.*
homeassistant.components.apple_tv.*
homeassistant.components.apprise.*
homeassistant.components.aprs.*
homeassistant.components.aqualogic.*
homeassistant.components.aquostv.*
homeassistant.components.aranet.*
homeassistant.components.arcam_fmj.*
homeassistant.components.arris_tg2492lg.*
homeassistant.components.aruba.*
homeassistant.components.arwn.*
homeassistant.components.aseko_pool_live.*
homeassistant.components.assist_pipeline.*
homeassistant.components.asterisk_cdr.*
homeassistant.components.asterisk_mbox.*
homeassistant.components.asuswrt.*
homeassistant.components.auth.*
homeassistant.components.automation.*
homeassistant.components.awair.*
homeassistant.components.axis.*
homeassistant.components.backup.*
homeassistant.components.baf.*
homeassistant.components.bang_olufsen.*
homeassistant.components.bayesian.*
homeassistant.components.binary_sensor.*
homeassistant.components.bitcoin.*
homeassistant.components.blockchain.*
homeassistant.components.blue_current.*
homeassistant.components.blueprint.*
homeassistant.components.bluetooth.*
homeassistant.components.bluetooth_adapters.*
homeassistant.components.bluetooth_tracker.*
homeassistant.components.bmw_connected_drive.*
homeassistant.components.bond.*
homeassistant.components.braviatv.*
homeassistant.components.brother.*
homeassistant.components.browser.*
homeassistant.components.bthome.*
homeassistant.components.button.*
homeassistant.components.calendar.*
homeassistant.components.camera.*
homeassistant.components.canary.*
homeassistant.components.cert_expiry.*
homeassistant.components.clickatell.*
homeassistant.components.clicksend.*
homeassistant.components.climate.*
homeassistant.components.cloud.*
homeassistant.components.co2signal.*
homeassistant.components.command_line.*
homeassistant.components.config.*
homeassistant.components.configurator.*
homeassistant.components.counter.*
homeassistant.components.cover.*
homeassistant.components.cpuspeed.*
homeassistant.components.crownstone.*
homeassistant.components.date.*
homeassistant.components.datetime.*
homeassistant.components.deconz.*
homeassistant.components.default_config.*
homeassistant.components.demo.*
homeassistant.components.derivative.*
homeassistant.components.device_automation.*
homeassistant.components.device_tracker.*
homeassistant.components.devolo_home_control.*
homeassistant.components.devolo_home_network.*
homeassistant.components.dhcp.*
homeassistant.components.diagnostics.*
homeassistant.components.discovergy.*
homeassistant.components.dlna_dmr.*
homeassistant.components.dlna_dms.*
homeassistant.components.dnsip.*
homeassistant.components.doorbird.*
homeassistant.components.dormakaba_dkey.*
homeassistant.components.downloader.*
homeassistant.components.dsmr.*
homeassistant.components.duckdns.*
homeassistant.components.dunehd.*
homeassistant.components.duotecno.*
homeassistant.components.easyenergy.*
homeassistant.components.ecovacs.*
homeassistant.components.ecowitt.*
homeassistant.components.efergy.*
homeassistant.components.electrasmart.*
homeassistant.components.electric_kiwi.*
homeassistant.components.elgato.*
homeassistant.components.elkm1.*
homeassistant.components.emulated_hue.*
homeassistant.components.energy.*
homeassistant.components.energyzero.*
homeassistant.components.enigma2.*
homeassistant.components.enphase_envoy.*
homeassistant.components.esphome.*
homeassistant.components.event.*
homeassistant.components.evil_genius_labs.*
homeassistant.components.evohome.*
homeassistant.components.faa_delays.*
homeassistant.components.fan.*
homeassistant.components.energy.*
homeassistant.components.fastdotcom.*
homeassistant.components.feedreader.*
homeassistant.components.file_upload.*
homeassistant.components.filesize.*
homeassistant.components.filter.*
homeassistant.components.fitbit.*
homeassistant.components.flexit_bacnet.*
homeassistant.components.flux_led.*
homeassistant.components.flunearyou.*
homeassistant.components.forecast_solar.*
homeassistant.components.fritz.*
homeassistant.components.fritzbox.*
homeassistant.components.fritzbox_callmonitor.*
homeassistant.components.fronius.*
homeassistant.components.frontend.*
homeassistant.components.fully_kiosk.*
homeassistant.components.generic_hygrostat.*
homeassistant.components.generic_thermostat.*
homeassistant.components.fritz.*
homeassistant.components.geo_location.*
homeassistant.components.geocaching.*
homeassistant.components.gios.*
homeassistant.components.glances.*
homeassistant.components.goalzero.*
homeassistant.components.google.*
homeassistant.components.google_assistant_sdk.*
homeassistant.components.google_sheets.*
homeassistant.components.gpsd.*
homeassistant.components.greeneye_monitor.*
homeassistant.components.group.*
homeassistant.components.guardian.*
homeassistant.components.hardkernel.*
homeassistant.components.hardware.*
homeassistant.components.here_travel_time.*
homeassistant.components.history.*
homeassistant.components.history_stats.*
homeassistant.components.holiday.*
homeassistant.components.homeassistant.*
homeassistant.components.homeassistant_alerts.*
homeassistant.components.homeassistant_green.*
homeassistant.components.homeassistant_hardware.*
homeassistant.components.homeassistant_sky_connect.*
homeassistant.components.homeassistant_yellow.*
homeassistant.components.homekit.*
homeassistant.components.homekit_controller
homeassistant.components.homekit_controller.alarm_control_panel
homeassistant.components.homekit_controller.button
homeassistant.components.homekit_controller.config_flow
homeassistant.components.homekit_controller.const
homeassistant.components.homekit_controller.lock
homeassistant.components.homekit_controller.select
homeassistant.components.homekit_controller.storage
homeassistant.components.homekit_controller.utils
homeassistant.components.homewizard.*
homeassistant.components.homeworks.*
homeassistant.components.homeassistant.triggers.event
homeassistant.components.http.*
homeassistant.components.huawei_lte.*
homeassistant.components.humidifier.*
homeassistant.components.hydrawise.*
homeassistant.components.hyperion.*
homeassistant.components.ibeacon.*
homeassistant.components.idasen_desk.*
homeassistant.components.image.*
homeassistant.components.image_processing.*
homeassistant.components.image_upload.*
homeassistant.components.imap.*
homeassistant.components.input_button.*
homeassistant.components.input_select.*
homeassistant.components.input_text.*
homeassistant.components.integration.*
homeassistant.components.intent.*
homeassistant.components.intent_script.*
homeassistant.components.ios.*
homeassistant.components.ipp.*
homeassistant.components.iqvia.*
homeassistant.components.islamic_prayer_times.*
homeassistant.components.isy994.*
homeassistant.components.jellyfin.*
homeassistant.components.jewish_calendar.*
homeassistant.components.jvc_projector.*
homeassistant.components.kaleidescape.*
homeassistant.components.knx.*
homeassistant.components.kraken.*
homeassistant.components.lacrosse.*
homeassistant.components.lacrosse_view.*
homeassistant.components.lamarzocco.*
homeassistant.components.lametric.*
homeassistant.components.laundrify.*
homeassistant.components.lawn_mower.*
homeassistant.components.lcn.*
homeassistant.components.ld2410_ble.*
homeassistant.components.led_ble.*
homeassistant.components.lidarr.*
homeassistant.components.lifx.*
homeassistant.components.light.*
homeassistant.components.linear_garage_door.*
homeassistant.components.litejet.*
homeassistant.components.litterrobot.*
homeassistant.components.local_ip.*
homeassistant.components.local_todo.*
homeassistant.components.lock.*
homeassistant.components.logbook.*
homeassistant.components.logger.*
homeassistant.components.london_underground.*
homeassistant.components.lookin.*
homeassistant.components.luftdaten.*
homeassistant.components.mailbox.*
homeassistant.components.map.*
homeassistant.components.mastodon.*
homeassistant.components.matrix.*
homeassistant.components.matter.*
homeassistant.components.media_extractor.*
homeassistant.components.media_player.*
homeassistant.components.media_source.*
homeassistant.components.met_eireann.*
homeassistant.components.metoffice.*
homeassistant.components.mikrotik.*
homeassistant.components.min_max.*
homeassistant.components.minecraft_server.*
homeassistant.components.mjpeg.*
homeassistant.components.modbus.*
homeassistant.components.modem_callerid.*
homeassistant.components.moon.*
homeassistant.components.mopeka.*
homeassistant.components.motionmount.*
homeassistant.components.mqtt.*
homeassistant.components.my.*
homeassistant.components.mysensors.*
homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.neato.*
homeassistant.components.nest.*
homeassistant.components.netatmo.*
homeassistant.components.network.*
homeassistant.components.nextdns.*
homeassistant.components.nfandroidtv.*
homeassistant.components.nightscout.*
homeassistant.components.nissan_leaf.*
homeassistant.components.no_ip.*
homeassistant.components.notify.*
homeassistant.components.notion.*
homeassistant.components.number.*
homeassistant.components.nut.*
homeassistant.components.onboarding.*
homeassistant.components.oncue.*
homeassistant.components.onewire.*
homeassistant.components.open_meteo.*
homeassistant.components.openexchangerates.*
homeassistant.components.opensky.*
homeassistant.components.openuv.*
homeassistant.components.oralb.*
homeassistant.components.otbr.*
homeassistant.components.overkiz.*
homeassistant.components.p1_monitor.*
homeassistant.components.peco.*
homeassistant.components.persistent_notification.*
homeassistant.components.pi_hole.*
homeassistant.components.ping.*
homeassistant.components.plugwise.*
homeassistant.components.poolsense.*
homeassistant.components.powerwall.*
homeassistant.components.private_ble_device.*
homeassistant.components.prometheus.*
homeassistant.components.proximity.*
homeassistant.components.prusalink.*
homeassistant.components.pure_energie.*
homeassistant.components.purpleair.*
homeassistant.components.pushbullet.*
homeassistant.components.pvoutput.*
homeassistant.components.qnap_qsw.*
homeassistant.components.rabbitair.*
homeassistant.components.radarr.*
homeassistant.components.rainforest_raven.*
homeassistant.components.rainmachine.*
homeassistant.components.raspberry_pi.*
homeassistant.components.rdw.*
homeassistant.components.recollect_waste.*
homeassistant.components.recorder.*
homeassistant.components.recorder.purge
homeassistant.components.recorder.repack
homeassistant.components.recorder.statistics
homeassistant.components.remote.*
homeassistant.components.renault.*
homeassistant.components.repairs.*
homeassistant.components.rest.*
homeassistant.components.rest_command.*
homeassistant.components.rfxtrx.*
homeassistant.components.rhasspy.*
homeassistant.components.ridwell.*
homeassistant.components.rituals_perfume_genie.*
homeassistant.components.roku.*
homeassistant.components.romy.*
homeassistant.components.rpi_power.*
homeassistant.components.rss_feed_template.*
homeassistant.components.rtsp_to_webrtc.*
homeassistant.components.ruuvi_gateway.*
homeassistant.components.ruuvitag_ble.*
homeassistant.components.samsungtv.*
homeassistant.components.scene.*
homeassistant.components.schedule.*
homeassistant.components.scrape.*
homeassistant.components.search.*
homeassistant.components.select.*
homeassistant.components.sensibo.*
homeassistant.components.sensirion_ble.*
homeassistant.components.sensor.*
homeassistant.components.senz.*
homeassistant.components.sfr_box.*
homeassistant.components.shelly.*
homeassistant.components.shopping_list.*
homeassistant.components.simplepush.*
homeassistant.components.simplisafe.*
homeassistant.components.siren.*
homeassistant.components.skybell.*
homeassistant.components.slack.*
homeassistant.components.sleepiq.*
homeassistant.components.smhi.*
homeassistant.components.snooz.*
homeassistant.components.sonarr.*
homeassistant.components.speedtestdotnet.*
homeassistant.components.sql.*
homeassistant.components.sonos.media_player
homeassistant.components.ssdp.*
homeassistant.components.starlink.*
homeassistant.components.statistics.*
homeassistant.components.steamist.*
homeassistant.components.stookalert.*
homeassistant.components.stream.*
homeassistant.components.streamlabswater.*
homeassistant.components.stt.*
homeassistant.components.suez_water.*
homeassistant.components.sun.*
homeassistant.components.surepetcare.*
homeassistant.components.switch.*
homeassistant.components.switchbee.*
homeassistant.components.switchbot_cloud.*
homeassistant.components.switcher_kis.*
homeassistant.components.synology_dsm.*
homeassistant.components.system_health.*
homeassistant.components.system_log.*
homeassistant.components.systemmonitor.*
homeassistant.components.tag.*
homeassistant.components.tailscale.*
homeassistant.components.tailwind.*
homeassistant.components.tami4.*
homeassistant.components.tautulli.*
homeassistant.components.tcp.*
homeassistant.components.technove.*
homeassistant.components.tedee.*
homeassistant.components.text.*
homeassistant.components.threshold.*
homeassistant.components.tibber.*
homeassistant.components.tile.*
homeassistant.components.tilt_ble.*
homeassistant.components.time.*
homeassistant.components.time_date.*
homeassistant.components.timer.*
homeassistant.components.tod.*
homeassistant.components.todo.*
homeassistant.components.tolo.*
homeassistant.components.tplink.*
homeassistant.components.tplink_omada.*
homeassistant.components.trace.*
homeassistant.components.tractive.*
homeassistant.components.tradfri.*
homeassistant.components.trafikverket_camera.*
homeassistant.components.trafikverket_ferry.*
homeassistant.components.trafikverket_train.*
homeassistant.components.trafikverket_weatherstation.*
homeassistant.components.transmission.*
homeassistant.components.trend.*
homeassistant.components.tts.*
homeassistant.components.twentemilieu.*
homeassistant.components.unifi.*
homeassistant.components.unifiprotect.*
homeassistant.components.upcloud.*
homeassistant.components.update.*
homeassistant.components.uptime.*
homeassistant.components.uptimerobot.*
homeassistant.components.usb.*
homeassistant.components.vacuum.*
homeassistant.components.vallox.*
homeassistant.components.valve.*
homeassistant.components.velbus.*
homeassistant.components.vlc_telnet.*
homeassistant.components.wake_on_lan.*
homeassistant.components.wake_word.*
homeassistant.components.wallbox.*
homeassistant.components.waqi.*
homeassistant.components.water_heater.*
homeassistant.components.watttime.*
homeassistant.components.weather.*
homeassistant.components.webhook.*
homeassistant.components.webostv.*
homeassistant.components.websocket_api.*
homeassistant.components.wemo.*
homeassistant.components.whois.*
homeassistant.components.withings.*
homeassistant.components.wiz.*
homeassistant.components.wled.*
homeassistant.components.worldclock.*
homeassistant.components.xiaomi_ble.*
homeassistant.components.yale_smart_alarm.*
homeassistant.components.yalexs_ble.*
homeassistant.components.youtube.*
homeassistant.components.zeroconf.*
homeassistant.components.zodiac.*
homeassistant.components.zeroconf.*
homeassistant.components.zone.*
homeassistant.components.zwave_js.*

.vscode/extensions.json

@@ -1,7 +1,3 @@
{
"recommendations": [
"charliermarsh.ruff",
"esbenp.prettier-vscode",
"ms-python.python"
]
"recommendations": ["esbenp.prettier-vscode", "ms-python.python"]
}

.vscode/launch.json vendored

@@ -10,28 +10,10 @@
"request": "launch",
"module": "homeassistant",
"justMyCode": false,
"args": ["--debug", "-c", "config"],
"preLaunchTask": "Compile English translations"
"args": ["--debug", "-c", "config"]
},
{
"name": "Home Assistant (skip pip)",
"type": "python",
"request": "launch",
"module": "homeassistant",
"justMyCode": false,
"args": ["--debug", "-c", "config", "--skip-pip"],
"preLaunchTask": "Compile English translations"
},
{
"name": "Home Assistant: Changed tests",
"type": "python",
"request": "launch",
"module": "pytest",
"justMyCode": false,
"args": ["--timeout=10", "--picked"],
},
{
// Debug by attaching to local Home Assistant server using Remote Python Debugger.
// Debug by attaching to local Home Asistant server using Remote Python Debugger.
// See https://www.home-assistant.io/integrations/debugpy/
"name": "Home Assistant: Attach Local",
"type": "python",
@@ -46,7 +28,7 @@
]
},
{
// Debug by attaching to remote Home Assistant server using Remote Python Debugger.
// Debug by attaching to remote Home Asistant server using Remote Python Debugger.
// See https://www.home-assistant.io/integrations/debugpy/
"name": "Home Assistant: Attach Remote",
"type": "python",

.vscode/settings.json

@@ -1,5 +1,6 @@
{
// Please keep this file in sync with settings in home-assistant/.devcontainer/devcontainer.json
"python.formatting.provider": "black",
// Added --no-cov to work around TypeError: message must be set
// https://github.com/microsoft/vscode-python/issues/14067
"python.testing.pytestArgs": ["--no-cov"],

.vscode/tasks.json vendored

@@ -10,14 +10,15 @@
"reveal": "always",
"panel": "new"
},
"problemMatcher": [],
"dependsOn": ["Compile English translations"]
"problemMatcher": []
},
{
"label": "Pytest",
"type": "shell",
"command": "python3 -m pytest --timeout=10 tests",
"dependsOn": ["Install all Test Requirements"],
"command": "pytest --timeout=10 tests",
"dependsOn": [
"Install all Test Requirements"
],
"group": {
"kind": "test",
"isDefault": true
@@ -29,23 +30,9 @@
"problemMatcher": []
},
{
"label": "Pytest (changed tests only)",
"label": "Flake8",
"type": "shell",
"command": "python3 -m pytest --timeout=10 --picked",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Ruff",
"type": "shell",
"command": "pre-commit run ruff --all-files",
"command": "pre-commit run flake8 --all-files",
"group": {
"kind": "test",
"isDefault": true
@@ -60,7 +47,9 @@
"label": "Pylint",
"type": "shell",
"command": "pylint homeassistant",
"dependsOn": ["Install all Requirements"],
"dependsOn": [
"Install all Requirements"
],
"group": {
"kind": "test",
"isDefault": true
@@ -75,7 +64,7 @@
"label": "Code Coverage",
"detail": "Generate code coverage report for a given integration.",
"type": "shell",
"command": "python3 -m pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing --durations-min=1 --durations=0 --numprocesses=auto",
"command": "pytest ./tests/components/${input:integrationName}/ --cov=homeassistant.components.${input:integrationName} --cov-report term-missing",
"group": {
"kind": "test",
"isDefault": true
@@ -127,50 +116,6 @@
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Compile English translations",
"detail": "In order to test changes to translation files, the translation strings must be compiled into Home Assistant's translation directories.",
"type": "shell",
"command": "python3 -m script.translations develop --all",
"group": {
"kind": "build",
"isDefault": true
}
},
{
"label": "Run scaffold",
"detail": "Add new functionality to a integration using a scaffold.",
"type": "shell",
"command": "python3 -m script.scaffold ${input:scaffoldName} --integration ${input:integrationName}",
"group": {
"kind": "build",
"isDefault": true
}
},
{
"label": "Create new integration",
"detail": "Use the scaffold to create a new integration.",
"type": "shell",
"command": "python3 -m script.scaffold integration",
"group": {
"kind": "build",
"isDefault": true
}
},
{
"label": "Install integration requirements",
"detail": "Install all requirements of a given integration.",
"type": "shell",
"command": "${command:python.interpreterPath} -m script.install_integration_requirements ${input:integrationName}",
"group": {
"kind": "build",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
}
}
],
"inputs": [
@@ -178,23 +123,6 @@
"id": "integrationName",
"type": "promptString",
"description": "For which integration should the task run?"
},
{
"id": "scaffoldName",
"type": "pickString",
"options": [
"backup",
"config_flow",
"config_flow_discovery",
"config_flow_helper",
"config_flow_oauth2",
"device_action",
"device_condition",
"device_trigger",
"reproduce_state",
"significant_change"
],
"description": "Which scaffold should be run?"
}
]
}

.yamllint

@@ -1,5 +1,5 @@
ignore: |
tests/fixtures/core/config/yaml_errors/
azure-*.yml
rules:
braces:
level: error
@@ -25,7 +25,7 @@ rules:
comments:
level: error
require-starting-space: true
min-spaces-from-content: 1
min-spaces-from-content: 2
comments-indentation:
level: error
document-end:

CODEOWNERS

File diff suppressed because it is too large.

CODE_OF_CONDUCT.md

@@ -123,7 +123,7 @@ enforcement ladder][mozilla].
## Adoption
This Code of Conduct was first adopted on January 21st, 2017, and announced in
This Code of Conduct was first adopted January 21st, 2017 and announced in
[this][coc-blog] blog post and has been updated on May 25th, 2020 to version
2.0 of the [Contributor Covenant][homepage] as announced in [this][coc2-blog]
blog post.
@@ -132,8 +132,8 @@ For answers to common questions about this code of conduct, see the FAQ at
<https://www.contributor-covenant.org/faq>. Translations are available at
<https://www.contributor-covenant.org/translations>.
[coc-blog]: https://www.home-assistant.io/blog/2017/01/21/home-assistant-governance/
[coc2-blog]: https://www.home-assistant.io/blog/2020/05/25/code-of-conduct-updated/
[coc-blog]: /blog/2017/01/21/home-assistant-governance/
[coc2-blog]: /blog/2020/05/25/code-of-conduct-updated/
[email]: mailto:safety@home-assistant.io
[homepage]: http://contributor-covenant.org
[mozilla]: https://github.com/mozilla/diversity

Dockerfile

@@ -1,55 +1,35 @@
# Automatically generated by hassfest.
#
# To update, run python3 -m script.hassfest -p docker
ARG BUILD_FROM
FROM ${BUILD_FROM}
# Synchronize with homeassistant/core.py:async_stop
ENV \
S6_SERVICES_GRACETIME=240000
ARG QEMU_CPU
S6_SERVICES_GRACETIME=220000
WORKDIR /usr/src
## Setup Home Assistant Core dependencies
COPY requirements.txt homeassistant/
COPY homeassistant/package_constraints.txt homeassistant/homeassistant/
RUN \
pip3 install \
--only-binary=:all: \
-r homeassistant/requirements.txt
COPY requirements_all.txt home_assistant_frontend-* home_assistant_intents-* homeassistant/
RUN \
if ls homeassistant/home_assistant_frontend*.whl 1> /dev/null 2>&1; then \
pip3 install homeassistant/home_assistant_frontend-*.whl; \
fi \
&& if ls homeassistant/home_assistant_intents*.whl 1> /dev/null 2>&1; then \
pip3 install homeassistant/home_assistant_intents-*.whl; \
fi \
&& if [ "${BUILD_ARCH}" = "i386" ]; then \
LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
linux32 pip3 install \
--only-binary=:all: \
-r homeassistant/requirements_all.txt; \
else \
LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" \
MALLOC_CONF="background_thread:true,metadata_thp:auto,dirty_decay_ms:20000,muzzy_decay_ms:20000" \
pip3 install \
--only-binary=:all: \
-r homeassistant/requirements_all.txt; \
fi
## Setup Home Assistant Core
## Setup Home Assistant
COPY . homeassistant/
RUN \
pip3 install \
--only-binary=:all: \
-e ./homeassistant \
&& python3 -m compileall \
homeassistant/homeassistant
pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-r homeassistant/requirements_all.txt \
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links "${WHEELS_LINKS}" \
-e ./homeassistant \
&& python3 -m compileall homeassistant/homeassistant
# Fix Bug with Alpine 3.14 and sqlite 3.35
# https://gitlab.alpinelinux.org/alpine/aports/-/issues/12524
ARG BUILD_ARCH
RUN \
if [ "${BUILD_ARCH}" = "amd64" ]; then \
export APK_ARCH=x86_64; \
elif [ "${BUILD_ARCH}" = "i386" ]; then \
export APK_ARCH=x86; \
else \
export APK_ARCH=${BUILD_ARCH}; \
fi \
&& curl -O http://dl-cdn.alpinelinux.org/alpine/v3.13/main/${APK_ARCH}/sqlite-libs-3.34.1-r0.apk \
&& apk add --no-cache sqlite-libs-3.34.1-r0.apk \
&& rm -f sqlite-libs-3.34.1-r0.apk
# Home Assistant S6-Overlay
COPY rootfs /

Dockerfile.dev

@@ -1,22 +1,13 @@
FROM mcr.microsoft.com/devcontainers/python:1-3.12
FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.9
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# Uninstall pre-installed formatting and linting tools
# They would conflict with our pinned versions
RUN \
pipx uninstall pydocstyle \
&& pipx uninstall pycodestyle \
&& pipx uninstall mypy \
&& pipx uninstall pylint
RUN \
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
# Additional library needed by some tests and accordingly by VScode Tests Discovery
bluez \
ffmpeg \
libudev-dev \
libavformat-dev \
libavcodec-dev \
@@ -26,11 +17,7 @@ RUN \
libswresample-dev \
libavfilter-dev \
libpcap-dev \
libturbojpeg0 \
libyaml-dev \
libxml2 \
git \
cmake \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
@@ -43,12 +30,11 @@ RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
WORKDIR /workspaces
# Install Python dependencies from requirements
COPY requirements.txt ./
COPY requirements.txt requirements_test.txt requirements_test_pre_commit.txt ./
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
RUN pip3 install -r requirements.txt
COPY requirements_test.txt requirements_test_pre_commit.txt ./
RUN pip3 install -r requirements_test.txt
RUN rm -rf requirements.txt requirements_test.txt requirements_test_pre_commit.txt homeassistant/
RUN pip3 install -r requirements.txt \
&& pip3 install -r requirements_test.txt \
&& rm -rf requirements.txt requirements_test.txt requirements_test_pre_commit.txt homeassistant/
# Set the default shell to bash instead of sh
ENV SHELL /bin/bash

MANIFEST.in

@@ -1,3 +1,4 @@
include README.rst
include LICENSE.md
graft homeassistant
recursive-exclude * *.py[co]

README.rst

@@ -4,7 +4,7 @@ Home Assistant |Chat Status|
Open source home automation that puts local control and privacy first. Powered by a worldwide community of tinkerers and DIY enthusiasts. Perfect to run on a Raspberry Pi or a local server.
Check out `home-assistant.io <https://home-assistant.io>`__ for `a
demo <https://demo.home-assistant.io>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
demo <https://home-assistant.io/demo/>`__, `installation instructions <https://home-assistant.io/getting-started/>`__,
`tutorials <https://home-assistant.io/getting-started/automation/>`__ and `documentation <https://home-assistant.io/docs/>`__.
|screenshot-states|
@@ -12,7 +12,7 @@ demo <https://demo.home-assistant.io>`__, `installation instructions <https://ho
Featured integrations
---------------------
|screenshot-integrations|
|screenshot-components|
The system is built using a modular approach so support for other devices or actions can be implemented easily. See also the `section on architecture <https://developers.home-assistant.io/docs/architecture_index/>`__ and the `section on creating your own
components <https://developers.home-assistant.io/docs/creating_component_index/>`__.
@@ -21,8 +21,8 @@ If you run into issues while using Home Assistant or during development
of a component, check the `Home Assistant help section <https://home-assistant.io/help/>`__ of our website for further help and information.
.. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg
:target: https://www.home-assistant.io/join-chat/
.. |screenshot-states| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-states.png
:target: https://demo.home-assistant.io
.. |screenshot-integrations| image:: https://raw.githubusercontent.com/home-assistant/core/dev/.github/assets/screenshot-integrations.png
:target: https://home-assistant.io/integrations/
:target: https://discord.gg/c5DvZ4e
.. |screenshot-states| image:: https://raw.github.com/home-assistant/home-assistant/master/docs/screenshots.png
:target: https://home-assistant.io/demo/
.. |screenshot-components| image:: https://raw.github.com/home-assistant/home-assistant/dev/docs/screenshot-components.png
:target: https://home-assistant.io/integrations/

build.json Normal file

@@ -0,0 +1,22 @@
{
"image": "homeassistant/{arch}-homeassistant",
"shadow_repository": "ghcr.io/home-assistant",
"build_from": {
"aarch64": "ghcr.io/home-assistant/aarch64-homeassistant-base:2021.08.0",
"armhf": "ghcr.io/home-assistant/armhf-homeassistant-base:2021.08.0",
"armv7": "ghcr.io/home-assistant/armv7-homeassistant-base:2021.08.0",
"amd64": "ghcr.io/home-assistant/amd64-homeassistant-base:2021.08.0",
"i386": "ghcr.io/home-assistant/i386-homeassistant-base:2021.08.0"
},
"labels": {
"io.hass.type": "core",
"org.opencontainers.image.title": "Home Assistant",
"org.opencontainers.image.description": "Open-source home automation platform running on Python 3",
"org.opencontainers.image.source": "https://github.com/home-assistant/core",
"org.opencontainers.image.authors": "The Home Assistant Authors",
"org.opencontainers.image.url": "https://www.home-assistant.io/",
"org.opencontainers.image.documentation": "https://www.home-assistant.io/docs/",
"org.opencontainers.image.licenses": "Apache License 2.0"
},
"version_tag": true
}
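A minimal sketch (not part of this diff) of how the per-architecture entries in build.json resolve, assuming the builder substitutes the {arch} placeholder in "image" with each key listed under "build_from":

    import json

    # Read the builder configuration added above.
    with open("build.json") as fh:
        build = json.load(fh)

    # For every target architecture, the published image name is derived from the
    # "image" template and built from the matching base image.
    for arch, base in build["build_from"].items():
        image = build["image"].format(arch=arch)
        print(f"{image} is built FROM {base}")

    # e.g. homeassistant/amd64-homeassistant is built FROM
    # ghcr.io/home-assistant/amd64-homeassistant-base:2021.08.0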

build.yaml

@@ -1,22 +0,0 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.02.1
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.02.1
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.02.1
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.02.1
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.02.1
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
cosign:
base_identity: https://github.com/home-assistant/docker/.*
identity: https://github.com/home-assistant/core/.*
labels:
io.hass.type: core
org.opencontainers.image.title: Home Assistant
org.opencontainers.image.description: Open-source home automation platform running on Python 3
org.opencontainers.image.source: https://github.com/home-assistant/core
org.opencontainers.image.authors: The Home Assistant Authors
org.opencontainers.image.url: https://www.home-assistant.io/
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
org.opencontainers.image.licenses: Apache License 2.0

codecov.yml

@@ -6,48 +6,4 @@ coverage:
default:
target: 90
threshold: 0.09
required:
target: auto
threshold: 1
paths:
- homeassistant/components/*/config_flow.py
- homeassistant/components/*/device_action.py
- homeassistant/components/*/device_condition.py
- homeassistant/components/*/device_trigger.py
- homeassistant/components/*/diagnostics.py
- homeassistant/components/*/group.py
- homeassistant/components/*/intent.py
- homeassistant/components/*/logbook.py
- homeassistant/components/*/media_source.py
- homeassistant/components/*/recorder.py
- homeassistant/components/*/scene.py
patch:
default:
target: auto
required:
target: 100
threshold: 0
paths:
- homeassistant/components/*/config_flow.py
- homeassistant/components/*/device_action.py
- homeassistant/components/*/device_condition.py
- homeassistant/components/*/device_trigger.py
- homeassistant/components/*/diagnostics.py
- homeassistant/components/*/group.py
- homeassistant/components/*/intent.py
- homeassistant/components/*/logbook.py
- homeassistant/components/*/media_source.py
- homeassistant/components/*/recorder.py
- homeassistant/components/*/scene.py
comment: false
# To make partial tests possible,
# we need to carry forward.
flag_management:
default_rules:
carryforward: false
individual_flags:
- name: full-suite
paths:
- ".*"
carryforward: true

docs/Makefile Normal file

@@ -0,0 +1,230 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
.PHONY: help
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " livehtml to make standalone HTML files via sphinx-autobuild"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " epub3 to make an epub3"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
@echo " dummy to check syntax errors of document sources"
.PHONY: clean
clean:
rm -rf $(BUILDDIR)/*
.PHONY: html
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
.PHONY: livehtml
livehtml:
sphinx-autobuild -z ../homeassistant/ --port 0 -B -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
.PHONY: dirhtml
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
.PHONY: singlehtml
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
.PHONY: pickle
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
.PHONY: json
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
.PHONY: htmlhelp
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
.PHONY: qthelp
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Home-Assistant.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Home-Assistant.qhc"
.PHONY: applehelp
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
.PHONY: devhelp
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/Home-Assistant"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Home-Assistant"
@echo "# devhelp"
.PHONY: epub
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
.PHONY: epub3
epub3:
$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
@echo
@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
.PHONY: latex
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
.PHONY: latexpdf
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: latexpdfja
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
.PHONY: text
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
.PHONY: man
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
.PHONY: texinfo
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
.PHONY: info
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
.PHONY: gettext
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
.PHONY: changes
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
.PHONY: linkcheck
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
.PHONY: doctest
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
.PHONY: coverage
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
.PHONY: xml
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
.PHONY: pseudoxml
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
.PHONY: dummy
dummy:
$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
@echo
@echo "Build finished. Dummy builder generates no files."

docs/make.bat Normal file

@@ -0,0 +1,281 @@
@ECHO OFF
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set BUILDDIR=build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
set I18NSPHINXOPTS=%SPHINXOPTS% source
if NOT "%PAPER%" == "" (
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
if "%1" == "" goto help
if "%1" == "help" (
:help
echo.Please use `make ^<target^>` where ^<target^> is one of
echo. html to make standalone HTML files
echo. dirhtml to make HTML files named index.html in directories
echo. singlehtml to make a single large HTML file
echo. pickle to make pickle files
echo. json to make JSON files
echo. htmlhelp to make HTML files and a HTML help project
echo. qthelp to make HTML files and a qthelp project
echo. devhelp to make HTML files and a Devhelp project
echo. epub to make an epub
echo. epub3 to make an epub3
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
echo. text to make text files
echo. man to make manual pages
echo. texinfo to make Texinfo files
echo. gettext to make PO message catalogs
echo. changes to make an overview over all changed/added/deprecated items
echo. xml to make Docutils-native XML files
echo. pseudoxml to make pseudoxml-XML files for display purposes
echo. linkcheck to check all external links for integrity
echo. doctest to run all doctests embedded in the documentation if enabled
echo. coverage to run coverage check of the documentation if enabled
echo. dummy to check syntax errors of document sources
goto end
)
if "%1" == "clean" (
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
del /q /s %BUILDDIR%\*
goto end
)
REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 1>NUL 2>NUL
if errorlevel 9009 goto sphinx_python
goto sphinx_ok
:sphinx_python
set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
:sphinx_ok
if "%1" == "html" (
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
goto end
)
if "%1" == "dirhtml" (
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
goto end
)
if "%1" == "singlehtml" (
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
goto end
)
if "%1" == "pickle" (
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the pickle files.
goto end
)
if "%1" == "json" (
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can process the JSON files.
goto end
)
if "%1" == "htmlhelp" (
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
goto end
)
if "%1" == "qthelp" (
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Home-Assistant.qhcp
echo.To view the help file:
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Home-Assistant.ghc
goto end
)
if "%1" == "devhelp" (
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
if errorlevel 1 exit /b 1
echo.
echo.Build finished.
goto end
)
if "%1" == "epub" (
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub file is in %BUILDDIR%/epub.
goto end
)
if "%1" == "epub3" (
%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
goto end
)
if "%1" == "latex" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
if errorlevel 1 exit /b 1
echo.
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdf" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "latexpdfja" (
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
cd %BUILDDIR%/latex
make all-pdf-ja
cd %~dp0
echo.
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
goto end
)
if "%1" == "text" (
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The text files are in %BUILDDIR%/text.
goto end
)
if "%1" == "man" (
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The manual pages are in %BUILDDIR%/man.
goto end
)
if "%1" == "texinfo" (
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
goto end
)
if "%1" == "gettext" (
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
goto end
)
if "%1" == "changes" (
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
if errorlevel 1 exit /b 1
echo.
echo.The overview file is in %BUILDDIR%/changes.
goto end
)
if "%1" == "linkcheck" (
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
if errorlevel 1 exit /b 1
echo.
echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
goto end
)
if "%1" == "doctest" (
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
if errorlevel 1 exit /b 1
echo.
echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
goto end
)
if "%1" == "coverage" (
%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
if errorlevel 1 exit /b 1
echo.
echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
goto end
)
if "%1" == "xml" (
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The XML files are in %BUILDDIR%/xml.
goto end
)
if "%1" == "pseudoxml" (
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
if errorlevel 1 exit /b 1
echo.
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
goto end
)
if "%1" == "dummy" (
%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
if errorlevel 1 exit /b 1
echo.
echo.Build finished. Dummy builder generates no files.
goto end
)
:end

Binary image file added (102 KiB); content not shown.

docs/screenshots.png Normal file

Binary image file added (226 KiB); content not shown.

docs/source/_ext/edit_on_github.py Normal file

@@ -0,0 +1,44 @@
"""
Sphinx extension to add ReadTheDocs-style "Edit on GitHub" links to the
sidebar.
Loosely based on https://github.com/astropy/astropy/pull/347
"""
import os
import warnings
__licence__ = "BSD (3 clause)"
def get_github_url(app, view, path):
return (
f"https://github.com/{app.config.edit_on_github_project}/"
f"{view}/{app.config.edit_on_github_branch}/"
f"{app.config.edit_on_github_src_path}{path}"
)
def html_page_context(app, pagename, templatename, context, doctree):
if templatename != "page.html":
return
if not app.config.edit_on_github_project:
warnings.warn("edit_on_github_project not specified")
return
if not doctree:
warnings.warn("doctree is None")
return
path = os.path.relpath(doctree.get("source"), app.builder.srcdir)
show_url = get_github_url(app, "blob", path)
edit_url = get_github_url(app, "edit", path)
context["show_on_github_url"] = show_url
context["edit_on_github_url"] = edit_url
def setup(app):
app.add_config_value("edit_on_github_project", "", True)
app.add_config_value("edit_on_github_branch", "master", True)
app.add_config_value("edit_on_github_src_path", "", True) # 'eg' "docs/"
app.connect("html-page-context", html_page_context)
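For context, a minimal conf.py sketch (not part of this diff) showing how the config values this extension registers in setup() would typically be set; the project, branch, and path values below are illustrative assumptions:

    # docs/source/conf.py (sketch)
    import os
    import sys

    # Make the local _ext directory importable so Sphinx can load the extension
    # (assumed layout: the extension lives next to conf.py in an _ext folder).
    sys.path.insert(0, os.path.abspath("_ext"))

    extensions = [
        "edit_on_github",
    ]

    # Values consumed by get_github_url()/html_page_context() above.
    edit_on_github_project = "home-assistant/core"  # "<owner>/<repo>" (assumed)
    edit_on_github_branch = "dev"                   # branch used in the links (assumed)
    edit_on_github_src_path = "docs/source/"        # path prefix, e.g. "docs/" per the code comment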

Three binary image files added (18 KiB, 13 KiB, 13 KiB); content not shown.


@@ -0,0 +1,6 @@
<ul>
<li><a href="https://home-assistant.io/">Homepage</a></li>
<li><a href="https://community.home-assistant.io">Community Forums</a></li>
<li><a href="https://github.com/home-assistant/core">GitHub</a></li>
<li><a href="https://discord.gg/c5DvZ4e">Discord</a></li>
</ul>

docs/source/_templates/sourcelink.html Normal file

@@ -0,0 +1,13 @@
{%- if show_source and has_source and sourcename %}
<h3>{{ _('This Page') }}</h3>
<ul class="this-page-menu">
{%- if show_on_github_url %}
<li><a href="{{ show_on_github_url }}"
rel="nofollow">{{ _('Show on GitHub') }}</a></li>
{%- endif %}
{%- if edit_on_github_url %}
<li><a href="{{ edit_on_github_url }}"
rel="nofollow">{{ _('Edit on GitHub') }}</a></li>
{%- endif %}
</ul>
{%- endif %}

docs/source/api/auth.rst Normal file

@@ -0,0 +1,29 @@
:mod:`homeassistant.auth`
=========================
.. automodule:: homeassistant.auth
:members:
homeassistant.auth.auth\_store
------------------------------
.. automodule:: homeassistant.auth.auth_store
:members:
:undoc-members:
:show-inheritance:
homeassistant.auth.const
------------------------
.. automodule:: homeassistant.auth.const
:members:
:undoc-members:
:show-inheritance:
homeassistant.auth.models
-------------------------
.. automodule:: homeassistant.auth.models
:members:
:undoc-members:
:show-inheritance:

docs/source/api/bootstrap.rst Normal file

@@ -0,0 +1,7 @@
.. _bootstrap_module:
:mod:`homeassistant.bootstrap`
------------------------------
.. automodule:: homeassistant.bootstrap
:members:

docs/source/api/components.rst Normal file

@@ -0,0 +1,170 @@
:mod:`homeassistant.components`
===============================
air\_quality
--------------------------------------------
.. automodule:: homeassistant.components.air_quality
:members:
:undoc-members:
:show-inheritance:
alarm\_control\_panel
--------------------------------------------
.. automodule:: homeassistant.components.alarm_control_panel
:members:
:undoc-members:
:show-inheritance:
binary\_sensor
--------------------------------------------
.. automodule:: homeassistant.components.binary_sensor
:members:
:undoc-members:
:show-inheritance:
camera
---------------------------
.. automodule:: homeassistant.components.camera
:members:
:undoc-members:
:show-inheritance:
calendar
---------------------------
.. automodule:: homeassistant.components.calendar
:members:
:undoc-members:
:show-inheritance:
climate
---------------------------
.. automodule:: homeassistant.components.climate
:members:
:undoc-members:
:show-inheritance:
conversation
---------------------------
.. automodule:: homeassistant.components.conversation
:members:
:undoc-members:
:show-inheritance:
cover
---------------------------
.. automodule:: homeassistant.components.cover
:members:
:undoc-members:
:show-inheritance:
device\_tracker
---------------------------
.. automodule:: homeassistant.components.device_tracker
:members:
:undoc-members:
:show-inheritance:
fan
---------------------------
.. automodule:: homeassistant.components.fan
:members:
:undoc-members:
:show-inheritance:
light
---------------------------
.. automodule:: homeassistant.components.light
:members:
:undoc-members:
:show-inheritance:
lock
---------------------------
.. automodule:: homeassistant.components.lock
:members:
:undoc-members:
:show-inheritance:
media\_player
---------------------------
.. automodule:: homeassistant.components.media_player
:members:
:undoc-members:
:show-inheritance:
notify
---------------------------
.. automodule:: homeassistant.components.notify
:members:
:undoc-members:
:show-inheritance:
remote
---------------------------
.. automodule:: homeassistant.components.remote
:members:
:undoc-members:
:show-inheritance:
switch
---------------------------
.. automodule:: homeassistant.components.switch
:members:
:undoc-members:
:show-inheritance:
sensor
-------------------------------------
.. automodule:: homeassistant.components.sensor
:members:
:undoc-members:
:show-inheritance:
vacuum
-------------------------------------
.. automodule:: homeassistant.components.vacuum
:members:
:undoc-members:
:show-inheritance:
water\_heater
-------------------------------------
.. automodule:: homeassistant.components.water_heater
:members:
:undoc-members:
:show-inheritance:
weather
---------------------------
.. automodule:: homeassistant.components.weather
:members:
:undoc-members:
:show-inheritance:
webhook
---------------------------
.. automodule:: homeassistant.components.webhook
:members:
:undoc-members:
:show-inheritance:

View File

@@ -0,0 +1,7 @@
.. _config_entries_module:
:mod:`homeassistant.config_entries`
-----------------------------------
.. automodule:: homeassistant.config_entries
:members:

7
docs/source/api/core.rst Normal file
View File

@@ -0,0 +1,7 @@
.. _core_module:
:mod:`homeassistant.core`
-------------------------
.. automodule:: homeassistant.core
:members:

View File

@@ -0,0 +1,7 @@
.. _data_entry_flow_module:
:mod:`homeassistant.data_entry_flow`
------------------------------------
.. automodule:: homeassistant.data_entry_flow
:members:

View File

@@ -0,0 +1,7 @@
.. _exceptions_module:
:mod:`homeassistant.exceptions`
-------------------------------
.. automodule:: homeassistant.exceptions
:members:

335
docs/source/api/helpers.rst Normal file
View File

@@ -0,0 +1,335 @@
:mod:`homeassistant.helpers`
============================
.. automodule:: homeassistant.helpers
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.aiohttp\_client
-------------------------------------
.. automodule:: homeassistant.helpers.aiohttp_client
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.area\_registry
------------------------------------
.. automodule:: homeassistant.helpers.area_registry
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.check\_config
-----------------------------------
.. automodule:: homeassistant.helpers.check_config
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.collection
--------------------------------
.. automodule:: homeassistant.helpers.collection
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.condition
-------------------------------
.. automodule:: homeassistant.helpers.condition
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.config\_entry\_flow
-----------------------------------------
.. automodule:: homeassistant.helpers.config_entry_flow
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.config\_entry\_oauth2\_flow
-------------------------------------------------
.. automodule:: homeassistant.helpers.config_entry_oauth2_flow
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.config\_validation
----------------------------------------
.. automodule:: homeassistant.helpers.config_validation
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.data\_entry\_flow
---------------------------------------
.. automodule:: homeassistant.helpers.data_entry_flow
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.debounce
------------------------------
.. automodule:: homeassistant.helpers.debounce
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.deprecation
---------------------------------
.. automodule:: homeassistant.helpers.deprecation
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.device\_registry
--------------------------------------
.. automodule:: homeassistant.helpers.device_registry
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.discovery
-------------------------------
.. automodule:: homeassistant.helpers.discovery
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.dispatcher
--------------------------------
.. automodule:: homeassistant.helpers.dispatcher
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.entity
----------------------------
.. automodule:: homeassistant.helpers.entity
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.entity\_component
---------------------------------------
.. automodule:: homeassistant.helpers.entity_component
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.entity\_platform
--------------------------------------
.. automodule:: homeassistant.helpers.entity_platform
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.entity\_registry
--------------------------------------
.. automodule:: homeassistant.helpers.entity_registry
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.entity\_values
------------------------------------
.. automodule:: homeassistant.helpers.entity_values
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.entityfilter
----------------------------------
.. automodule:: homeassistant.helpers.entityfilter
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.event
---------------------------
.. automodule:: homeassistant.helpers.event
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.icon
--------------------------
.. automodule:: homeassistant.helpers.icon
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.integration\_platform
-------------------------------------------
.. automodule:: homeassistant.helpers.integration_platform
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.intent
----------------------------
.. automodule:: homeassistant.helpers.intent
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.json
--------------------------
.. automodule:: homeassistant.helpers.json
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.location
------------------------------
.. automodule:: homeassistant.helpers.location
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.logging
-----------------------------
.. automodule:: homeassistant.helpers.logging
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.network
-----------------------------
.. automodule:: homeassistant.helpers.network
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.restore\_state
------------------------------------
.. automodule:: homeassistant.helpers.restore_state
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.script
----------------------------
.. automodule:: homeassistant.helpers.script
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.service
-----------------------------
.. automodule:: homeassistant.helpers.service
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.signal
-----------------------------
.. automodule:: homeassistant.helpers.signal
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.state
---------------------------
.. automodule:: homeassistant.helpers.state
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.storage
-----------------------------
.. automodule:: homeassistant.helpers.storage
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.sun
-------------------------
.. automodule:: homeassistant.helpers.sun
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.system\_info
----------------------------------
.. automodule:: homeassistant.helpers.system_info
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.temperature
---------------------------------
.. automodule:: homeassistant.helpers.temperature
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.template
------------------------------
.. automodule:: homeassistant.helpers.template
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.translation
---------------------------------
.. automodule:: homeassistant.helpers.translation
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.typing
----------------------------
.. automodule:: homeassistant.helpers.typing
:members:
:undoc-members:
:show-inheritance:
homeassistant.helpers.update\_coordinator
-----------------------------------------
.. automodule:: homeassistant.helpers.update_coordinator
:members:
:undoc-members:
:show-inheritance:

View File

@@ -0,0 +1,7 @@
.. _loader_module:
:mod:`homeassistant.loader`
---------------------------
.. automodule:: homeassistant.loader
:members:

151
docs/source/api/util.rst Normal file
View File

@@ -0,0 +1,151 @@
:mod:`homeassistant.util`
=========================
.. automodule:: homeassistant.util
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.yaml
-----------------------
.. automodule:: homeassistant.util.yaml
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.aiohttp
--------------------------
.. automodule:: homeassistant.util.aiohttp
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.async\_
--------------------------
.. automodule:: homeassistant.util.async_
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.color
------------------------
.. automodule:: homeassistant.util.color
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.decorator
----------------------------
.. automodule:: homeassistant.util.decorator
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.distance
---------------------------
.. automodule:: homeassistant.util.distance
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.dt
---------------------
.. automodule:: homeassistant.util.dt
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.json
-----------------------
.. automodule:: homeassistant.util.json
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.location
---------------------------
.. automodule:: homeassistant.util.location
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.logging
--------------------------
.. automodule:: homeassistant.util.logging
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.network
--------------------------
.. automodule:: homeassistant.util.network
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.package
--------------------------
.. automodule:: homeassistant.util.package
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.pil
----------------------
.. automodule:: homeassistant.util.pil
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.pressure
---------------------------
.. automodule:: homeassistant.util.pressure
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.ssl
----------------------
.. automodule:: homeassistant.util.ssl
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.temperature
------------------------------
.. automodule:: homeassistant.util.temperature
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.unit\_system
-------------------------------
.. automodule:: homeassistant.util.unit_system
:members:
:undoc-members:
:show-inheritance:
homeassistant.util.volume
-------------------------
.. automodule:: homeassistant.util.volume
:members:
:undoc-members:
:show-inheritance:

439
docs/source/conf.py Normal file
View File

@@ -0,0 +1,439 @@
#!/usr/bin/env python3
#
# Home-Assistant documentation build configuration file, created by
# sphinx-quickstart on Sun Aug 28 13:13:10 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import inspect
import os
import sys
from homeassistant.const import __short_version__, __version__
PROJECT_NAME = "Home Assistant"
PROJECT_PACKAGE_NAME = "homeassistant"
PROJECT_AUTHOR = "The Home Assistant Authors"
PROJECT_COPYRIGHT = f"© 2013-2020, {PROJECT_AUTHOR}"
PROJECT_LONG_DESCRIPTION = (
"Home Assistant is an open-source "
"home automation platform running on Python 3. "
"Track and control all devices at home and "
"automate control. "
"Installation in less than a minute."
)
PROJECT_GITHUB_USERNAME = "home-assistant"
PROJECT_GITHUB_REPOSITORY = "home-assistant"
GITHUB_PATH = f"{PROJECT_GITHUB_USERNAME}/{PROJECT_GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
sys.path.insert(0, os.path.abspath("_ext"))
sys.path.insert(0, os.path.abspath("../homeassistant"))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.linkcode",
"sphinx_autodoc_annotation",
"edit_on_github",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = PROJECT_NAME
copyright = PROJECT_COPYRIGHT
author = PROJECT_AUTHOR
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __short_version__
# The full version, including alpha/beta/rc tags.
release = __version__
code_branch = "dev" if "dev" in __version__ else "master"
# Edit on Github config
edit_on_github_project = GITHUB_PATH
edit_on_github_branch = code_branch
edit_on_github_src_path = "docs/source/"
def linkcode_resolve(domain, info):
"""Determine the URL corresponding to Python object."""
if domain != "py":
return None
modname = info["module"]
fullname = info["fullname"]
submod = sys.modules.get(modname)
if submod is None:
return None
obj = submod
for part in fullname.split("."):
try:
obj = getattr(obj, part)
except:
return None
try:
fn = inspect.getsourcefile(obj)
except:
fn = None
if not fn:
return None
try:
source, lineno = inspect.findsource(obj)
except:
lineno = None
if lineno:
linespec = "#L%d" % (lineno + 1)
else:
linespec = ""
index = fn.find("/homeassistant/")
if index == -1:
index = 0
fn = fn[index:]
return f"{GITHUB_URL}/blob/{code_branch}/{fn}{linespec}"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
"logo": "logo.png",
"logo_name": PROJECT_NAME,
"description": PROJECT_LONG_DESCRIPTION,
"github_user": PROJECT_GITHUB_USERNAME,
"github_repo": PROJECT_GITHUB_REPOSITORY,
"github_type": "star",
"github_banner": True,
"touch_icon": "logo-apple.png",
# 'fixed_sidebar': True, # Re-enable when we have more content
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'Home-Assistant v0.27.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = '_static/logo.png'
# The name of an image file (relative to this directory) to use as a favicon of
# the docs.
# This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
html_favicon = "_static/favicon.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
html_last_updated_fmt = "%b %d, %Y"
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
html_sidebars = {
"**": [
"about.html",
"links.html",
"searchbox.html",
"sourcelink.html",
"navigation.html",
"relations.html",
]
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "Home-Assistantdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"home-assistant.tex",
"Home Assistant Documentation",
"Home Assistant Team",
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, "home-assistant", "Home Assistant Documentation", [author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"Home-Assistant",
"Home Assistant Documentation",
author,
"Home Assistant",
"Open-source home automation platform.",
"Miscellaneous",
)
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
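
As an aside, the linkcode_resolve hook defined in this conf.py follows the sphinx.ext.linkcode contract: Sphinx hands it the module path and dotted object name of every documented object and expects a source URL (or None) back. Below is a simplified, self-contained restatement of that lookup; the bare-except error handling of the original is omitted, and the example call is only illustrative.

from __future__ import annotations

import importlib
import inspect

GITHUB_URL = "https://github.com/home-assistant/home-assistant"
CODE_BRANCH = "dev"

def resolve_source_url(modname: str, fullname: str) -> str | None:
    """Map a documented object to its source line on GitHub, like linkcode_resolve."""
    obj = importlib.import_module(modname)
    for part in fullname.split("."):
        obj = getattr(obj, part)
    filename = inspect.getsourcefile(obj)
    if not filename:
        return None
    _, lineno = inspect.findsource(obj)  # 0-based, hence the +1 below
    index = filename.find("/homeassistant/")
    relative = filename[index:] if index != -1 else filename
    return f"{GITHUB_URL}/blob/{CODE_BRANCH}{relative}#L{lineno + 1}"

# Requires Home Assistant to be installed:
# print(resolve_source_url("homeassistant.core", "HomeAssistant"))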

22
docs/source/index.rst Normal file
View File

@@ -0,0 +1,22 @@
================================
Home Assistant API Documentation
================================
Public API documentation for `Home Assistant developers`_.
Contents:
.. toctree::
:maxdepth: 2
:glob:
api/*
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
.. _Home Assistant developers: https://developers.home-assistant.io/

View File

@@ -1,26 +1,14 @@
"""Start Home Assistant."""
from __future__ import annotations
import argparse
import faulthandler
import os
import platform
import subprocess
import sys
import threading
from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__
FAULT_LOG_FILENAME = "home-assistant.log.fault"
def validate_os() -> None:
"""Validate that Home Assistant is running in a supported operating system."""
if not sys.platform.startswith(("darwin", "linux")):
print(
"Home Assistant only supports Linux, OSX and Windows using WSL",
file=sys.stderr,
)
sys.exit(1)
from homeassistant.const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__
def validate_python() -> None:
@@ -28,39 +16,33 @@ def validate_python() -> None:
if sys.version_info[:3] < REQUIRED_PYTHON_VER:
print(
"Home Assistant requires at least Python "
f"{REQUIRED_PYTHON_VER[0]}.{REQUIRED_PYTHON_VER[1]}.{REQUIRED_PYTHON_VER[2]}",
file=sys.stderr,
f"{REQUIRED_PYTHON_VER[0]}.{REQUIRED_PYTHON_VER[1]}.{REQUIRED_PYTHON_VER[2]}"
)
sys.exit(1)
def ensure_config_path(config_dir: str) -> None:
"""Validate the configuration directory."""
# pylint: disable-next=import-outside-toplevel
from . import config as config_util
# pylint: disable=import-outside-toplevel
import homeassistant.config as config_util
lib_dir = os.path.join(config_dir, "deps")
# Test if configuration directory exists
if not os.path.isdir(config_dir):
if config_dir != config_util.get_default_config_dir():
if os.path.exists(config_dir):
reason = "is not a directory"
else:
reason = "does not exist"
print(
f"Fatal Error: Specified configuration directory {config_dir} {reason}",
file=sys.stderr,
f"Fatal Error: Specified configuration directory {config_dir} "
"does not exist"
)
sys.exit(1)
try:
os.mkdir(config_dir)
except OSError as ex:
except OSError:
print(
"Fatal Error: Unable to create default configuration "
f"directory {config_dir}: {ex}",
file=sys.stderr,
f"directory {config_dir}"
)
sys.exit(1)
@@ -68,22 +50,18 @@ def ensure_config_path(config_dir: str) -> None:
if not os.path.isdir(lib_dir):
try:
os.mkdir(lib_dir)
except OSError as ex:
print(
f"Fatal Error: Unable to create library directory {lib_dir}: {ex}",
file=sys.stderr,
)
except OSError:
print(f"Fatal Error: Unable to create library directory {lib_dir}")
sys.exit(1)
def get_arguments() -> argparse.Namespace:
"""Get parsed passed in arguments."""
# pylint: disable-next=import-outside-toplevel
from . import config as config_util
# pylint: disable=import-outside-toplevel
import homeassistant.config as config_util
parser = argparse.ArgumentParser(
description="Home Assistant: Observe, Control, Automate.",
epilog=f"If restart is requested, exits with code {RESTART_EXIT_CODE}",
description="Home Assistant: Observe, Control, Automate."
)
parser.add_argument("--version", action="version", version=__version__)
parser.add_argument(
@@ -94,9 +72,7 @@ def get_arguments() -> argparse.Namespace:
help="Directory that contains the Home Assistant configuration",
)
parser.add_argument(
"--recovery-mode",
action="store_true",
help="Start Home Assistant in recovery mode",
"--safe-mode", action="store_true", help="Start Home Assistant in safe mode"
)
parser.add_argument(
"--debug", action="store_true", help="Start Home Assistant in debug mode"
@@ -104,24 +80,20 @@ def get_arguments() -> argparse.Namespace:
parser.add_argument(
"--open-ui", action="store_true", help="Open the webinterface in a browser"
)
skip_pip_group = parser.add_mutually_exclusive_group()
skip_pip_group.add_argument(
parser.add_argument(
"--skip-pip",
action="store_true",
help="Skips pip install of required packages on startup",
)
skip_pip_group.add_argument(
"--skip-pip-packages",
metavar="package_names",
type=lambda arg: arg.split(","),
default=[],
help="Skip pip install of specific packages on startup",
)
parser.add_argument(
"-v", "--verbose", action="store_true", help="Enable verbose logging to file."
)
parser.add_argument(
"--pid-file",
metavar="path_to_pid_file",
default=None,
help="Path to PID file useful for running as daemon",
)
parser.add_argument(
"--log-rotate-days",
type=int,
@@ -138,21 +110,123 @@ def get_arguments() -> argparse.Namespace:
"--log-no-color", action="store_true", help="Disable color logs"
)
parser.add_argument(
"--script", nargs=argparse.REMAINDER, help="Run one of the embedded scripts"
"--runner",
action="store_true",
help=f"On restart exit with code {RESTART_EXIT_CODE}",
)
parser.add_argument(
"--ignore-os-check",
action="store_true",
help="Skips validation of operating system",
"--script", nargs=argparse.REMAINDER, help="Run one of the embedded scripts"
)
if os.name == "posix":
parser.add_argument(
"--daemon", action="store_true", help="Run Home Assistant as daemon"
)
arguments = parser.parse_args()
if os.name != "posix" or arguments.debug or arguments.runner:
setattr(arguments, "daemon", False)
return arguments
def check_threads() -> None:
"""Check if there are any lingering threads."""
def daemonize() -> None:
"""Move current process to daemon process."""
# Create first fork
pid = os.fork()
if pid > 0:
sys.exit(0)
# Decouple fork
os.setsid()
# Create second fork
pid = os.fork()
if pid > 0:
sys.exit(0)
# redirect standard file descriptors to devnull
# pylint: disable=consider-using-with
infd = open(os.devnull, encoding="utf8")
outfd = open(os.devnull, "a+", encoding="utf8")
sys.stdout.flush()
sys.stderr.flush()
os.dup2(infd.fileno(), sys.stdin.fileno())
os.dup2(outfd.fileno(), sys.stdout.fileno())
os.dup2(outfd.fileno(), sys.stderr.fileno())
def check_pid(pid_file: str) -> None:
"""Check that Home Assistant is not already running."""
# Check pid file
try:
with open(pid_file, encoding="utf8") as file:
pid = int(file.readline())
except OSError:
# PID File does not exist
return
# If we just restarted, we just found our own pidfile.
if pid == os.getpid():
return
try:
os.kill(pid, 0)
except OSError:
# PID does not exist
return
print("Fatal Error: Home Assistant is already running.")
sys.exit(1)
def write_pid(pid_file: str) -> None:
"""Create a PID File."""
pid = os.getpid()
try:
with open(pid_file, "w", encoding="utf8") as file:
file.write(str(pid))
except OSError:
print(f"Fatal Error: Unable to write pid file {pid_file}")
sys.exit(1)
def closefds_osx(min_fd: int, max_fd: int) -> None:
"""Make sure file descriptors get closed when we restart.
We cannot call close on guarded fds, and we cannot easily test which fds
are guarded. But we can set the close-on-exec flag on everything we want to
get rid of.
"""
# pylint: disable=import-outside-toplevel
from fcntl import F_GETFD, F_SETFD, FD_CLOEXEC, fcntl
for _fd in range(min_fd, max_fd):
try:
val = fcntl(_fd, F_GETFD)
if not val & FD_CLOEXEC:
fcntl(_fd, F_SETFD, val | FD_CLOEXEC)
except OSError:
pass
def cmdline() -> list[str]:
"""Collect path and arguments to re-execute the current hass instance."""
if os.path.basename(sys.argv[0]) == "__main__.py":
modulepath = os.path.dirname(sys.argv[0])
os.environ["PYTHONPATH"] = os.path.dirname(modulepath)
return [sys.executable] + [arg for arg in sys.argv if arg != "--daemon"]
return [arg for arg in sys.argv if arg != "--daemon"]
def try_to_restart() -> None:
"""Attempt to clean up state and start a new Home Assistant instance."""
# Things should be mostly shut down already at this point, now just try
# to clean up things that may have been left behind.
sys.stderr.write("Home Assistant attempting to restart.\n")
# Count remaining threads, ideally there should only be one non-daemonized
# thread left (which is us). Nothing we really do with it, but it might be
# useful when debugging shutdown/restart issues.
try:
nthreads = sum(
thread.is_alive() and not thread.daemon for thread in threading.enumerate()
@@ -166,29 +240,64 @@ def check_threads() -> None:
except AssertionError:
sys.stderr.write("Failed to count non-daemonic threads.\n")
# Try to not leave behind open filedescriptors with the emphasis on try.
try:
max_fd = os.sysconf("SC_OPEN_MAX")
except ValueError:
max_fd = 256
if platform.system() == "Darwin":
closefds_osx(3, max_fd)
else:
os.closerange(3, max_fd)
# Now launch into a new instance of Home Assistant. If this fails we
# fall through and exit with error 100 (RESTART_EXIT_CODE) in which case
# systemd will restart us when RestartForceExitStatus=100 is set in the
# systemd.service file.
sys.stderr.write("Restarting Home Assistant\n")
args = cmdline()
os.execv(args[0], args)
def main() -> int:
"""Start Home Assistant."""
validate_python()
# Run a simple daemon runner process on Windows to handle restarts
if os.name == "nt" and "--runner" not in sys.argv:
nt_args = cmdline() + ["--runner"]
while True:
try:
subprocess.check_call(nt_args)
sys.exit(0)
except KeyboardInterrupt:
sys.exit(0)
except subprocess.CalledProcessError as exc:
if exc.returncode != RESTART_EXIT_CODE:
sys.exit(exc.returncode)
args = get_arguments()
if not args.ignore_os_check:
validate_os()
if args.script is not None:
# pylint: disable-next=import-outside-toplevel
from . import scripts
# pylint: disable=import-outside-toplevel
from homeassistant import scripts
return scripts.run(args.script)
config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config))
ensure_config_path(config_dir)
# pylint: disable-next=import-outside-toplevel
from . import config, runner
# Daemon functions
if args.pid_file:
check_pid(args.pid_file)
if args.daemon:
daemonize()
if args.pid_file:
write_pid(args.pid_file)
safe_mode = config.safe_mode_enabled(config_dir)
# pylint: disable=import-outside-toplevel
from homeassistant import runner
runtime_conf = runner.RuntimeConfig(
config_dir=config_dir,
@@ -197,23 +306,14 @@ def main() -> int:
log_file=args.log_file,
log_no_color=args.log_no_color,
skip_pip=args.skip_pip,
skip_pip_packages=args.skip_pip_packages,
recovery_mode=args.recovery_mode,
safe_mode=args.safe_mode,
debug=args.debug,
open_ui=args.open_ui,
safe_mode=safe_mode,
)
fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
faulthandler.enable(fault_file)
exit_code = runner.run(runtime_conf)
faulthandler.disable()
if os.path.getsize(fault_file_name) == 0:
os.remove(fault_file_name)
check_threads()
exit_code = runner.run(runtime_conf)
if exit_code == RESTART_EXIT_CODE and not args.runner:
try_to_restart()
return exit_code

View File

@@ -1,41 +1,29 @@
"""Provide an authentication layer for Home Assistant."""
from __future__ import annotations
import asyncio
from collections import OrderedDict
from collections.abc import Mapping
from datetime import datetime, timedelta
from functools import partial
import time
from typing import Any, cast
from datetime import timedelta
from typing import Any, Dict, Mapping, Optional, Tuple, cast
import jwt
from homeassistant import data_entry_flow
from homeassistant.core import (
CALLBACK_TYPE,
HassJob,
HassJobType,
HomeAssistant,
callback,
)
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.util import dt as dt_util
from . import auth_store, jwt_wrapper, models
from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN, REFRESH_TOKEN_EXPIRATION
from . import auth_store, models
from .const import ACCESS_TOKEN_EXPIRATION, GROUP_ID_ADMIN
from .mfa_modules import MultiFactorAuthModule, auth_mfa_module_from_config
from .models import AuthFlowResult
from .providers import AuthProvider, LoginFlow, auth_provider_from_config
EVENT_USER_ADDED = "user_added"
EVENT_USER_UPDATED = "user_updated"
EVENT_USER_REMOVED = "user_removed"
_MfaModuleDict = dict[str, MultiFactorAuthModule]
_ProviderKey = tuple[str, str | None]
_ProviderDict = dict[_ProviderKey, AuthProvider]
_MfaModuleDict = Dict[str, MultiFactorAuthModule]
_ProviderKey = Tuple[str, Optional[str]]
_ProviderDict = Dict[_ProviderKey, AuthProvider]
class InvalidAuthError(Exception):
@@ -57,7 +45,6 @@ async def auth_manager_from_config(
mfa modules exist in configs.
"""
store = auth_store.AuthStore(hass)
await store.async_load()
if provider_configs:
providers = await asyncio.gather(
*(
@@ -85,17 +72,12 @@ async def auth_manager_from_config(
module_hash[module.id] = module
manager = AuthManager(hass, store, provider_hash, module_hash)
manager.async_setup()
return manager
class AuthManagerFlowManager(
data_entry_flow.FlowManager[AuthFlowResult, tuple[str, str]]
):
class AuthManagerFlowManager(data_entry_flow.FlowManager):
"""Manage authentication flows."""
_flow_result = AuthFlowResult
def __init__(self, hass: HomeAssistant, auth_manager: AuthManager) -> None:
"""Init auth manager flows."""
super().__init__(hass)
@@ -103,11 +85,11 @@ class AuthManagerFlowManager(
async def async_create_flow(
self,
handler_key: tuple[str, str],
handler_key: Any,
*,
context: dict[str, Any] | None = None,
data: dict[str, Any] | None = None,
) -> LoginFlow:
) -> data_entry_flow.FlowHandler:
"""Create a login flow."""
auth_provider = self.auth_manager.get_auth_provider(*handler_key)
if not auth_provider:
@@ -115,14 +97,12 @@ class AuthManagerFlowManager(
return await auth_provider.async_login_flow(context)
async def async_finish_flow(
self,
flow: data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]],
result: AuthFlowResult,
) -> AuthFlowResult:
self, flow: data_entry_flow.FlowHandler, result: FlowResult
) -> FlowResult:
"""Return a user as result of login flow."""
flow = cast(LoginFlow, flow)
if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
return result
# we got final result
@@ -175,22 +155,6 @@ class AuthManager:
self._providers = providers
self._mfa_modules = mfa_modules
self.login_flow = AuthManagerFlowManager(hass, self)
self._revoke_callbacks: dict[str, set[CALLBACK_TYPE]] = {}
self._expire_callback: CALLBACK_TYPE | None = None
self._remove_expired_job = HassJob(
self._async_remove_expired_refresh_tokens, job_type=HassJobType.Callback
)
@callback
def async_setup(self) -> None:
"""Set up the auth manager."""
hass = self.hass
hass.async_add_shutdown_job(
HassJob(
self._async_cancel_expiration_schedule, job_type=HassJobType.Callback
)
)
self._async_track_next_refresh_token_expiration()
@property
def auth_providers(self) -> list[AuthProvider]:
@@ -249,19 +213,11 @@ class AuthManager:
return None
async def async_create_system_user(
self,
name: str,
*,
group_ids: list[str] | None = None,
local_only: bool | None = None,
self, name: str, group_ids: list[str] | None = None
) -> models.User:
"""Create a system user."""
user = await self._store.async_create_user(
name=name,
system_generated=True,
is_active=True,
group_ids=group_ids or [],
local_only=local_only,
name=name, system_generated=True, is_active=True, group_ids=group_ids or []
)
self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
@@ -269,18 +225,13 @@ class AuthManager:
return user
async def async_create_user(
self,
name: str,
*,
group_ids: list[str] | None = None,
local_only: bool | None = None,
self, name: str, group_ids: list[str] | None = None
) -> models.User:
"""Create a user."""
kwargs: dict[str, Any] = {
"name": name,
"is_active": True,
"group_ids": group_ids or [],
"local_only": local_only,
}
if await self._user_should_be_owner():
@@ -313,8 +264,7 @@ class AuthManager:
credentials=credentials,
name=info.name,
is_active=info.is_active,
group_ids=[GROUP_ID_ADMIN if info.group is None else info.group],
local_only=info.local_only,
group_ids=[GROUP_ID_ADMIN],
)
self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
@@ -325,12 +275,6 @@ class AuthManager:
self, user: models.User, credentials: models.Credentials
) -> None:
"""Link credentials to an existing user."""
linked_user = await self.async_get_user_by_credentials(credentials)
if linked_user == user:
return
if linked_user is not None:
raise ValueError("Credential is already linked to a user")
await self._store.async_link_user(user, credentials)
async def async_remove_user(self, user: models.User) -> None:
@@ -341,7 +285,7 @@ class AuthManager:
]
if tasks:
await asyncio.gather(*tasks)
await asyncio.wait(tasks)
await self._store.async_remove_user(user)
@@ -353,18 +297,13 @@ class AuthManager:
name: str | None = None,
is_active: bool | None = None,
group_ids: list[str] | None = None,
local_only: bool | None = None,
) -> None:
"""Update a user."""
kwargs: dict[str, Any] = {}
for attr_name, value in (
("name", name),
("group_ids", group_ids),
("local_only", local_only),
):
if value is not None:
kwargs[attr_name] = value
if name is not None:
kwargs["name"] = name
if group_ids is not None:
kwargs["group_ids"] = group_ids
await self._store.async_update_user(user, **kwargs)
if is_active is not None:
@@ -373,8 +312,6 @@ class AuthManager:
else:
await self.async_deactivate_user(user)
self.hass.bus.async_fire(EVENT_USER_UPDATED, {"user_id": user.id})
async def async_activate_user(self, user: models.User) -> None:
"""Activate a user."""
await self._store.async_activate_user(user)
@@ -390,7 +327,8 @@ class AuthManager:
provider = self._async_get_auth_provider(credentials)
if provider is not None and hasattr(provider, "async_will_remove_credentials"):
await provider.async_will_remove_credentials(credentials)
# https://github.com/python/mypy/issues/1424
await provider.async_will_remove_credentials(credentials) # type: ignore
await self._store.async_remove_credentials(credentials)
@@ -403,7 +341,8 @@ class AuthManager:
"System generated users cannot enable multi-factor auth module."
)
if (module := self.get_auth_mfa_module(mfa_module_id)) is None:
module = self.get_auth_mfa_module(mfa_module_id)
if module is None:
raise ValueError(f"Unable find multi-factor auth module: {mfa_module_id}")
await module.async_setup_user(user.id, data)
@@ -417,7 +356,8 @@ class AuthManager:
"System generated users cannot disable multi-factor auth module."
)
if (module := self.get_auth_mfa_module(mfa_module_id)) is None:
module = self.get_auth_mfa_module(mfa_module_id)
if module is None:
raise ValueError(f"Unable find multi-factor auth module: {mfa_module_id}")
await module.async_depose_user(user.id)
@@ -456,11 +396,6 @@ class AuthManager:
else:
token_type = models.TOKEN_TYPE_NORMAL
if token_type is models.TOKEN_TYPE_NORMAL:
expire_at = time.time() + REFRESH_TOKEN_EXPIRATION
else:
expire_at = None
if user.system_generated != (token_type == models.TOKEN_TYPE_SYSTEM):
raise ValueError(
"System generated users can only have system type refresh tokens"
@@ -492,81 +427,26 @@ class AuthManager:
client_icon,
token_type,
access_token_expiration,
expire_at,
credential,
)
@callback
def async_get_refresh_token(self, token_id: str) -> models.RefreshToken | None:
async def async_get_refresh_token(
self, token_id: str
) -> models.RefreshToken | None:
"""Get refresh token by id."""
return self._store.async_get_refresh_token(token_id)
return await self._store.async_get_refresh_token(token_id)
@callback
def async_get_refresh_token_by_token(
async def async_get_refresh_token_by_token(
self, token: str
) -> models.RefreshToken | None:
"""Get refresh token by token."""
return self._store.async_get_refresh_token_by_token(token)
return await self._store.async_get_refresh_token_by_token(token)
@callback
def async_remove_refresh_token(self, refresh_token: models.RefreshToken) -> None:
"""Delete a refresh token."""
self._store.async_remove_refresh_token(refresh_token)
callbacks = self._revoke_callbacks.pop(refresh_token.id, ())
for revoke_callback in callbacks:
revoke_callback()
@callback
def _async_remove_expired_refresh_tokens(self, _: datetime | None = None) -> None:
"""Remove expired refresh tokens."""
now = time.time()
for token in self._store.async_get_refresh_tokens():
if (expire_at := token.expire_at) is not None and expire_at <= now:
self.async_remove_refresh_token(token)
self._async_track_next_refresh_token_expiration()
@callback
def _async_track_next_refresh_token_expiration(self) -> None:
"""Initialise all token expiration scheduled tasks."""
next_expiration = time.time() + REFRESH_TOKEN_EXPIRATION
for token in self._store.async_get_refresh_tokens():
if (
expire_at := token.expire_at
) is not None and expire_at < next_expiration:
next_expiration = expire_at
self._expire_callback = async_track_point_in_utc_time(
self.hass,
self._remove_expired_job,
dt_util.utc_from_timestamp(next_expiration),
)
@callback
def _async_cancel_expiration_schedule(self) -> None:
"""Cancel tracking of expired refresh tokens."""
if self._expire_callback:
self._expire_callback()
self._expire_callback = None
@callback
def _async_unregister(
self, callbacks: set[CALLBACK_TYPE], callback_: CALLBACK_TYPE
async def async_remove_refresh_token(
self, refresh_token: models.RefreshToken
) -> None:
"""Unregister a callback."""
callbacks.remove(callback_)
@callback
def async_register_revoke_token_callback(
self, refresh_token_id: str, revoke_callback: CALLBACK_TYPE
) -> CALLBACK_TYPE:
"""Register a callback to be called when the refresh token id is revoked."""
if refresh_token_id not in self._revoke_callbacks:
self._revoke_callbacks[refresh_token_id] = set()
callbacks = self._revoke_callbacks[refresh_token_id]
callbacks.add(revoke_callback)
return partial(self._async_unregister, callbacks, revoke_callback)
"""Delete a refresh token."""
await self._store.async_remove_refresh_token(refresh_token)
@callback
def async_create_access_token(
@@ -577,17 +457,16 @@ class AuthManager:
self._store.async_log_refresh_token_usage(refresh_token, remote_ip)
now = int(time.time())
expire_seconds = int(refresh_token.access_token_expiration.total_seconds())
now = dt_util.utcnow()
return jwt.encode(
{
"iss": refresh_token.id,
"iat": now,
"exp": now + expire_seconds,
"exp": now + refresh_token.access_token_expiration,
},
refresh_token.jwt_key,
algorithm="HS256",
)
).decode()
@callback
def _async_resolve_provider(
@@ -607,8 +486,7 @@ class AuthManager:
)
if provider is None:
raise InvalidProvider(
f"Auth provider {refresh_token.credential.auth_provider_type},"
f" {refresh_token.credential.auth_provider_id} not available"
f"Auth provider {refresh_token.credential.auth_provider_type}, {refresh_token.credential.auth_provider_id} not available"
)
return provider
@@ -620,18 +498,20 @@ class AuthManager:
Will raise InvalidAuthError on errors.
"""
if provider := self._async_resolve_provider(refresh_token):
provider = self._async_resolve_provider(refresh_token)
if provider:
provider.async_validate_refresh_token(refresh_token, remote_ip)
@callback
def async_validate_access_token(self, token: str) -> models.RefreshToken | None:
async def async_validate_access_token(
self, token: str
) -> models.RefreshToken | None:
"""Return refresh token if an access token is valid."""
try:
unverif_claims = jwt_wrapper.unverified_hs256_token_decode(token)
unverif_claims = jwt.decode(token, verify=False)
except jwt.InvalidTokenError:
return None
refresh_token = self.async_get_refresh_token(
refresh_token = await self.async_get_refresh_token(
cast(str, unverif_claims.get("iss"))
)
@@ -643,9 +523,7 @@ class AuthManager:
issuer = refresh_token.id
try:
jwt_wrapper.verify_and_decode(
token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"]
)
jwt.decode(token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"])
except jwt.InvalidTokenError:
return None
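
The access-token code in the diff above encodes a short-lived HS256 JWT whose issuer ("iss") is the refresh-token id, and validation first reads the unverified claims to find that issuer before verifying the signature with the per-token key. A minimal self-contained round trip of that idea with PyJWT 2.x follows; the key, issuer and lifetime values are made up for the sketch.

import time

import jwt  # PyJWT

JWT_KEY = "example-per-token-secret"   # stand-in for refresh_token.jwt_key
ISSUER = "example-refresh-token-id"    # stand-in for refresh_token.id
EXPIRE_SECONDS = 30 * 60               # roughly ACCESS_TOKEN_EXPIRATION

now = int(time.time())
access_token = jwt.encode(
    {"iss": ISSUER, "iat": now, "exp": now + EXPIRE_SECONDS},
    JWT_KEY,
    algorithm="HS256",
)

# Read the issuer without verifying, then verify signature, expiry and issuer,
# mirroring async_validate_access_token above.
unverified = jwt.decode(access_token, options={"verify_signature": False})
claims = jwt.decode(
    access_token,
    JWT_KEY,
    leeway=10,
    issuer=unverified["iss"],
    algorithms=["HS256"],
)
print(claims["iss"])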

View File

@@ -1,47 +1,30 @@
"""Storage for auth models."""
from __future__ import annotations
import asyncio
from collections import OrderedDict
from datetime import timedelta
import hmac
import itertools
from logging import getLogger
from typing import Any
from homeassistant.auth.const import ACCESS_TOKEN_EXPIRATION
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.storage import Store
from homeassistant.util import dt as dt_util
from . import models
from .const import (
ACCESS_TOKEN_EXPIRATION,
GROUP_ID_ADMIN,
GROUP_ID_READ_ONLY,
GROUP_ID_USER,
REFRESH_TOKEN_EXPIRATION,
)
from .permissions import system_policies
from .permissions.models import PermissionLookup
from .const import GROUP_ID_ADMIN, GROUP_ID_READ_ONLY, GROUP_ID_USER
from .permissions import PermissionLookup, system_policies
from .permissions.types import PolicyType
# mypy: disallow-any-generics
STORAGE_VERSION = 1
STORAGE_KEY = "auth"
GROUP_NAME_ADMIN = "Administrators"
GROUP_NAME_USER = "Users"
GROUP_NAME_READ_ONLY = "Read Only"
# We always save the auth store after we load it since
# we may migrate data and do not want to have to do it again
# but we don't want to do it during startup so we schedule
# the first save 5 minutes out knowing something else may
# want to save the auth store before then, and since Storage
# will honor the lower of the two delays, it will save it
# faster if something else saves it.
INITIAL_LOAD_SAVE_DELAY = 300
DEFAULT_SAVE_DELAY = 1
class AuthStore:
"""Stores authentication info.
@@ -55,28 +38,44 @@ class AuthStore:
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the auth store."""
self.hass = hass
self._loaded = False
self._users: dict[str, models.User] = None # type: ignore[assignment]
self._groups: dict[str, models.Group] = None # type: ignore[assignment]
self._perm_lookup: PermissionLookup = None # type: ignore[assignment]
self._store = Store[dict[str, list[dict[str, Any]]]](
hass, STORAGE_VERSION, STORAGE_KEY, private=True, atomic_writes=True
self._users: dict[str, models.User] | None = None
self._groups: dict[str, models.Group] | None = None
self._perm_lookup: PermissionLookup | None = None
self._store = hass.helpers.storage.Store(
STORAGE_VERSION, STORAGE_KEY, private=True
)
self._lock = asyncio.Lock()
async def async_get_groups(self) -> list[models.Group]:
"""Retrieve all users."""
if self._groups is None:
await self._async_load()
assert self._groups is not None
return list(self._groups.values())
async def async_get_group(self, group_id: str) -> models.Group | None:
"""Retrieve all users."""
if self._groups is None:
await self._async_load()
assert self._groups is not None
return self._groups.get(group_id)
async def async_get_users(self) -> list[models.User]:
"""Retrieve all users."""
if self._users is None:
await self._async_load()
assert self._users is not None
return list(self._users.values())
async def async_get_user(self, user_id: str) -> models.User | None:
"""Retrieve a user by id."""
if self._users is None:
await self._async_load()
assert self._users is not None
return self._users.get(user_id)
async def async_create_user(
@@ -87,12 +86,18 @@ class AuthStore:
system_generated: bool | None = None,
credentials: models.Credentials | None = None,
group_ids: list[str] | None = None,
local_only: bool | None = None,
) -> models.User:
"""Create a new user."""
if self._users is None:
await self._async_load()
assert self._users is not None
assert self._groups is not None
groups = []
for group_id in group_ids or []:
if (group := self._groups.get(group_id)) is None:
group = self._groups.get(group_id)
if group is None:
raise ValueError(f"Invalid group specified {group_id}")
groups.append(group)
@@ -104,14 +109,14 @@ class AuthStore:
"perm_lookup": self._perm_lookup,
}
for attr_name, value in (
("is_owner", is_owner),
("is_active", is_active),
("local_only", local_only),
("system_generated", system_generated),
):
if value is not None:
kwargs[attr_name] = value
if is_owner is not None:
kwargs["is_owner"] = is_owner
if is_active is not None:
kwargs["is_active"] = is_active
if system_generated is not None:
kwargs["system_generated"] = system_generated
new_user = models.User(**kwargs)
@@ -135,6 +140,10 @@ class AuthStore:
async def async_remove_user(self, user: models.User) -> None:
"""Remove a user."""
if self._users is None:
await self._async_load()
assert self._users is not None
self._users.pop(user.id)
self._async_schedule_save()
@@ -144,23 +153,22 @@ class AuthStore:
name: str | None = None,
is_active: bool | None = None,
group_ids: list[str] | None = None,
local_only: bool | None = None,
) -> None:
"""Update a user."""
assert self._groups is not None
if group_ids is not None:
groups = []
for grid in group_ids:
if (group := self._groups.get(grid)) is None:
group = self._groups.get(grid)
if group is None:
raise ValueError("Invalid group specified.")
groups.append(group)
user.groups = groups
user.invalidate_permission_cache()
for attr_name, value in (
("name", name),
("is_active", is_active),
("local_only", local_only),
):
for attr_name, value in (("name", name), ("is_active", is_active)):
if value is not None:
setattr(user, attr_name, value)
@@ -178,6 +186,10 @@ class AuthStore:
async def async_remove_credentials(self, credentials: models.Credentials) -> None:
"""Remove credentials."""
if self._users is None:
await self._async_load()
assert self._users is not None
for user in self._users.values():
found = None
@@ -200,7 +212,6 @@ class AuthStore:
client_icon: str | None = None,
token_type: str = models.TOKEN_TYPE_NORMAL,
access_token_expiration: timedelta = ACCESS_TOKEN_EXPIRATION,
expire_at: float | None = None,
credential: models.Credentials | None = None,
) -> models.RefreshToken:
"""Create a new token for a user."""
@@ -209,7 +220,6 @@ class AuthStore:
"client_id": client_id,
"token_type": token_type,
"access_token_expiration": access_token_expiration,
"expire_at": expire_at,
"credential": credential,
}
if client_name:
@@ -223,17 +233,27 @@ class AuthStore:
self._async_schedule_save()
return refresh_token
@callback
def async_remove_refresh_token(self, refresh_token: models.RefreshToken) -> None:
async def async_remove_refresh_token(
self, refresh_token: models.RefreshToken
) -> None:
"""Remove a refresh token."""
if self._users is None:
await self._async_load()
assert self._users is not None
for user in self._users.values():
if user.refresh_tokens.pop(refresh_token.id, None):
self._async_schedule_save()
break
@callback
def async_get_refresh_token(self, token_id: str) -> models.RefreshToken | None:
async def async_get_refresh_token(
self, token_id: str
) -> models.RefreshToken | None:
"""Get refresh token by id."""
if self._users is None:
await self._async_load()
assert self._users is not None
for user in self._users.values():
refresh_token = user.refresh_tokens.get(token_id)
if refresh_token is not None:
@@ -241,11 +261,14 @@ class AuthStore:
return None
@callback
def async_get_refresh_token_by_token(
async def async_get_refresh_token_by_token(
self, token: str
) -> models.RefreshToken | None:
"""Get refresh token by token."""
if self._users is None:
await self._async_load()
assert self._users is not None
found = None
for user in self._users.values():
@@ -255,15 +278,6 @@ class AuthStore:
return found
@callback
def async_get_refresh_tokens(self) -> list[models.RefreshToken]:
"""Get all refresh tokens."""
return list(
itertools.chain.from_iterable(
user.refresh_tokens.values() for user in self._users.values()
)
)
@callback
def async_log_refresh_token_usage(
self, refresh_token: models.RefreshToken, remote_ip: str | None = None
@@ -271,34 +285,37 @@ class AuthStore:
"""Update refresh token last used information."""
refresh_token.last_used_at = dt_util.utcnow()
refresh_token.last_used_ip = remote_ip
if refresh_token.expire_at:
refresh_token.expire_at = (
refresh_token.last_used_at.timestamp() + REFRESH_TOKEN_EXPIRATION
)
self._async_schedule_save()
async def async_load(self) -> None: # noqa: C901
async def _async_load(self) -> None:
"""Load the users."""
if self._loaded:
raise RuntimeError("Auth storage is already loaded")
self._loaded = True
async with self._lock:
if self._users is not None:
return
await self._async_load_task()
dev_reg = dr.async_get(self.hass)
ent_reg = er.async_get(self.hass)
data = await self._store.async_load()
async def _async_load_task(self) -> None:
"""Load the users."""
[ent_reg, dev_reg, data] = await asyncio.gather(
self.hass.helpers.entity_registry.async_get_registry(),
self.hass.helpers.device_registry.async_get_registry(),
self._store.async_load(),
)
perm_lookup = PermissionLookup(ent_reg, dev_reg)
self._perm_lookup = perm_lookup
# Make sure that we're not overriding data if 2 loads happened at the
# same time
if self._users is not None:
return
now_ts = dt_util.utcnow().timestamp()
self._perm_lookup = perm_lookup = PermissionLookup(ent_reg, dev_reg)
if data is None or not isinstance(data, dict):
if data is None:
self._set_defaults()
return
users: dict[str, models.User] = {}
groups: dict[str, models.Group] = {}
credentials: dict[str, models.Credentials] = {}
users: dict[str, models.User] = OrderedDict()
groups: dict[str, models.Group] = OrderedDict()
credentials: dict[str, models.Credentials] = OrderedDict()
# Soft-migrating data as we load. We are going to make sure we have a
# read only group and an admin group. There are two states that we can
@@ -402,8 +419,6 @@ class AuthStore:
is_active=user_dict["is_active"],
system_generated=user_dict["system_generated"],
perm_lookup=perm_lookup,
# New in 2021.11
local_only=user_dict.get("local_only", False),
)
for cred_dict in data["credentials"]:
@@ -425,34 +440,26 @@ class AuthStore:
created_at = dt_util.parse_datetime(rt_dict["created_at"])
if created_at is None:
getLogger(__name__).error(
(
"Ignoring refresh token %(id)s with invalid created_at "
"%(created_at)s for user_id %(user_id)s"
),
"Ignoring refresh token %(id)s with invalid created_at "
"%(created_at)s for user_id %(user_id)s",
rt_dict,
)
continue
if (token_type := rt_dict.get("token_type")) is None:
token_type = rt_dict.get("token_type")
if token_type is None:
if rt_dict["client_id"] is None:
token_type = models.TOKEN_TYPE_SYSTEM
else:
token_type = models.TOKEN_TYPE_NORMAL
# old refresh_token don't have last_used_at (pre-0.78)
if last_used_at_str := rt_dict.get("last_used_at"):
last_used_at_str = rt_dict.get("last_used_at")
if last_used_at_str:
last_used_at = dt_util.parse_datetime(last_used_at_str)
else:
last_used_at = None
if (
expire_at := rt_dict.get("expire_at")
) is None and token_type == models.TOKEN_TYPE_NORMAL:
if last_used_at:
expire_at = last_used_at.timestamp() + REFRESH_TOKEN_EXPIRATION
else:
expire_at = now_ts + REFRESH_TOKEN_EXPIRATION
token = models.RefreshToken(
id=rt_dict["id"],
user=users[rt_dict["user_id"]],
@@ -469,26 +476,28 @@ class AuthStore:
jwt_key=rt_dict["jwt_key"],
last_used_at=last_used_at,
last_used_ip=rt_dict.get("last_used_ip"),
expire_at=expire_at,
credential=credentials.get(rt_dict.get("credential_id")),
version=rt_dict.get("version"),
)
if "credential_id" in rt_dict:
token.credential = credentials.get(rt_dict["credential_id"])
users[rt_dict["user_id"]].refresh_tokens[token.id] = token
self._groups = groups
self._users = users
self._async_schedule_save(INITIAL_LOAD_SAVE_DELAY)
@callback
def _async_schedule_save(self, delay: float = DEFAULT_SAVE_DELAY) -> None:
def _async_schedule_save(self) -> None:
"""Save users."""
self._store.async_delay_save(self._data_to_save, delay)
if self._users is None:
return
self._store.async_delay_save(self._data_to_save, 1)
@callback
def _data_to_save(self) -> dict[str, list[dict[str, Any]]]:
"""Return the data to store."""
assert self._users is not None
assert self._groups is not None
users = [
{
"id": user.id,
@@ -497,7 +506,6 @@ class AuthStore:
"is_active": user.is_active,
"name": user.name,
"system_generated": user.system_generated,
"local_only": user.local_only,
}
for user in self._users.values()
]
@@ -536,16 +544,13 @@ class AuthStore:
"client_icon": refresh_token.client_icon,
"token_type": refresh_token.token_type,
"created_at": refresh_token.created_at.isoformat(),
"access_token_expiration": (
refresh_token.access_token_expiration.total_seconds()
),
"access_token_expiration": refresh_token.access_token_expiration.total_seconds(),
"token": refresh_token.token,
"jwt_key": refresh_token.jwt_key,
"last_used_at": refresh_token.last_used_at.isoformat()
if refresh_token.last_used_at
else None,
"last_used_ip": refresh_token.last_used_ip,
"expire_at": refresh_token.expire_at,
"credential_id": refresh_token.credential.id
if refresh_token.credential
else None,
@@ -564,9 +569,9 @@ class AuthStore:
def _set_defaults(self) -> None:
"""Set default values for auth store."""
self._users = {}
self._users = OrderedDict()
groups: dict[str, models.Group] = {}
groups: dict[str, models.Group] = OrderedDict()
admin_group = _system_admin_group()
groups[admin_group.id] = admin_group
user_group = _system_user_group()
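Two patterns are combined in the load path above: a lock-guarded lazy load, so that concurrent callers trigger exactly one load, and asyncio.gather to await independent I/O (entity registry, device registry, stored auth data) at the same time. A small stand-alone sketch of the same shape, with made-up names:

from __future__ import annotations

import asyncio
from typing import Any


class LazyStore:
    def __init__(self) -> None:
        self._data: dict[str, Any] | None = None
        self._lock = asyncio.Lock()

    async def _load_part(self, name: str) -> dict[str, Any]:
        await asyncio.sleep(0)  # stand-in for registry / storage I/O
        return {name: True}

    async def async_load(self) -> dict[str, Any]:
        if self._data is None:
            async with self._lock:
                # Re-check: another caller may have finished the load while
                # we were waiting for the lock.
                if self._data is None:
                    users, groups = await asyncio.gather(
                        self._load_part("users"), self._load_part("groups")
                    )
                    self._data = {**users, **groups}
        return self._data


async def main() -> None:
    store = LazyStore()
    # Both calls resolve to the same loaded data; only one load runs.
    print(await asyncio.gather(store.async_load(), store.async_load()))


asyncio.run(main())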

View File

@@ -1,10 +1,8 @@
"""Constants for the auth module."""
from datetime import timedelta
ACCESS_TOKEN_EXPIRATION = timedelta(minutes=30)
MFA_SESSION_EXPIRATION = timedelta(minutes=5)
REFRESH_TOKEN_EXPIRATION = timedelta(days=90).total_seconds()
GROUP_ID_ADMIN = "system-admin"
GROUP_ID_USER = "system-users"

View File

@@ -1,117 +0,0 @@
"""Provide a wrapper around JWT that caches decoding tokens.
Since we decode the same tokens over and over again
we can cache the result of the decode of valid tokens
to speed up the process.
"""
from __future__ import annotations
from datetime import timedelta
from functools import lru_cache, partial
from typing import Any
from jwt import DecodeError, PyJWS, PyJWT
from homeassistant.util.json import json_loads
JWT_TOKEN_CACHE_SIZE = 16
MAX_TOKEN_SIZE = 8192
_VERIFY_KEYS = ("signature", "exp", "nbf", "iat", "aud", "iss")
_VERIFY_OPTIONS: dict[str, Any] = {f"verify_{key}": True for key in _VERIFY_KEYS} | {
"require": []
}
_NO_VERIFY_OPTIONS = {f"verify_{key}": False for key in _VERIFY_KEYS}
class _PyJWSWithLoadCache(PyJWS):
"""PyJWS with a dedicated load implementation."""
@lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)
# We only ever have a global instance of this class
# so we do not have to worry about the LRU growing
# each time we create a new instance.
def _load(self, jwt: str | bytes) -> tuple[bytes, bytes, dict, bytes]:
"""Load a JWS."""
return super()._load(jwt)
_jws = _PyJWSWithLoadCache()
@lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)
def _decode_payload(json_payload: str) -> dict[str, Any]:
"""Decode the payload from a JWS dictionary."""
try:
payload = json_loads(json_payload)
except ValueError as err:
raise DecodeError(f"Invalid payload string: {err}") from err
if not isinstance(payload, dict):
raise DecodeError("Invalid payload string: must be a json object")
return payload
class _PyJWTWithVerify(PyJWT):
"""PyJWT with a fast decode implementation."""
def decode_payload(
self, jwt: str, key: str, options: dict[str, Any], algorithms: list[str]
) -> dict[str, Any]:
"""Decode a JWT's payload."""
if len(jwt) > MAX_TOKEN_SIZE:
# Avoid caching impossible tokens
raise DecodeError("Token too large")
return _decode_payload(
_jws.decode_complete(
jwt=jwt,
key=key,
algorithms=algorithms,
options=options,
)["payload"]
)
def verify_and_decode(
self,
jwt: str,
key: str,
algorithms: list[str],
issuer: str | None = None,
leeway: int | float | timedelta = 0,
options: dict[str, Any] | None = None,
) -> dict[str, Any]:
"""Verify a JWT's signature and claims."""
merged_options = {**_VERIFY_OPTIONS, **(options or {})}
payload = self.decode_payload(
jwt=jwt,
key=key,
options=merged_options,
algorithms=algorithms,
)
# These should never be missing since we verify them
# but this is an additional safeguard to make sure
# nothing slips through.
assert "exp" in payload, "exp claim is required"
assert "iat" in payload, "iat claim is required"
self._validate_claims(
payload=payload,
options=merged_options,
issuer=issuer,
leeway=leeway,
)
return payload
_jwt = _PyJWTWithVerify()
verify_and_decode = _jwt.verify_and_decode
unverified_hs256_token_decode = lru_cache(maxsize=JWT_TOKEN_CACHE_SIZE)(
partial(
_jwt.decode_payload, key="", algorithms=["HS256"], options=_NO_VERIFY_OPTIONS
)
)
__all__ = [
"unverified_hs256_token_decode",
"verify_and_decode",
]
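The whole module boils down to memoising decodes: the same token string arrives on nearly every request, so a bounded LRU cache avoids re-verifying and re-parsing it. A rough equivalent using only PyJWT's public jwt.encode/jwt.decode API (the secret and cache size here are illustrative):

from functools import lru_cache
from typing import Any

import jwt  # PyJWT

_SECRET = "not-a-real-secret"


@lru_cache(maxsize=16)
def cached_decode(token: str) -> dict[str, Any]:
    """Verify and decode a token; identical token strings hit the cache."""
    # Callers must treat the returned dict as read-only, since it is shared.
    return jwt.decode(token, _SECRET, algorithms=["HS256"])


token = jwt.encode({"sub": "user-1"}, _SECRET, algorithm="HS256")
assert cached_decode(token) is cached_decode(token)  # second call served from cache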

View File

@@ -1,5 +1,4 @@
"""Pluggable auth modules for Home Assistant."""
from __future__ import annotations
import importlib
@@ -17,7 +16,7 @@ from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.decorator import Registry
MULTI_FACTOR_AUTH_MODULES: Registry[str, type[MultiFactorAuthModule]] = Registry()
MULTI_FACTOR_AUTH_MODULES = Registry()
MULTI_FACTOR_AUTH_MODULE_SCHEMA = vol.Schema(
{
@@ -51,17 +50,17 @@ class MultiFactorAuthModule:
Default is same as type
"""
return self.config.get(CONF_ID, self.type) # type: ignore[no-any-return]
return self.config.get(CONF_ID, self.type)
@property
def type(self) -> str:
"""Return type of the module."""
return self.config[CONF_TYPE] # type: ignore[no-any-return]
return self.config[CONF_TYPE] # type: ignore
@property
def name(self) -> str:
"""Return the name of the auth module."""
return self.config.get(CONF_NAME, self.DEFAULT_TITLE) # type: ignore[no-any-return]
return self.config.get(CONF_NAME, self.DEFAULT_TITLE)
# Implement by extending class
@@ -117,7 +116,9 @@ class SetupFlow(data_entry_flow.FlowHandler):
if user_input:
result = await self._auth_module.async_setup_user(self._user_id, user_input)
return self.async_create_entry(data={"result": result})
return self.async_create_entry(
title=self._auth_module.name, data={"result": result}
)
return self.async_show_form(
step_id="init", data_schema=self._setup_schema, errors=errors
@@ -128,11 +129,11 @@ async def auth_mfa_module_from_config(
hass: HomeAssistant, config: dict[str, Any]
) -> MultiFactorAuthModule:
"""Initialize an auth module from a config."""
module_name: str = config[CONF_TYPE]
module_name = config[CONF_TYPE]
module = await _load_mfa_module(hass, module_name)
try:
config = module.CONFIG_SCHEMA(config)
config = module.CONFIG_SCHEMA(config) # type: ignore
except vol.Invalid as err:
_LOGGER.error(
"Invalid configuration for multi-factor module %s: %s",
@@ -141,7 +142,7 @@ async def auth_mfa_module_from_config(
)
raise
return MULTI_FACTOR_AUTH_MODULES[module_name](hass, config)
return MULTI_FACTOR_AUTH_MODULES[module_name](hass, config) # type: ignore
async def _load_mfa_module(hass: HomeAssistant, module_name: str) -> types.ModuleType:
@@ -165,8 +166,9 @@ async def _load_mfa_module(hass: HomeAssistant, module_name: str) -> types.Modul
processed = hass.data[DATA_REQS] = set()
# https://github.com/python/mypy/issues/1424
await requirements.async_process_requirements(
hass, module_path, module.REQUIREMENTS
hass, module_path, module.REQUIREMENTS # type: ignore
)
processed.add(module_name)

View File

@@ -1,5 +1,4 @@
"""Example auth module."""
from __future__ import annotations
from typing import Any
@@ -39,12 +38,12 @@ class InsecureExampleModule(MultiFactorAuthModule):
@property
def input_schema(self) -> vol.Schema:
"""Validate login flow input data."""
return vol.Schema({vol.Required("pin"): str})
return vol.Schema({"pin": str})
@property
def setup_schema(self) -> vol.Schema:
"""Validate async_setup_user input data."""
return vol.Schema({vol.Required("pin"): str})
return vol.Schema({"pin": str})
async def async_setup_flow(self, user_id: str) -> SetupFlow:
"""Return a data entry flow handler for setup module.

View File

@@ -2,13 +2,12 @@
Sending HOTP through notify service
"""
from __future__ import annotations
import asyncio
from collections import OrderedDict
import logging
from typing import Any, cast
from typing import Any, Dict
import attr
import voluptuous as vol
@@ -18,7 +17,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import ServiceNotFound
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.storage import Store
from . import (
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
@@ -27,7 +25,7 @@ from . import (
SetupFlow,
)
REQUIREMENTS = ["pyotp==2.8.0"]
REQUIREMENTS = ["pyotp==2.3.0"]
CONF_MESSAGE = "message"
@@ -58,10 +56,10 @@ def _generate_secret() -> str:
def _generate_random() -> int:
"""Generate a 32 digit number."""
"""Generate a 8 digit number."""
import pyotp # pylint: disable=import-outside-toplevel
return int(pyotp.random_base32(length=32, chars=list("1234567890")))
return int(pyotp.random_base32(length=8, chars=list("1234567890")))
def _generate_otp(secret: str, count: int) -> str:
@@ -88,7 +86,7 @@ class NotifySetting:
target: str | None = attr.ib(default=None)
_UsersDict = dict[str, NotifySetting]
_UsersDict = Dict[str, NotifySetting]
@MULTI_FACTOR_AUTH_MODULES.register("notify")
@@ -101,8 +99,8 @@ class NotifyAuthModule(MultiFactorAuthModule):
"""Initialize the user data store."""
super().__init__(hass, config)
self._user_settings: _UsersDict | None = None
self._user_store = Store[dict[str, dict[str, Any]]](
hass, STORAGE_VERSION, STORAGE_KEY, private=True, atomic_writes=True
self._user_store = hass.helpers.storage.Store(
STORAGE_VERSION, STORAGE_KEY, private=True
)
self._include = config.get(CONF_INCLUDE, [])
self._exclude = config.get(CONF_EXCLUDE, [])
@@ -112,7 +110,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
@property
def input_schema(self) -> vol.Schema:
"""Validate login flow input data."""
return vol.Schema({vol.Required(INPUT_FIELD_CODE): str})
return vol.Schema({INPUT_FIELD_CODE: str})
async def _async_load(self) -> None:
"""Load stored data."""
@@ -120,8 +118,10 @@ class NotifyAuthModule(MultiFactorAuthModule):
if self._user_settings is not None:
return
if (data := await self._user_store.async_load()) is None:
data = cast(dict[str, dict[str, Any]], {STORAGE_USERS: {}})
data = await self._user_store.async_load()
if data is None:
data = {STORAGE_USERS: {}}
self._user_settings = {
user_id: NotifySetting(**setting)
@@ -153,7 +153,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
"""Return list of notify services."""
unordered_services = set()
for service in self.hass.services.async_services_for_domain("notify"):
for service in self.hass.services.async_services().get("notify", {}):
if service not in self._exclude:
unordered_services.add(service)
@@ -207,7 +207,8 @@ class NotifyAuthModule(MultiFactorAuthModule):
await self._async_load()
assert self._user_settings is not None
if (notify_setting := self._user_settings.get(user_id)) is None:
notify_setting = self._user_settings.get(user_id)
if notify_setting is None:
return False
# user_input has been validated in the caller
@@ -224,7 +225,8 @@ class NotifyAuthModule(MultiFactorAuthModule):
await self._async_load()
assert self._user_settings is not None
if (notify_setting := self._user_settings.get(user_id)) is None:
notify_setting = self._user_settings.get(user_id)
if notify_setting is None:
raise ValueError("Cannot find user_id")
def generate_secret_and_one_time_password() -> str:
@@ -247,13 +249,14 @@ class NotifyAuthModule(MultiFactorAuthModule):
await self._async_load()
assert self._user_settings is not None
if (notify_setting := self._user_settings.get(user_id)) is None:
notify_setting = self._user_settings.get(user_id)
if notify_setting is None:
_LOGGER.error("Cannot find user %s", user_id)
return
await self.async_notify(
code,
notify_setting.notify_service, # type: ignore[arg-type]
notify_setting.notify_service, # type: ignore
notify_setting.target,
)
@@ -321,7 +324,6 @@ class NotifySetupFlow(SetupFlow):
errors: dict[str, str] = {}
hass = self._auth_module.hass
assert self._secret and self._count
if user_input:
verified = await hass.async_add_executor_job(
_verify_otp, self._secret, user_input["code"], self._count
@@ -331,11 +333,12 @@ class NotifySetupFlow(SetupFlow):
self._user_id,
{"notify_service": self._notify_service, "target": self._target},
)
return self.async_create_entry(data={})
return self.async_create_entry(title=self._auth_module.name, data={})
errors["base"] = "invalid_code"
# generate code every time, no retry logic
assert self._secret and self._count
code = await hass.async_add_executor_job(
_generate_otp, self._secret, self._count
)
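For context, the flow here is counter-based OTP (HOTP): the module keeps a per-user secret and counter, sends the code for the current counter through a notify service, and later verifies the submitted code against that same counter. A bare-bones sketch with pyotp, leaving persistence and delivery out:

import pyotp

secret = pyotp.random_base32()   # per-user secret, persisted by the module
counter = 0                      # incremented for every code that is sent

hotp = pyotp.HOTP(secret)
code = hotp.at(counter)          # the code that would be sent via notify

# Later the user submits the code and it is checked against the same counter.
assert hotp.verify(code, counter)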

View File

@@ -1,17 +1,15 @@
"""Time-based One Time Password auth module."""
from __future__ import annotations
import asyncio
from io import BytesIO
from typing import Any, cast
from typing import Any
import voluptuous as vol
from homeassistant.auth.models import User
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers.storage import Store
from . import (
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
@@ -20,7 +18,7 @@ from . import (
SetupFlow,
)
REQUIREMENTS = ["pyotp==2.8.0", "PyQRCode==1.2.1"]
REQUIREMENTS = ["pyotp==2.3.0", "PyQRCode==1.2.1"]
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA)
@@ -48,10 +46,8 @@ def _generate_qr_code(data: str) -> str:
.decode("ascii")
.replace("\n", "")
.replace(
(
'<?xml version="1.0" encoding="UTF-8"?>'
'<svg xmlns="http://www.w3.org/2000/svg"'
),
'<?xml version="1.0" encoding="UTF-8"?>'
'<svg xmlns="http://www.w3.org/2000/svg"',
"<svg",
)
)
@@ -80,15 +76,15 @@ class TotpAuthModule(MultiFactorAuthModule):
"""Initialize the user data store."""
super().__init__(hass, config)
self._users: dict[str, str] | None = None
self._user_store = Store[dict[str, dict[str, str]]](
hass, STORAGE_VERSION, STORAGE_KEY, private=True, atomic_writes=True
self._user_store = hass.helpers.storage.Store(
STORAGE_VERSION, STORAGE_KEY, private=True
)
self._init_lock = asyncio.Lock()
@property
def input_schema(self) -> vol.Schema:
"""Validate login flow input data."""
return vol.Schema({vol.Required(INPUT_FIELD_CODE): str})
return vol.Schema({INPUT_FIELD_CODE: str})
async def _async_load(self) -> None:
"""Load stored data."""
@@ -96,14 +92,16 @@ class TotpAuthModule(MultiFactorAuthModule):
if self._users is not None:
return
if (data := await self._user_store.async_load()) is None:
data = cast(dict[str, dict[str, str]], {STORAGE_USERS: {}})
data = await self._user_store.async_load()
if data is None:
data = {STORAGE_USERS: {}}
self._users = data.get(STORAGE_USERS, {})
async def _async_save(self) -> None:
"""Save data."""
await self._user_store.async_save({STORAGE_USERS: self._users or {}})
await self._user_store.async_save({STORAGE_USERS: self._users})
def _add_ota_secret(self, user_id: str, secret: str | None = None) -> str:
"""Create a ota_secret for user."""
@@ -111,7 +109,7 @@ class TotpAuthModule(MultiFactorAuthModule):
ota_secret: str = secret or pyotp.random_base32()
self._users[user_id] = ota_secret # type: ignore[index]
self._users[user_id] = ota_secret # type: ignore
return ota_secret
async def async_setup_flow(self, user_id: str) -> SetupFlow:
@@ -140,7 +138,7 @@ class TotpAuthModule(MultiFactorAuthModule):
if self._users is None:
await self._async_load()
if self._users.pop(user_id, None): # type: ignore[union-attr]
if self._users.pop(user_id, None): # type: ignore
await self._async_save()
async def async_is_user_setup(self, user_id: str) -> bool:
@@ -148,7 +146,7 @@ class TotpAuthModule(MultiFactorAuthModule):
if self._users is None:
await self._async_load()
return user_id in self._users # type: ignore[operator]
return user_id in self._users # type: ignore
async def async_validate(self, user_id: str, user_input: dict[str, Any]) -> bool:
"""Return True if validation passed."""
@@ -165,7 +163,8 @@ class TotpAuthModule(MultiFactorAuthModule):
"""Validate two factor authentication code."""
import pyotp # pylint: disable=import-outside-toplevel
if (ota_secret := self._users.get(user_id)) is None: # type: ignore[union-attr]
ota_secret = self._users.get(user_id) # type: ignore
if ota_secret is None:
# Even if we cannot find the user, we still verify
# to keep the timing the same as if the user was found.
pyotp.TOTP(DUMMY_SECRET).verify(code, valid_window=1)
@@ -185,9 +184,9 @@ class TotpSetupFlow(SetupFlow):
# to fix typing complaint
self._auth_module: TotpAuthModule = auth_module
self._user = user
self._ota_secret: str = ""
self._url: str | None = None
self._image: str | None = None
self._ota_secret: str | None = None
self._url = None # type Optional[str]
self._image = None # type Optional[str]
async def async_step_init(
self, user_input: dict[str, str] | None = None
@@ -209,7 +208,9 @@ class TotpSetupFlow(SetupFlow):
result = await self._auth_module.async_setup_user(
self._user_id, {"secret": self._ota_secret}
)
return self.async_create_entry(data={"result": result})
return self.async_create_entry(
title=self._auth_module.name, data={"result": result}
)
errors["base"] = "invalid_code"
@@ -220,7 +221,7 @@ class TotpSetupFlow(SetupFlow):
self._url,
self._image,
) = await hass.async_add_executor_job(
_generate_secret_and_qr_code,
_generate_secret_and_qr_code, # type: ignore
str(self._user.name),
)

View File

@@ -1,35 +1,23 @@
"""Auth models."""
from __future__ import annotations
from datetime import datetime, timedelta
import secrets
from typing import TYPE_CHECKING, Any, NamedTuple
from typing import NamedTuple
import uuid
import attr
from attr import Attribute
from attr.setters import validate
from homeassistant.const import __version__
from homeassistant.data_entry_flow import FlowResult
from homeassistant.util import dt as dt_util
from . import permissions as perm_mdl
from .const import GROUP_ID_ADMIN
if TYPE_CHECKING:
from functools import cached_property
else:
from homeassistant.backports.functools import cached_property
TOKEN_TYPE_NORMAL = "normal"
TOKEN_TYPE_SYSTEM = "system"
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"
AuthFlowResult = FlowResult[tuple[str, str]]
@attr.s(slots=True)
class Group:
@@ -41,27 +29,18 @@ class Group:
system_generated: bool = attr.ib(default=False)
def _handle_permissions_change(self: User, user_attr: Attribute, new: Any) -> Any:
"""Handle a change to a permissions."""
self.invalidate_cache()
return validate(self, user_attr, new)
@attr.s(slots=False)
@attr.s(slots=True)
class User:
"""A user."""
name: str | None = attr.ib()
perm_lookup: perm_mdl.PermissionLookup = attr.ib(eq=False, order=False)
id: str = attr.ib(factory=lambda: uuid.uuid4().hex)
is_owner: bool = attr.ib(default=False, on_setattr=_handle_permissions_change)
is_active: bool = attr.ib(default=False, on_setattr=_handle_permissions_change)
is_owner: bool = attr.ib(default=False)
is_active: bool = attr.ib(default=False)
system_generated: bool = attr.ib(default=False)
local_only: bool = attr.ib(default=False)
groups: list[Group] = attr.ib(
factory=list, eq=False, order=False, on_setattr=_handle_permissions_change
)
groups: list[Group] = attr.ib(factory=list, eq=False, order=False)
# List of credentials of a user.
credentials: list[Credentials] = attr.ib(factory=list, eq=False, order=False)
@@ -71,31 +50,40 @@ class User:
factory=dict, eq=False, order=False
)
@cached_property
_permissions: perm_mdl.PolicyPermissions | None = attr.ib(
init=False,
eq=False,
order=False,
default=None,
)
@property
def permissions(self) -> perm_mdl.AbstractPermissions:
"""Return permissions object for user."""
if self.is_owner:
return perm_mdl.OwnerPermissions
return perm_mdl.PolicyPermissions(
if self._permissions is not None:
return self._permissions
self._permissions = perm_mdl.PolicyPermissions(
perm_mdl.merge_policies([group.policy for group in self.groups]),
self.perm_lookup,
)
@cached_property
return self._permissions
@property
def is_admin(self) -> bool:
"""Return if user is part of the admin group."""
return self.is_owner or (
self.is_active and any(gr.id == GROUP_ID_ADMIN for gr in self.groups)
)
if self.is_owner:
return True
def invalidate_cache(self) -> None:
"""Invalidate permission and is_admin cache."""
for attr_to_invalidate in ("permissions", "is_admin"):
# try is much more efficient than suppress
try: # noqa: SIM105
delattr(self, attr_to_invalidate)
except AttributeError:
pass
return self.is_active and any(gr.id == GROUP_ID_ADMIN for gr in self.groups)
def invalidate_permission_cache(self) -> None:
"""Invalidate permission cache."""
self._permissions = None
@attr.s(slots=True)
@@ -121,8 +109,6 @@ class RefreshToken:
last_used_at: datetime | None = attr.ib(default=None)
last_used_ip: str | None = attr.ib(default=None)
expire_at: float | None = attr.ib(default=None)
credential: Credentials | None = attr.ib(default=None)
version: str | None = attr.ib(default=__version__)
@@ -147,5 +133,3 @@ class UserMeta(NamedTuple):
name: str | None
is_active: bool
group: str | None = None
local_only: bool | None = None
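Both variants of User.permissions in this file cache the computed permissions object and expose an explicit invalidation hook; the newer one relies on cached_property and invalidates by deleting the attribute. A plain-Python sketch of that pattern (no attrs; the policy merge is a stand-in):

from __future__ import annotations

from functools import cached_property


class CachedUser:
    def __init__(self, groups: list[str]) -> None:
        self.groups = groups

    @cached_property
    def permissions(self) -> frozenset[str]:
        # Stand-in for the expensive merge of group policies; computed once.
        return frozenset(self.groups)

    def invalidate_cache(self) -> None:
        # Deleting the attribute drops the cached value so the next access
        # recomputes it; AttributeError means nothing was cached yet.
        try:
            del self.permissions
        except AttributeError:
            pass


user = CachedUser(["system-admin"])
assert "system-admin" in user.permissions
user.groups.append("system-users")
user.invalidate_cache()
assert "system-users" in user.permissions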

View File

@@ -1,30 +1,21 @@
"""Permissions for Home Assistant."""
from __future__ import annotations
from collections.abc import Callable
from typing import Any
import logging
from typing import Any, Callable
import voluptuous as vol
from .const import CAT_ENTITIES
from .entities import ENTITY_POLICY_SCHEMA, compile_entities
from .merge import merge_policies
from .merge import merge_policies # noqa: F401
from .models import PermissionLookup
from .types import PolicyType
from .util import test_all
POLICY_SCHEMA = vol.Schema({vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA})
__all__ = [
"POLICY_SCHEMA",
"merge_policies",
"PermissionLookup",
"PolicyType",
"AbstractPermissions",
"PolicyPermissions",
"OwnerPermissions",
]
_LOGGER = logging.getLogger(__name__)
class AbstractPermissions:
@@ -42,7 +33,9 @@ class AbstractPermissions:
def check_entity(self, entity_id: str, key: str) -> bool:
"""Check if we can access entity."""
if (entity_func := self._cached_entity_func) is None:
entity_func = self._cached_entity_func
if entity_func is None:
entity_func = self._cached_entity_func = self._entity_func()
return entity_func(entity_id, key)

View File

@@ -1,5 +1,4 @@
"""Permission constants."""
CAT_ENTITIES = "entities"
CAT_CONFIG_ENTRIES = "config_entries"
SUBCAT_ALL = "all"

View File

@@ -1,9 +1,8 @@
"""Entity permissions."""
from __future__ import annotations
from collections import OrderedDict
from collections.abc import Callable
from typing import Callable
import voluptuous as vol
@@ -48,7 +47,7 @@ def _lookup_domain(
perm_lookup: PermissionLookup, domains_dict: SubCategoryDict, entity_id: str
) -> ValueType | None:
"""Look up entity permissions by domain."""
return domains_dict.get(entity_id.partition(".")[0])
return domains_dict.get(entity_id.split(".", 1)[0])
def _lookup_area(

View File

@@ -1,43 +0,0 @@
"""Permission for events."""
from __future__ import annotations
from typing import Final
from homeassistant.const import (
EVENT_COMPONENT_LOADED,
EVENT_CORE_CONFIG_UPDATE,
EVENT_LOVELACE_UPDATED,
EVENT_PANELS_UPDATED,
EVENT_RECORDER_5MIN_STATISTICS_GENERATED,
EVENT_RECORDER_HOURLY_STATISTICS_GENERATED,
EVENT_SERVICE_REGISTERED,
EVENT_SERVICE_REMOVED,
EVENT_SHOPPING_LIST_UPDATED,
EVENT_STATE_CHANGED,
EVENT_THEMES_UPDATED,
)
from homeassistant.helpers.area_registry import EVENT_AREA_REGISTRY_UPDATED
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
from homeassistant.helpers.issue_registry import EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED
# These are events that do not contain any sensitive data
# Except for state_changed, which is handled accordingly.
SUBSCRIBE_ALLOWLIST: Final[set[str]] = {
EVENT_AREA_REGISTRY_UPDATED,
EVENT_COMPONENT_LOADED,
EVENT_CORE_CONFIG_UPDATE,
EVENT_DEVICE_REGISTRY_UPDATED,
EVENT_ENTITY_REGISTRY_UPDATED,
EVENT_REPAIRS_ISSUE_REGISTRY_UPDATED,
EVENT_LOVELACE_UPDATED,
EVENT_PANELS_UPDATED,
EVENT_RECORDER_5MIN_STATISTICS_GENERATED,
EVENT_RECORDER_HOURLY_STATISTICS_GENERATED,
EVENT_SERVICE_REGISTERED,
EVENT_SERVICE_REMOVED,
EVENT_SHOPPING_LIST_UPDATED,
EVENT_STATE_CHANGED,
EVENT_THEMES_UPDATED,
}

View File

@@ -1,5 +1,4 @@
"""Merging of policies."""
from __future__ import annotations
from typing import cast

View File

@@ -1,5 +1,4 @@
"""Models for permissions."""
from __future__ import annotations
from typing import TYPE_CHECKING
@@ -7,12 +6,15 @@ from typing import TYPE_CHECKING
import attr
if TYPE_CHECKING:
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers import (
device_registry as dev_reg,
entity_registry as ent_reg,
)
@attr.s(slots=True)
class PermissionLookup:
"""Class to hold data for permission lookups."""
entity_registry: er.EntityRegistry = attr.ib()
device_registry: dr.DeviceRegistry = attr.ib()
entity_registry: ent_reg.EntityRegistry = attr.ib()
device_registry: dev_reg.DeviceRegistry = attr.ib()

View File

@@ -1,5 +1,4 @@
"""System policies."""
from .const import CAT_ENTITIES, POLICY_READ, SUBCAT_ALL
ADMIN_POLICY = {CAT_ENTITIES: True}

View File

@@ -1,27 +1,28 @@
"""Common code for permissions."""
from collections.abc import Mapping
from typing import Mapping, Union
# MyPy doesn't support recursive type aliases yet, so write it out as far as we need.
ValueType = (
ValueType = Union[
# Example: entities.all = { read: true, control: true }
Mapping[str, bool] | bool | None
)
Mapping[str, bool],
bool,
None,
]
# Example: entities.domains = { light: … }
SubCategoryDict = Mapping[str, ValueType]
SubCategoryType = SubCategoryDict | bool | None
SubCategoryType = Union[SubCategoryDict, bool, None]
CategoryType = (
CategoryType = Union[
# Example: entities.domains
Mapping[str, SubCategoryType]
Mapping[str, SubCategoryType],
# Example: entities.all
| Mapping[str, ValueType]
| bool
| None
)
Mapping[str, ValueType],
bool,
None,
]
# Example: { entities: … }
PolicyType = Mapping[str, CategoryType]
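Either spelling of these aliases describes the same nested mapping. A concrete (purely illustrative) value that satisfies PolicyType:

policy = {
    "entities": {
        "all": {"read": True},                       # ValueType as Mapping[str, bool]
        "domains": {"light": True, "switch": None},  # SubCategoryDict
        "entity_ids": {"light.kitchen": {"read": True, "control": True}},
    }
}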

View File

@@ -1,17 +1,15 @@
"""Helpers to deal with permissions."""
from __future__ import annotations
from collections.abc import Callable
from functools import wraps
from typing import cast
from typing import Callable, Dict, Optional, cast
from .const import SUBCAT_ALL
from .models import PermissionLookup
from .types import CategoryType, SubCategoryDict, ValueType
LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str], ValueType | None]
SubCatLookupType = dict[str, LookupFunc]
LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str], Optional[ValueType]]
SubCatLookupType = Dict[str, LookupFunc]
def lookup_all(
@@ -74,7 +72,8 @@ def compile_policy(
def apply_policy_funcs(object_id: str, key: str) -> bool:
"""Apply several policy functions."""
for func in funcs:
if (result := func(object_id, key)) is not None:
result = func(object_id, key)
if result is not None:
return result
return False
@@ -110,4 +109,4 @@ def test_all(policy: CategoryType, key: str) -> bool:
if not isinstance(all_policy, dict):
return bool(all_policy)
return all_policy.get(key, False) # type: ignore[no-any-return]
return all_policy.get(key, False)
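The compiled policy ends up as an ordered list of lookup functions; the first one that returns a definite answer wins, and the default is deny. A self-contained sketch of that dispatch (the lambdas are stand-ins for compiled lookups):

from typing import Callable, Optional

PolicyFunc = Callable[[str, str], Optional[bool]]


def apply_policy_funcs(funcs: list[PolicyFunc], object_id: str, key: str) -> bool:
    """Return the first definite answer, defaulting to deny."""
    for func in funcs:
        result = func(object_id, key)
        if result is not None:
            return result
    return False


funcs: list[PolicyFunc] = [
    lambda oid, key: True if oid == "light.kitchen" else None,  # specific entity
    lambda oid, key: False if key == "control" else None,       # otherwise read-only
]
assert apply_policy_funcs(funcs, "light.kitchen", "control")
assert not apply_policy_funcs(funcs, "switch.garage", "control")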

View File

@@ -1,5 +1,4 @@
"""Auth providers for Home Assistant."""
from __future__ import annotations
from collections.abc import Mapping
@@ -14,18 +13,21 @@ from voluptuous.humanize import humanize_error
from homeassistant import data_entry_flow, requirements
from homeassistant.const import CONF_ID, CONF_NAME, CONF_TYPE
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import dt as dt_util
from homeassistant.util.decorator import Registry
from ..auth_store import AuthStore
from ..const import MFA_SESSION_EXPIRATION
from ..models import AuthFlowResult, Credentials, RefreshToken, User, UserMeta
from ..models import Credentials, RefreshToken, User, UserMeta
# mypy: disallow-any-generics
_LOGGER = logging.getLogger(__name__)
DATA_REQS = "auth_prov_reqs_processed"
AUTH_PROVIDERS: Registry[str, type[AuthProvider]] = Registry()
AUTH_PROVIDERS = Registry()
AUTH_PROVIDER_SCHEMA = vol.Schema(
{
@@ -62,12 +64,12 @@ class AuthProvider:
@property
def type(self) -> str:
"""Return type of the provider."""
return self.config[CONF_TYPE] # type: ignore[no-any-return]
return self.config[CONF_TYPE] # type: ignore
@property
def name(self) -> str:
"""Return the name of the auth provider."""
return self.config.get(CONF_NAME, self.DEFAULT_TITLE) # type: ignore[no-any-return]
return self.config.get(CONF_NAME, self.DEFAULT_TITLE)
@property
def support_mfa(self) -> bool:
@@ -136,11 +138,11 @@ async def auth_provider_from_config(
hass: HomeAssistant, store: AuthStore, config: dict[str, Any]
) -> AuthProvider:
"""Initialize an auth provider from a config."""
provider_name: str = config[CONF_TYPE]
provider_name = config[CONF_TYPE]
module = await load_auth_provider_module(hass, provider_name)
try:
config = module.CONFIG_SCHEMA(config)
config = module.CONFIG_SCHEMA(config) # type: ignore
except vol.Invalid as err:
_LOGGER.error(
"Invalid configuration for auth provider %s: %s",
@@ -149,7 +151,7 @@ async def auth_provider_from_config(
)
raise
return AUTH_PROVIDERS[provider_name](hass, store, config)
return AUTH_PROVIDERS[provider_name](hass, store, config) # type: ignore
async def load_auth_provider_module(
@@ -167,12 +169,15 @@ async def load_auth_provider_module(
if hass.config.skip_pip or not hasattr(module, "REQUIREMENTS"):
return module
if (processed := hass.data.get(DATA_REQS)) is None:
processed = hass.data.get(DATA_REQS)
if processed is None:
processed = hass.data[DATA_REQS] = set()
elif provider in processed:
return module
reqs = module.REQUIREMENTS
# https://github.com/python/mypy/issues/1424
reqs = module.REQUIREMENTS # type: ignore
await requirements.async_process_requirements(
hass, f"auth provider {provider}", reqs
)
@@ -181,11 +186,9 @@ async def load_auth_provider_module(
return module
class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]):
class LoginFlow(data_entry_flow.FlowHandler):
"""Handler for the login flow."""
_flow_result = AuthFlowResult
def __init__(self, auth_provider: AuthProvider) -> None:
"""Initialize the login flow."""
self._auth_provider = auth_provider
@@ -199,7 +202,7 @@ class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]):
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> AuthFlowResult:
) -> FlowResult:
"""Handle the first step of login flow.
Return self.async_show_form(step_id='init') if user_input is None.
@@ -209,7 +212,7 @@ class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]):
async def async_step_select_mfa_module(
self, user_input: dict[str, str] | None = None
) -> AuthFlowResult:
) -> FlowResult:
"""Handle the step of select mfa module."""
errors = {}
@@ -234,7 +237,7 @@ class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]):
async def async_step_mfa(
self, user_input: dict[str, str] | None = None
) -> AuthFlowResult:
) -> FlowResult:
"""Handle the step of mfa validation."""
assert self.credential
assert self.user
@@ -252,7 +255,9 @@ class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]):
auth_module, "async_initialize_login_mfa_step"
):
try:
await auth_module.async_initialize_login_mfa_step(self.user.id)
await auth_module.async_initialize_login_mfa_step( # type: ignore
self.user.id
)
except HomeAssistantError:
_LOGGER.exception("Error initializing MFA step")
return self.async_abort(reason="unknown_error")
@@ -272,7 +277,7 @@ class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]):
if not errors:
return await self.async_finish(self.credential)
description_placeholders: dict[str, str] = {
description_placeholders: dict[str, str | None] = {
"mfa_module_name": auth_module.name,
"mfa_module_id": auth_module.id,
}
@@ -284,6 +289,6 @@ class LoginFlow(data_entry_flow.FlowHandler[AuthFlowResult, tuple[str, str]]):
errors=errors,
)
async def async_finish(self, flow_result: Any) -> AuthFlowResult:
async def async_finish(self, flow_result: Any) -> FlowResult:
"""Handle the pass of login flow."""
return self.async_create_entry(data=flow_result)
return self.async_create_entry(title=self._auth_provider.name, data=flow_result)

View File

@@ -1,8 +1,8 @@
"""Auth provider that validates credentials via an external command."""
from __future__ import annotations
import asyncio
import collections
from collections.abc import Mapping
import logging
import os
@@ -11,10 +11,13 @@ from typing import Any, cast
import voluptuous as vol
from homeassistant.const import CONF_COMMAND
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from ..models import AuthFlowResult, Credentials, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
from ..models import Credentials, UserMeta
# mypy: disallow-any-generics
CONF_ARGS = "args"
CONF_META = "meta"
@@ -44,11 +47,7 @@ class CommandLineAuthProvider(AuthProvider):
DEFAULT_TITLE = "Command Line Authentication"
# which keys to accept from a program's stdout
ALLOWED_META_KEYS = (
"name",
"group",
"local_only",
)
ALLOWED_META_KEYS = ("name",)
def __init__(self, *args: Any, **kwargs: Any) -> None:
"""Extend parent's __init__.
@@ -72,7 +71,6 @@ class CommandLineAuthProvider(AuthProvider):
*self.config[CONF_ARGS],
env=env,
stdout=asyncio.subprocess.PIPE if self.config[CONF_META] else None,
close_fds=False, # required for posix_spawn
)
stdout, _ = await process.communicate()
except OSError as err:
@@ -93,12 +91,12 @@ class CommandLineAuthProvider(AuthProvider):
for _line in stdout.splitlines():
try:
line = _line.decode().lstrip()
if line.startswith("#"):
continue
key, value = line.split("=", 1)
except ValueError:
# malformed line
continue
if line.startswith("#") or "=" not in line:
continue
key, _, value = line.partition("=")
key = key.strip()
value = value.strip()
if key in self.ALLOWED_META_KEYS:
@@ -122,15 +120,10 @@ class CommandLineAuthProvider(AuthProvider):
) -> UserMeta:
"""Return extra user metadata for credentials.
Currently supports name, group, and local_only.
Currently, only name is supported.
"""
meta = self._user_meta.get(credentials.data["username"], {})
return UserMeta(
name=meta.get("name"),
is_active=True,
group=meta.get("group"),
local_only=meta.get("local_only") == "true",
)
return UserMeta(name=meta.get("name"), is_active=True)
class CommandLineLoginFlow(LoginFlow):
@@ -138,7 +131,7 @@ class CommandLineLoginFlow(LoginFlow):
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> AuthFlowResult:
) -> FlowResult:
"""Handle the step of the form."""
errors = {}
@@ -155,13 +148,10 @@ class CommandLineLoginFlow(LoginFlow):
user_input.pop("password")
return await self.async_finish(user_input)
schema: dict[str, type] = collections.OrderedDict()
schema["username"] = str
schema["password"] = str
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Required("username"): str,
vol.Required("password"): str,
}
),
errors=errors,
step_id="init", data_schema=vol.Schema(schema), errors=errors
)
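The meta handling reduces to: read key = value lines from the program's stdout, skip comments and malformed lines, and keep only allow-listed keys. A stand-alone sketch (the allow-list mirrors the newer side of the diff):

ALLOWED_META_KEYS = ("name", "group", "local_only")


def parse_meta(stdout: bytes) -> dict[str, str]:
    """Extract allow-listed key/value pairs from a program's output."""
    meta: dict[str, str] = {}
    for raw in stdout.splitlines():
        line = raw.decode().lstrip()
        if line.startswith("#") or "=" not in line:
            continue
        key, _, value = line.partition("=")
        key, value = key.strip(), value.strip()
        if key in ALLOWED_META_KEYS:
            meta[key] = value
    return meta


assert parse_meta(b"# comment\nname = Example User\nshoe_size = 46\n") == {
    "name": "Example User"
}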

View File

@@ -1,9 +1,9 @@
"""Home Assistant auth provider."""
from __future__ import annotations
import asyncio
import base64
from collections import OrderedDict
from collections.abc import Mapping
import logging
from typing import Any, cast
@@ -13,11 +13,13 @@ import voluptuous as vol
from homeassistant.const import CONF_ID
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.storage import Store
from ..models import AuthFlowResult, Credentials, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
from ..models import Credentials, UserMeta
# mypy: disallow-any-generics
STORAGE_VERSION = 1
STORAGE_KEY = "auth_provider.homeassistant"
@@ -61,10 +63,10 @@ class Data:
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the user data store."""
self.hass = hass
self._store = Store[dict[str, list[dict[str, str]]]](
hass, STORAGE_VERSION, STORAGE_KEY, private=True, atomic_writes=True
self._store = hass.helpers.storage.Store(
STORAGE_VERSION, STORAGE_KEY, private=True
)
self._data: dict[str, list[dict[str, str]]] | None = None
self._data: dict[str, Any] | None = None
# Legacy mode will allow usernames to start/end with whitespace
# and will compare usernames case-insensitively.
# Remove in 2020 or when we launch 1.0.
@@ -80,8 +82,10 @@ class Data:
async def async_load(self) -> None:
"""Load stored data."""
if (data := await self._store.async_load()) is None:
data = cast(dict[str, list[dict[str, str]]], {"users": []})
data = await self._store.async_load()
if data is None:
data = {"users": []}
seen: set[str] = set()
@@ -89,15 +93,15 @@ class Data:
username = user["username"]
# check if we have duplicates
if (folded := username.casefold()) in seen:
folded = username.casefold()
if folded in seen:
self.is_legacy = True
logging.getLogger(__name__).warning(
(
"Home Assistant auth provider is running in legacy mode "
"because we detected usernames that are case-insensitive"
"equivalent. Please change the username: '%s'."
),
"Home Assistant auth provider is running in legacy mode "
"because we detected usernames that are case-insensitive"
"equivalent. Please change the username: '%s'.",
username,
)
@@ -110,11 +114,9 @@ class Data:
self.is_legacy = True
logging.getLogger(__name__).warning(
(
"Home Assistant auth provider is running in legacy mode "
"because we detected usernames that start or end in a "
"space. Please change the username: '%s'."
),
"Home Assistant auth provider is running in legacy mode "
"because we detected usernames that start or end in a "
"space. Please change the username: '%s'.",
username,
)
@@ -125,8 +127,7 @@ class Data:
@property
def users(self) -> list[dict[str, str]]:
"""Return users."""
assert self._data is not None
return self._data["users"]
return self._data["users"] # type: ignore
def validate_login(self, username: str, password: str) -> None:
"""Validate a username and password.
@@ -153,7 +154,9 @@ class Data:
if not bcrypt.checkpw(password.encode(), user_hash):
raise InvalidAuth
def hash_password(self, password: str, for_storage: bool = False) -> bytes:
def hash_password( # pylint: disable=no-self-use
self, password: str, for_storage: bool = False
) -> bytes:
"""Encode a password."""
hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))
@@ -209,8 +212,7 @@ class Data:
async def async_save(self) -> None:
"""Save data."""
if self._data is not None:
await self._store.async_save(self._data)
await self._store.async_save(self._data)
@AUTH_PROVIDERS.register("homeassistant")
@@ -321,7 +323,7 @@ class HassLoginFlow(LoginFlow):
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> AuthFlowResult:
) -> FlowResult:
"""Handle the step of the form."""
errors = {}
@@ -337,13 +339,10 @@ class HassLoginFlow(LoginFlow):
user_input.pop("password")
return await self.async_finish(user_input)
schema: dict[str, type] = OrderedDict()
schema["username"] = str
schema["password"] = str
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Required("username"): str,
vol.Required("password"): str,
}
),
errors=errors,
step_id="init", data_schema=vol.Schema(schema), errors=errors
)
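Password handling in this provider is standard bcrypt: hash with a 12-round salt, verify with checkpw, and optionally base64-wrap the hash for storage. A minimal sketch of those three steps (the sample passwords are illustrative):

import base64

import bcrypt


def hash_password(password: str, for_storage: bool = False) -> bytes:
    """Hash a password; base64-encode it when it goes into JSON storage."""
    hashed = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))
    return base64.b64encode(hashed) if for_storage else hashed


def check_password(password: str, stored: bytes) -> bool:
    """Verify a password against a stored (base64-encoded) hash."""
    return bcrypt.checkpw(password.encode(), base64.b64decode(stored))


stored = hash_password("correct horse battery staple", for_storage=True)
assert check_password("correct horse battery staple", stored)
assert not check_password("wrong password", stored)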

View File

@@ -1,7 +1,7 @@
"""Example auth provider."""
from __future__ import annotations
from collections import OrderedDict
from collections.abc import Mapping
import hmac
from typing import Any, cast
@@ -9,10 +9,13 @@ from typing import Any, cast
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
from ..models import AuthFlowResult, Credentials, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
from ..models import Credentials, UserMeta
# mypy: disallow-any-generics
USER_SCHEMA = vol.Schema(
{
@@ -98,9 +101,9 @@ class ExampleLoginFlow(LoginFlow):
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> AuthFlowResult:
) -> FlowResult:
"""Handle the step of the form."""
errors = None
errors = {}
if user_input is not None:
try:
@@ -108,19 +111,16 @@ class ExampleLoginFlow(LoginFlow):
user_input["username"], user_input["password"]
)
except InvalidAuthError:
errors = {"base": "invalid_auth"}
errors["base"] = "invalid_auth"
if not errors:
user_input.pop("password")
return await self.async_finish(user_input)
schema: dict[str, type] = OrderedDict()
schema["username"] = str
schema["password"] = str
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Required("username"): str,
vol.Required("password"): str,
}
),
errors=errors,
step_id="init", data_schema=vol.Schema(schema), errors=errors
)

View File

@@ -1,8 +1,8 @@
"""Support Legacy API password auth provider.
"""
Support Legacy API password auth provider.
It will be removed when the auth system is production ready.
"""
from __future__ import annotations
from collections.abc import Mapping
@@ -11,39 +11,23 @@ from typing import Any, cast
import voluptuous as vol
from homeassistant.core import async_get_hass, callback
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from ..models import AuthFlowResult, Credentials, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
from ..models import Credentials, UserMeta
# mypy: disallow-any-generics
AUTH_PROVIDER_TYPE = "legacy_api_password"
CONF_API_PASSWORD = "api_password"
_CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
{vol.Required(CONF_API_PASSWORD): cv.string}, extra=vol.PREVENT_EXTRA
)
def _create_repair_and_validate(config: dict[str, Any]) -> dict[str, Any]:
async_create_issue(
async_get_hass(),
"auth",
"deprecated_legacy_api_password",
breaks_in_ha_version="2024.6.0",
is_fixable=False,
severity=IssueSeverity.WARNING,
translation_key="deprecated_legacy_api_password",
)
return _CONFIG_SCHEMA(config) # type: ignore[no-any-return]
CONFIG_SCHEMA = _create_repair_and_validate
LEGACY_USER_NAME = "Legacy API password user"
@@ -89,7 +73,8 @@ class LegacyApiPasswordAuthProvider(AuthProvider):
async def async_user_meta_for_credentials(
self, credentials: Credentials
) -> UserMeta:
"""Return info for the user.
"""
Return info for the user.
Will be used to populate info when creating a new user.
"""
@@ -101,7 +86,7 @@ class LegacyLoginFlow(LoginFlow):
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> AuthFlowResult:
) -> FlowResult:
"""Handle the step of the form."""
errors = {}
@@ -117,7 +102,5 @@ class LegacyLoginFlow(LoginFlow):
return await self.async_finish({})
return self.async_show_form(
step_id="init",
data_schema=vol.Schema({vol.Required("password"): str}),
errors=errors,
step_id="init", data_schema=vol.Schema({"password": str}), errors=errors
)

View File

@@ -3,7 +3,6 @@
It shows a list of users if the request comes from a trusted network.
The login flow is aborted if the request is not from a trusted network.
"""
from __future__ import annotations
from collections.abc import Mapping
@@ -15,21 +14,23 @@ from ipaddress import (
ip_address,
ip_network,
)
from typing import Any, cast
from typing import Any, Dict, List, Union, cast
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.data_entry_flow import FlowResult
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.network import is_cloud_connection
from .. import InvalidAuthError
from ..models import AuthFlowResult, Credentials, RefreshToken, UserMeta
from . import AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, AuthProvider, LoginFlow
from .. import InvalidAuthError
from ..models import Credentials, RefreshToken, UserMeta
IPAddress = IPv4Address | IPv6Address
IPNetwork = IPv4Network | IPv6Network
# mypy: disallow-any-generics
IPAddress = Union[IPv4Address, IPv6Address]
IPNetwork = Union[IPv4Network, IPv6Network]
CONF_TRUSTED_NETWORKS = "trusted_networks"
CONF_TRUSTED_USERS = "trusted_users"
@@ -47,7 +48,7 @@ CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
[
vol.Or(
cv.uuid4_hex,
vol.Schema({vol.Required(CONF_GROUP): str}),
vol.Schema({vol.Required(CONF_GROUP): cv.uuid4_hex}),
)
],
)
@@ -75,12 +76,12 @@ class TrustedNetworksAuthProvider(AuthProvider):
@property
def trusted_networks(self) -> list[IPNetwork]:
"""Return trusted networks."""
return cast(list[IPNetwork], self.config[CONF_TRUSTED_NETWORKS])
return cast(List[IPNetwork], self.config[CONF_TRUSTED_NETWORKS])
@property
def trusted_users(self) -> dict[IPNetwork, Any]:
"""Return trusted users per network."""
return cast(dict[IPNetwork, Any], self.config[CONF_TRUSTED_USERS])
return cast(Dict[IPNetwork, Any], self.config[CONF_TRUSTED_USERS])
@property
def trusted_proxies(self) -> list[IPNetwork]:
@@ -193,9 +194,6 @@ class TrustedNetworksAuthProvider(AuthProvider):
if any(ip_addr in trusted_proxy for trusted_proxy in self.trusted_proxies):
raise InvalidAuthError("Can't allow access from a proxy server")
if is_cloud_connection(self.hass):
raise InvalidAuthError("Can't allow access from Home Assistant Cloud")
@callback
def async_validate_refresh_token(
self, refresh_token: RefreshToken, remote_ip: str | None = None
@@ -226,7 +224,7 @@ class TrustedNetworksLoginFlow(LoginFlow):
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> AuthFlowResult:
) -> FlowResult:
"""Handle the step of the form."""
try:
cast(
@@ -246,7 +244,5 @@ class TrustedNetworksLoginFlow(LoginFlow):
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{vol.Required("user"): vol.In(self._available_users)}
),
data_schema=vol.Schema({"user": vol.In(self._available_users)}),
)
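The access check itself is ordinary ipaddress arithmetic: the remote address has to fall inside one of the trusted networks and must not be one of the trusted proxies. A compact sketch with made-up networks:

from ipaddress import ip_address, ip_network

TRUSTED_NETWORKS = [ip_network("192.168.0.0/24")]
TRUSTED_PROXIES = [ip_network("192.168.0.1/32")]


def is_allowed(remote: str) -> bool:
    """Return True if the address is trusted and is not a proxy."""
    addr = ip_address(remote)
    if not any(addr in net for net in TRUSTED_NETWORKS):
        return False
    # Never treat the proxy itself as a logged-in user.
    return not any(addr in proxy for proxy in TRUSTED_PROXIES)


assert is_allowed("192.168.0.42")
assert not is_allowed("192.168.0.1")   # trusted proxy
assert not is_allowed("10.0.0.5")      # outside the trusted networks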

View File

@@ -1,279 +0,0 @@
A. HISTORY OF THE SOFTWARE
==========================
Python was created in the early 1990s by Guido van Rossum at Stichting
Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands
as a successor of a language called ABC. Guido remains Python's
principal author, although it includes many contributions from others.
In 1995, Guido continued his work on Python at the Corporation for
National Research Initiatives (CNRI, see https://www.cnri.reston.va.us)
in Reston, Virginia where he released several versions of the
software.
In May 2000, Guido and the Python core development team moved to
BeOpen.com to form the BeOpen PythonLabs team. In October of the same
year, the PythonLabs team moved to Digital Creations, which became
Zope Corporation. In 2001, the Python Software Foundation (PSF, see
https://www.python.org/psf/) was formed, a non-profit organization
created specifically to own Python-related Intellectual Property.
Zope Corporation was a sponsoring member of the PSF.
All Python releases are Open Source (see https://opensource.org for
the Open Source Definition). Historically, most, but not all, Python
releases have also been GPL-compatible; the table below summarizes
the various releases.
Release         Derived     Year        Owner       GPL-compatible? (1)
                from
0.9.0 thru 1.2              1991-1995   CWI         yes
1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
1.6             1.5.2       2000        CNRI        no
2.0             1.6         2000        BeOpen.com  no
1.6.1           1.6         2001        CNRI        yes (2)
2.1             2.0+1.6.1   2001        PSF         no
2.0.1           2.0+1.6.1   2001        PSF         yes
2.1.1           2.1+2.0.1   2001        PSF         yes
2.1.2           2.1.1       2002        PSF         yes
2.1.3           2.1.2       2002        PSF         yes
2.2 and above   2.1.1       2001-now    PSF         yes
Footnotes:
(1) GPL-compatible doesn't mean that we're distributing Python under
the GPL. All Python licenses, unlike the GPL, let you distribute
a modified version without making your changes open source. The
GPL-compatible licenses make it possible to combine Python with
other software that is released under the GPL; the others don't.
(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
because its license has a choice of law clause. According to
CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
is "not incompatible" with the GPL.
Thanks to the many outside volunteers who have worked under Guido's
direction to make these releases possible.
B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
===============================================================
Python software and documentation are licensed under the
Python Software Foundation License Version 2.
Starting with Python 3.8.6, examples, recipes, and other code in
the documentation are dual licensed under the PSF License Version 2
and the Zero-Clause BSD license.
Some software incorporated into Python is under different licenses.
The licenses are listed with code falling under that license.
PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
--------------------------------------------
1. This LICENSE AGREEMENT is between the Python Software Foundation
("PSF"), and the Individual or Organization ("Licensee") accessing and
otherwise using this software ("Python") in source or binary form and
its associated documentation.
2. Subject to the terms and conditions of this License Agreement, PSF hereby
grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
analyze, test, perform and/or display publicly, prepare derivative works,
distribute, and otherwise use Python alone or in any derivative version,
provided, however, that PSF's License Agreement and PSF's notice of copyright,
i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation;
All Rights Reserved" are retained in Python alone or in any derivative version
prepared by Licensee.
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python.
4. PSF is making Python available to Licensee on an "AS IS"
basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. Nothing in this License Agreement shall be deemed to create any
relationship of agency, partnership, or joint venture between PSF and
Licensee. This License Agreement does not grant permission to use PSF
trademarks or trade name in a trademark sense to endorse or promote
products or services of Licensee, or any third party.
8. By copying, installing or otherwise using Python, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
-------------------------------------------
BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
Individual or Organization ("Licensee") accessing and otherwise using
this software in source or binary form and its associated
documentation ("the Software").
2. Subject to the terms and conditions of this BeOpen Python License
Agreement, BeOpen hereby grants Licensee a non-exclusive,
royalty-free, world-wide license to reproduce, analyze, test, perform
and/or display publicly, prepare derivative works, distribute, and
otherwise use the Software alone or in any derivative version,
provided, however, that the BeOpen Python License is retained in the
Software, alone or in any derivative version prepared by Licensee.
3. BeOpen is making the Software available to Licensee on an "AS IS"
basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
5. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
6. This License Agreement shall be governed by and interpreted in all
respects by the law of the State of California, excluding conflict of
law provisions. Nothing in this License Agreement shall be deemed to
create any relationship of agency, partnership, or joint venture
between BeOpen and Licensee. This License Agreement does not grant
permission to use BeOpen trademarks or trade names in a trademark
sense to endorse or promote products or services of Licensee, or any
third party. As an exception, the "BeOpen Python" logos available at
http://www.pythonlabs.com/logos.html may be used according to the
permissions granted on that web page.
7. By copying, installing or otherwise using the software, Licensee
agrees to be bound by the terms and conditions of this License
Agreement.
CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
---------------------------------------
1. This LICENSE AGREEMENT is between the Corporation for National
Research Initiatives, having an office at 1895 Preston White Drive,
Reston, VA 20191 ("CNRI"), and the Individual or Organization
("Licensee") accessing and otherwise using Python 1.6.1 software in
source or binary form and its associated documentation.
2. Subject to the terms and conditions of this License Agreement, CNRI
hereby grants Licensee a nonexclusive, royalty-free, world-wide
license to reproduce, analyze, test, perform and/or display publicly,
prepare derivative works, distribute, and otherwise use Python 1.6.1
alone or in any derivative version, provided, however, that CNRI's
License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
1995-2001 Corporation for National Research Initiatives; All Rights
Reserved" are retained in Python 1.6.1 alone or in any derivative
version prepared by Licensee. Alternately, in lieu of CNRI's License
Agreement, Licensee may substitute the following text (omitting the
quotes): "Python 1.6.1 is made available subject to the terms and
conditions in CNRI's License Agreement. This Agreement together with
Python 1.6.1 may be located on the internet using the following
unique, persistent identifier (known as a handle): 1895.22/1013. This
Agreement may also be obtained from a proxy server on the internet
using the following URL: http://hdl.handle.net/1895.22/1013".
3. In the event Licensee prepares a derivative work that is based on
or incorporates Python 1.6.1 or any part thereof, and wants to make
the derivative work available to others as provided herein, then
Licensee hereby agrees to include in any such work a brief summary of
the changes made to Python 1.6.1.
4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
INFRINGE ANY THIRD PARTY RIGHTS.
5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
6. This License Agreement will automatically terminate upon a material
breach of its terms and conditions.
7. This License Agreement shall be governed by the federal
intellectual property law of the United States, including without
limitation the federal copyright law, and, to the extent such
U.S. federal law does not apply, by the law of the Commonwealth of
Virginia, excluding Virginia's conflict of law provisions.
Notwithstanding the foregoing, with regard to derivative works based
on Python 1.6.1 that incorporate non-separable material that was
previously distributed under the GNU General Public License (GPL), the
law of the Commonwealth of Virginia shall govern this License
Agreement only as to issues arising under or with respect to
Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
License Agreement shall be deemed to create any relationship of
agency, partnership, or joint venture between CNRI and Licensee. This
License Agreement does not grant permission to use CNRI trademarks or
trade name in a trademark sense to endorse or promote products or
services of Licensee, or any third party.
8. By clicking on the "ACCEPT" button where indicated, or by copying,
installing or otherwise using Python 1.6.1, Licensee agrees to be
bound by the terms and conditions of this License Agreement.
ACCEPT
CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
--------------------------------------------------
Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
The Netherlands. All rights reserved.
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee is hereby granted,
provided that the above copyright notice appear in all copies and that
both that copyright notice and this permission notice appear in
supporting documentation, and that the name of Stichting Mathematisch
Centrum or CWI not be used in advertising or publicity pertaining to
distribution of the software without specific, written prior
permission.
STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION
----------------------------------------------------------------------
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.

View File

@@ -1,5 +0,0 @@
This package contains backports of Python functionality from future Python
versions.
Some of the backports have been copied directly from the CPython project,
and are subject to license agreement as detailed in LICENSE.Python.

View File

@@ -1 +0,0 @@
"""Backports from newer Python versions."""

View File

@@ -1,16 +0,0 @@
"""Enum backports from standard lib.
This file contained the backport of Python 3.11's StrEnum.
Since we have dropped support for Python 3.10, we can remove this backport.
This file is kept for now to avoid breaking custom components that might
import it.
"""
from __future__ import annotations
from enum import StrEnum
__all__ = [
"StrEnum",
]
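The shim above only re-exports the standard library class, so custom components that still import StrEnum from this module keep working on Python 3.11+. A minimal usage sketch follows; the ChargerState enum is purely illustrative and not part of the diff.
from enum import StrEnum  # identical to what the shim above re-exports on 3.11+
class ChargerState(StrEnum):
    """Hypothetical states for an illustrative integration."""
    IDLE = "idle"
    CHARGING = "charging"
# StrEnum members are real strings, so they compare and format as str.
assert ChargerState.CHARGING == "charging"
assert f"state={ChargerState.IDLE}" == "state=idle"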

View File

@@ -1,81 +0,0 @@
"""Functools backports from standard lib."""
# This file contains parts of Python's module wrapper
# for the _functools C module
# to allow utilities written in Python to be added
# to the functools module.
# Written by Nick Coghlan <ncoghlan at gmail.com>,
# Raymond Hettinger <python at rcn.com>,
# and Łukasz Langa <lukasz at langa.pl>.
# Copyright © 2001-2023 Python Software Foundation; All Rights Reserved
from __future__ import annotations
from collections.abc import Callable
from types import GenericAlias
from typing import Any, Generic, Self, TypeVar, overload
_T = TypeVar("_T")
class cached_property(Generic[_T]):
"""Backport of Python 3.12's cached_property.
Includes https://github.com/python/cpython/pull/101890/files
"""
def __init__(self, func: Callable[[Any], _T]) -> None:
"""Initialize."""
self.func: Callable[[Any], _T] = func
self.attrname: str | None = None
self.__doc__ = func.__doc__
def __set_name__(self, owner: type[Any], name: str) -> None:
"""Set name."""
if self.attrname is None:
self.attrname = name
elif name != self.attrname:
raise TypeError(
"Cannot assign the same cached_property to two different names "
f"({self.attrname!r} and {name!r})."
)
@overload
def __get__(self, instance: None, owner: type[Any] | None = None) -> Self:
...
@overload
def __get__(self, instance: Any, owner: type[Any] | None = None) -> _T:
...
def __get__(
self, instance: Any | None, owner: type[Any] | None = None
) -> _T | Self:
"""Get."""
if instance is None:
return self
if self.attrname is None:
raise TypeError(
"Cannot use cached_property instance without calling __set_name__ on it."
)
try:
cache = instance.__dict__
# not all objects have __dict__ (e.g. class defines slots)
except AttributeError:
msg = (
f"No '__dict__' attribute on {type(instance).__name__!r} "
f"instance to cache {self.attrname!r} property."
)
raise TypeError(msg) from None
val = self.func(instance)
try:
cache[self.attrname] = val
except TypeError:
msg = (
f"The '__dict__' attribute on {type(instance).__name__!r} instance "
f"does not support item assignment for caching {self.attrname!r} property."
)
raise TypeError(msg) from None
return val
__class_getitem__ = classmethod(GenericAlias) # type: ignore[var-annotated]
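The descriptor above computes the value once and stores it in the instance __dict__ under the attribute name, so later lookups bypass the descriptor entirely. A short usage sketch, with an illustrative Sensor class that is not part of the diff:
class Sensor:
    """Hypothetical consumer of the cached_property backport above."""
    def __init__(self, raw: list[int]) -> None:
        self._raw = raw
    @cached_property
    def total(self) -> int:
        # Runs only on first access; the result is then cached on the instance.
        return sum(self._raw)
s = Sensor([1, 2, 3])
assert s.total == 6
assert s.__dict__["total"] == 6  # subsequent reads come straight from __dict__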

View File

@@ -1,20 +1,13 @@
"""Block blocking calls being done in asyncio."""
"""Block I/O being done in asyncio."""
from http.client import HTTPConnection
import time
from .util.async_ import protect_loop
from homeassistant.util.async_ import protect_loop
def enable() -> None:
"""Enable the detection of blocking calls in the event loop."""
"""Enable the detection of I/O in the event loop."""
# Prevent urllib3 and requests doing I/O in event loop
HTTPConnection.putrequest = protect_loop( # type: ignore[method-assign]
HTTPConnection.putrequest
)
# Prevent sleeping in event loop. Non-strict since 2022.02
time.sleep = protect_loop(time.sleep, strict=False)
HTTPConnection.putrequest = protect_loop(HTTPConnection.putrequest) # type: ignore
# Currently disabled. pytz doing I/O when getting timezone.
# Prevent files being opened inside the event loop
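Both versions wrap known blocking calls with protect_loop from homeassistant.util.async_ so that running them inside the event loop is detected. Below is a minimal sketch of that general technique using only the standard library; it illustrates the idea and is not Home Assistant's protect_loop.
import asyncio
import functools
from collections.abc import Callable
from typing import Any
def report_blocking_call(func: Callable[..., Any], *, strict: bool = True) -> Callable[..., Any]:
    """Complain when a blocking callable is invoked from inside a running event loop."""
    @functools.wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            # No loop running in this thread: the blocking call is fine.
            return func(*args, **kwargs)
        message = f"Detected blocking call to {func.__name__} inside the event loop"
        if strict:
            raise RuntimeError(message)
        print(message)  # non-strict mode: report and let the call through
        return func(*args, **kwargs)
    return wrapper
# time.sleep = report_blocking_call(time.sleep, strict=False)  # same spirit as above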

View File

@@ -1,90 +1,38 @@
"""Provide methods to bootstrap a Home Assistant instance."""
from __future__ import annotations
import asyncio
import contextlib
from datetime import timedelta
from datetime import datetime
import logging
import logging.handlers
from operator import itemgetter
import os
import platform
import sys
import threading
from time import monotonic
from typing import TYPE_CHECKING, Any
# Import cryptography early since import openssl is not thread-safe
# _frozen_importlib._DeadlockError: deadlock detected by _ModuleLock('cryptography.hazmat.backends.openssl.backend')
import cryptography.hazmat.backends.openssl.backend # noqa: F401
import voluptuous as vol
import yarl
from . import config as conf_util, config_entries, core, loader, requirements
# Pre-import frontend deps which have no requirements here to avoid
# loading them at run time and blocking the event loop. We do this ahead
# of time so that we do not have to flag frontend deps with `import_executor`
# as it would create a thundering herd of executor jobs trying to import
# frontend deps at the same time.
from .components import (
api as api_pre_import, # noqa: F401
auth as auth_pre_import, # noqa: F401
config as config_pre_import, # noqa: F401
device_automation as device_automation_pre_import, # noqa: F401
diagnostics as diagnostics_pre_import, # noqa: F401
file_upload as file_upload_pre_import, # noqa: F401
history as history_pre_import, # noqa: F401
http, # not named pre_import since it has requirements
image_upload as image_upload_import, # noqa: F401 - not named pre_import since it has requirements
logbook as logbook_pre_import, # noqa: F401
lovelace as lovelace_pre_import, # noqa: F401
onboarding as onboarding_pre_import, # noqa: F401
recorder as recorder_import, # noqa: F401 - not named pre_import since it has requirements
repairs as repairs_pre_import, # noqa: F401
search as search_pre_import, # noqa: F401
sensor as sensor_pre_import, # noqa: F401
system_log as system_log_pre_import, # noqa: F401
webhook as webhook_pre_import, # noqa: F401
websocket_api as websocket_api_pre_import, # noqa: F401
)
from .components.sensor import recorder as sensor_recorder # noqa: F401
from .const import (
FORMAT_DATETIME,
KEY_DATA_LOGGING as DATA_LOGGING,
REQUIRED_NEXT_PYTHON_HA_RELEASE,
REQUIRED_NEXT_PYTHON_VER,
SIGNAL_BOOTSTRAP_INTEGRATIONS,
)
from .exceptions import HomeAssistantError
from .helpers import (
area_registry,
config_validation as cv,
device_registry,
entity,
entity_registry,
floor_registry,
issue_registry,
label_registry,
recorder,
restore_state,
template,
translation,
)
from .helpers.dispatcher import async_dispatcher_send
from .helpers.typing import ConfigType
from .setup import (
BASE_PLATFORMS,
from homeassistant import config as conf_util, config_entries, core, loader
from homeassistant.components import http
from homeassistant.const import REQUIRED_NEXT_PYTHON_DATE, REQUIRED_NEXT_PYTHON_VER
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import area_registry, device_registry, entity_registry
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.typing import ConfigType
from homeassistant.setup import (
DATA_SETUP,
DATA_SETUP_STARTED,
DATA_SETUP_TIME,
async_notify_setup_error,
async_set_domains_to_be_loaded,
async_setup_component,
)
from .util.async_ import create_eager_task
from .util.logging import async_activate_log_queue_handler
from .util.package import async_get_user_site, is_virtual_env
from homeassistant.util.async_ import gather_with_concurrency
import homeassistant.util.dt as dt_util
from homeassistant.util.logging import async_activate_log_queue_handler
from homeassistant.util.package import async_get_user_site, is_virtual_env
if TYPE_CHECKING:
from .runner import RuntimeConfig
@@ -94,106 +42,41 @@ _LOGGER = logging.getLogger(__name__)
ERROR_LOG_FILENAME = "home-assistant.log"
# hass.data key for logging information.
DATA_REGISTRIES_LOADED = "bootstrap_registries_loaded"
DATA_LOGGING = "logging"
LOG_SLOW_STARTUP_INTERVAL = 60
SLOW_STARTUP_CHECK_INTERVAL = 1
SIGNAL_BOOTSTRAP_INTEGRATONS = "bootstrap_integrations"
STAGE_1_TIMEOUT = 120
STAGE_2_TIMEOUT = 300
WRAP_UP_TIMEOUT = 300
COOLDOWN_TIME = 60
MAX_LOAD_CONCURRENTLY = 6
DEBUGGER_INTEGRATIONS = {"debugpy"}
CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}
CORE_INTEGRATIONS = ("homeassistant", "persistent_notification")
LOGGING_INTEGRATIONS = {
# Set log levels
"logger",
# Error logging
"system_log",
"sentry",
}
FRONTEND_INTEGRATIONS = {
# Get the frontend up and running as soon as possible so problem
# integrations can be removed and database migration status is
# visible in frontend
"frontend",
}
RECORDER_INTEGRATIONS = {
# Setup after frontend
# To record data
"recorder",
}
DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb", "zeroconf")
STAGE_1_INTEGRATIONS = {
# We need to make sure discovery integrations update their deps
# before stage 2 integrations inadvertently load them while those
# deps are still outdated, which leads to using an old version of
# the dep, or worse, import errors.
*DISCOVERY_INTEGRATIONS,
# To make sure we forward data to other instances
"mqtt_eventstream",
# To provide account link implementations
"cloud",
# Ensure supervisor is available
"hassio",
}
DEFAULT_INTEGRATIONS = {
# These integrations are set up unless recovery mode is activated.
#
# Integrations providing core functionality:
"analytics", # Needed for onboarding
"application_credentials",
"backup",
# Get the frontend up and running as soon
# as possible so problem integrations can
# be removed
"frontend",
"hardware",
"logger",
"network",
"system_health",
#
# Key-feature:
"automation",
"person",
"scene",
"script",
"tag",
"zone",
#
# Built-in helpers:
"counter",
"input_boolean",
"input_button",
"input_datetime",
"input_number",
"input_select",
"input_text",
"schedule",
"timer",
}
DEFAULT_INTEGRATIONS_RECOVERY_MODE = {
# These integrations are set up if recovery mode is activated.
"frontend",
}
DEFAULT_INTEGRATIONS_SUPERVISOR = {
# These integrations are set up if using the Supervisor
"hassio",
}
CRITICAL_INTEGRATIONS = {
# Recovery mode is activated if these integrations fail to set up
"frontend",
}
SETUP_ORDER = {
# Load logging as soon as possible
"logging": LOGGING_INTEGRATIONS,
# Setup frontend
"frontend": FRONTEND_INTEGRATIONS,
# Setup recorder
"recorder": RECORDER_INTEGRATIONS,
# Start up debuggers. Start these first in case they want to wait.
"debugger": DEBUGGER_INTEGRATIONS,
}
@@ -201,7 +84,8 @@ async def async_setup_hass(
runtime_config: RuntimeConfig,
) -> core.HomeAssistant | None:
"""Set up Home Assistant."""
hass = core.HomeAssistant(runtime_config.config_dir)
hass = core.HomeAssistant()
hass.config.config_dir = runtime_config.config_dir
async_enable_logging(
hass,
@@ -211,10 +95,8 @@ async def async_setup_hass(
runtime_config.log_no_color,
)
hass.config.safe_mode = runtime_config.safe_mode
hass.config.skip_pip = runtime_config.skip_pip
hass.config.skip_pip_packages = runtime_config.skip_pip_packages
if runtime_config.skip_pip or runtime_config.skip_pip_packages:
if runtime_config.skip_pip:
_LOGGER.warning(
"Skipping pip installation of required modules. This may cause issues"
)
@@ -225,18 +107,18 @@ async def async_setup_hass(
_LOGGER.info("Config directory: %s", runtime_config.config_dir)
loader.async_setup(hass)
config_dict = None
basic_setup_success = False
safe_mode = runtime_config.safe_mode
if not (recovery_mode := runtime_config.recovery_mode):
if not safe_mode:
await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)
try:
config_dict = await conf_util.async_hass_config_yaml(hass)
except HomeAssistantError as err:
_LOGGER.error(
"Failed to parse configuration.yaml: %s. Activating recovery mode",
"Failed to parse configuration.yaml: %s. Activating safe mode",
err,
)
else:
@@ -248,49 +130,42 @@ async def async_setup_hass(
)
if config_dict is None:
recovery_mode = True
safe_mode = True
elif not basic_setup_success:
_LOGGER.warning("Unable to set up core integrations. Activating recovery mode")
recovery_mode = True
_LOGGER.warning("Unable to set up core integrations. Activating safe mode")
safe_mode = True
elif any(domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS):
_LOGGER.warning(
"Detected that %s did not load. Activating recovery mode",
",".join(CRITICAL_INTEGRATIONS),
)
elif (
"frontend" in hass.data.get(DATA_SETUP, {})
and "frontend" not in hass.config.components
):
_LOGGER.warning("Detected that frontend did not load. Activating safe mode")
# Ask integrations to shut down. It's messy but we can't
# do a clean stop without knowing what is broken
with contextlib.suppress(TimeoutError):
with contextlib.suppress(asyncio.TimeoutError):
async with hass.timeout.async_timeout(10):
await hass.async_stop()
recovery_mode = True
safe_mode = True
old_config = hass.config
old_logging = hass.data.get(DATA_LOGGING)
hass = core.HomeAssistant(old_config.config_dir)
if old_logging:
hass.data[DATA_LOGGING] = old_logging
hass = core.HomeAssistant()
hass.config.skip_pip = old_config.skip_pip
hass.config.skip_pip_packages = old_config.skip_pip_packages
hass.config.internal_url = old_config.internal_url
hass.config.external_url = old_config.external_url
# Setup loader cache after the config dir has been set
loader.async_setup(hass)
hass.config.config_dir = old_config.config_dir
if recovery_mode:
_LOGGER.info("Starting in recovery mode")
hass.config.recovery_mode = True
if safe_mode:
_LOGGER.info("Starting in safe mode")
hass.config.safe_mode = True
http_conf = (await http.async_get_last_config(hass)) or {}
await async_from_config_dict(
{"recovery_mode": {}, "http": http_conf},
{"safe_mode": {}, "http": http_conf},
hass,
)
elif hass.config.safe_mode:
_LOGGER.info("Starting in safe mode")
if runtime_config.open_ui:
hass.add_job(open_hass_ui, hass)
@@ -318,40 +193,6 @@ def open_hass_ui(hass: core.HomeAssistant) -> None:
)
async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
"""Load the registries and cache the result of platform.uname().processor."""
if DATA_REGISTRIES_LOADED in hass.data:
return
hass.data[DATA_REGISTRIES_LOADED] = None
def _cache_uname_processor() -> None:
"""Cache the result of platform.uname().processor in the executor.
Multiple modules call this function at startup which
executes a blocking subprocess call. This is a problem for the
asyncio event loop. By priming the cache of uname we can
avoid the blocking call in the event loop.
"""
platform.uname().processor # pylint: disable=expression-not-assigned
# Load the registries and cache the result of platform.uname().processor
translation.async_setup(hass)
entity.async_setup(hass)
template.async_setup(hass)
await asyncio.gather(
create_eager_task(area_registry.async_load(hass)),
create_eager_task(device_registry.async_load(hass)),
create_eager_task(entity_registry.async_load(hass)),
create_eager_task(floor_registry.async_load(hass)),
create_eager_task(issue_registry.async_load(hass)),
create_eager_task(label_registry.async_load(hass)),
hass.async_add_executor_job(_cache_uname_processor),
create_eager_task(template.async_load_custom_templates(hass)),
create_eager_task(restore_state.async_load(hass)),
create_eager_task(hass.config_entries.async_initialize()),
)
async def async_from_config_dict(
config: ConfigType, hass: core.HomeAssistant
) -> core.HomeAssistant | None:
@@ -363,7 +204,7 @@ async def async_from_config_dict(
start = monotonic()
hass.config_entries = config_entries.ConfigEntries(hass, config)
await async_load_base_functionality(hass)
await hass.config_entries.async_initialize()
# Set up core.
_LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)
@@ -371,7 +212,7 @@ async def async_from_config_dict(
if not all(
await asyncio.gather(
*(
create_eager_task(async_setup_component(hass, domain, config))
async_setup_component(hass, domain, config)
for domain in CORE_INTEGRATIONS
)
)
@@ -386,12 +227,12 @@ async def async_from_config_dict(
try:
await conf_util.async_process_ha_core_config(hass, core_config)
except vol.Invalid as config_err:
conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass)
async_notify_setup_error(hass, core.DOMAIN)
conf_util.async_log_exception(config_err, "homeassistant", core_config, hass)
return None
except HomeAssistantError:
_LOGGER.error(
"Home Assistant core failed to initialize. Further initialization aborted"
"Home Assistant core failed to initialize. "
"Further initialization aborted"
)
return None
@@ -400,35 +241,18 @@ async def async_from_config_dict(
stop = monotonic()
_LOGGER.info("Home Assistant initialized in %.2fs", stop - start)
if (
REQUIRED_NEXT_PYTHON_HA_RELEASE
and sys.version_info[:3] < REQUIRED_NEXT_PYTHON_VER
):
current_python_version = ".".join(str(x) for x in sys.version_info[:3])
required_python_version = ".".join(str(x) for x in REQUIRED_NEXT_PYTHON_VER[:2])
_LOGGER.warning(
(
"Support for the running Python version %s is deprecated and "
"will be removed in Home Assistant %s; "
"Please upgrade Python to %s"
),
current_python_version,
REQUIRED_NEXT_PYTHON_HA_RELEASE,
required_python_version,
if REQUIRED_NEXT_PYTHON_DATE and sys.version_info[:3] < REQUIRED_NEXT_PYTHON_VER:
msg = (
"Support for the running Python version "
f"{'.'.join(str(x) for x in sys.version_info[:3])} is deprecated and will "
f"be removed in the first release after {REQUIRED_NEXT_PYTHON_DATE}. "
"Please upgrade Python to "
f"{'.'.join(str(x) for x in REQUIRED_NEXT_PYTHON_VER)} or "
"higher."
)
issue_registry.async_create_issue(
hass,
core.DOMAIN,
"python_version",
is_fixable=False,
severity=issue_registry.IssueSeverity.WARNING,
breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
translation_key="python_version",
translation_placeholders={
"current_python_version": current_python_version,
"required_python_version": required_python_version,
"breaks_in_ha_version": REQUIRED_NEXT_PYTHON_HA_RELEASE,
},
_LOGGER.warning(msg)
hass.components.persistent_notification.async_create(
msg, "Python version", "python_version"
)
return hass
@@ -446,13 +270,12 @@ def async_enable_logging(
This method must be run in the event loop.
"""
fmt = (
"%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
)
fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
datefmt = "%Y-%m-%d %H:%M:%S"
if not log_no_color:
try:
# pylint: disable-next=import-outside-toplevel
# pylint: disable=import-outside-toplevel
from colorlog import ColoredFormatter
# basicConfig must be called after importing colorlog in order to
@@ -463,7 +286,7 @@ def async_enable_logging(
logging.getLogger().handlers[0].setFormatter(
ColoredFormatter(
colorfmt,
datefmt=FORMAT_DATETIME,
datefmt=datefmt,
reset=True,
log_colors={
"DEBUG": "cyan",
@@ -479,29 +302,19 @@ def async_enable_logging(
# If the above initialization failed for any reason, setup the default
# formatting. If the above succeeds, this will result in a no-op.
logging.basicConfig(format=fmt, datefmt=FORMAT_DATETIME, level=logging.INFO)
# Capture warnings.warn(...) and friends messages in logs.
# The standard destination for them is stderr, which may end up unnoticed.
# This way they're where other messages are, and can be filtered as usual.
logging.captureWarnings(True)
logging.basicConfig(format=fmt, datefmt=datefmt, level=logging.INFO)
# Suppress overly verbose logs from libraries that aren't helpful
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
logging.getLogger("httpx").setLevel(logging.WARNING)
sys.excepthook = lambda *args: logging.getLogger(None).exception(
"Uncaught exception", exc_info=args
"Uncaught exception", exc_info=args # type: ignore
)
threading.excepthook = lambda args: logging.getLogger(None).exception(
"Uncaught thread exception",
exc_info=( # type: ignore[arg-type]
args.exc_type,
args.exc_value,
args.exc_traceback,
),
exc_info=(args.exc_type, args.exc_value, args.exc_traceback), # type: ignore[arg-type]
)
# Log errors to a file if we have write access to file or config dir
@@ -518,10 +331,8 @@ def async_enable_logging(
if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
not err_path_exists and os.access(err_dir, os.W_OK)
):
err_handler: (
logging.handlers.RotatingFileHandler
| logging.handlers.TimedRotatingFileHandler
)
err_handler: logging.handlers.RotatingFileHandler | logging.handlers.TimedRotatingFileHandler
if log_rotate_days:
err_handler = logging.handlers.TimedRotatingFileHandler(
err_log_path, when="midnight", backupCount=log_rotate_days
@@ -537,7 +348,7 @@ def async_enable_logging(
_LOGGER.error("Error rolling over log file: %s", err)
err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))
logger = logging.getLogger("")
logger.addHandler(err_handler)
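The error-log handler above is either size based or time based depending on log_rotate_days. The same decision can be reproduced in isolation with the standard library; the path, format, and levels below are illustrative only.
import logging
import logging.handlers
def build_error_handler(err_log_path: str, log_rotate_days: int | None) -> logging.Handler:
    """Return a file handler, rotating daily when log_rotate_days is set (sketch)."""
    handler: logging.Handler
    if log_rotate_days:
        handler = logging.handlers.TimedRotatingFileHandler(
            err_log_path, when="midnight", backupCount=log_rotate_days
        )
    else:
        handler = logging.handlers.RotatingFileHandler(err_log_path, backupCount=1)
    handler.setLevel(logging.WARNING)
    handler.setFormatter(
        logging.Formatter(
            "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s",
            datefmt="%Y-%m-%d %H:%M:%S",
        )
    )
    return handler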
@@ -557,7 +368,8 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
This function is a coroutine.
"""
deps_dir = os.path.join(config_dir, "deps")
if (lib_dir := await async_get_user_site(deps_dir)) not in sys.path:
lib_dir = await async_get_user_site(deps_dir)
if lib_dir not in sys.path:
sys.path.insert(0, lib_dir)
return deps_dir
@@ -566,90 +378,46 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
"""Get domains of components to set up."""
# Filter out the repeating and common config section [homeassistant]
domains = {
domain for key in config if (domain := cv.domain_key(key)) != core.DOMAIN
}
domains = {key.split(" ")[0] for key in config if key != core.DOMAIN}
# Add config entry and default domains
if not hass.config.recovery_mode:
domains.update(DEFAULT_INTEGRATIONS)
# Add config entry domains
if not hass.config.safe_mode:
domains.update(hass.config_entries.async_domains())
else:
domains.update(DEFAULT_INTEGRATIONS_RECOVERY_MODE)
# Add domains depending on if the Supervisor is used or not
if "SUPERVISOR" in os.environ:
domains.update(DEFAULT_INTEGRATIONS_SUPERVISOR)
# Make sure the Hass.io component is loaded
if "HASSIO" in os.environ:
domains.add("hassio")
return domains
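Both variants of _get_domains derive the set of integration domains from the top-level configuration keys, stripping anything after the first space so that keys like "sensor 2" map to the sensor domain. A self-contained illustration with a hypothetical configuration dict:
config = {
    "homeassistant": {},   # core section, filtered out
    "sensor": [],
    "sensor 2": [],        # suffixed keys collapse to the same domain
    "automation manual": [],
}
domains = {key.split(" ")[0] for key in config if key != "homeassistant"}
assert domains == {"sensor", "automation"}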
class _WatchPendingSetups:
"""Periodic log and dispatch of setups that are pending."""
def __init__(
self, hass: core.HomeAssistant, setup_started: dict[str, float]
) -> None:
"""Initialize the WatchPendingSetups class."""
self._hass = hass
self._setup_started = setup_started
self._duration_count = 0
self._handle: asyncio.TimerHandle | None = None
self._previous_was_empty = True
self._loop = hass.loop
def _async_watch(self) -> None:
"""Periodic log of setups that are pending."""
now = monotonic()
self._duration_count += SLOW_STARTUP_CHECK_INTERVAL
async def _async_watch_pending_setups(hass: core.HomeAssistant) -> None:
"""Periodic log of setups that are pending for longer than LOG_SLOW_STARTUP_INTERVAL."""
loop_count = 0
setup_started: dict[str, datetime] = hass.data[DATA_SETUP_STARTED]
previous_was_empty = True
while True:
now = dt_util.utcnow()
remaining_with_setup_started = {
domain: (now - start_time)
for domain, start_time in self._setup_started.items()
domain: (now - setup_started[domain]).total_seconds()
for domain in setup_started
}
if remaining_with_setup_started:
_LOGGER.debug("Integration remaining: %s", remaining_with_setup_started)
elif waiting_tasks := self._hass._active_tasks: # pylint: disable=protected-access
_LOGGER.debug("Waiting on tasks: %s", waiting_tasks)
self._async_dispatch(remaining_with_setup_started)
if (
self._setup_started
and self._duration_count % LOG_SLOW_STARTUP_INTERVAL == 0
):
# Once startup takes longer than LOG_SLOW_STARTUP_INTERVAL (60s), we log
# every LOG_SLOW_STARTUP_INTERVAL until all integrations are done
_LOGGER.debug("Integration remaining: %s", remaining_with_setup_started)
if remaining_with_setup_started or not previous_was_empty:
async_dispatcher_send(
hass, SIGNAL_BOOTSTRAP_INTEGRATONS, remaining_with_setup_started
)
previous_was_empty = not remaining_with_setup_started
await asyncio.sleep(SLOW_STARTUP_CHECK_INTERVAL)
loop_count += SLOW_STARTUP_CHECK_INTERVAL
if loop_count >= LOG_SLOW_STARTUP_INTERVAL and setup_started:
_LOGGER.warning(
"Waiting on integrations to complete setup: %s",
", ".join(self._setup_started),
", ".join(setup_started),
)
_LOGGER.debug("Running timeout Zones: %s", self._hass.timeout.zones)
self._async_schedule_next()
def _async_dispatch(self, remaining_with_setup_started: dict[str, float]) -> None:
"""Dispatch the signal."""
if remaining_with_setup_started or not self._previous_was_empty:
async_dispatcher_send(
self._hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, remaining_with_setup_started
)
self._previous_was_empty = not remaining_with_setup_started
def _async_schedule_next(self) -> None:
"""Schedule the next call."""
self._handle = self._loop.call_later(
SLOW_STARTUP_CHECK_INTERVAL, self._async_watch
)
def async_start(self) -> None:
"""Start watching."""
self._async_schedule_next()
def async_stop(self) -> None:
"""Stop watching."""
self._async_dispatch({})
if self._handle:
self._handle.cancel()
self._handle = None
loop_count = 0
_LOGGER.debug("Running timeout Zones: %s", hass.timeout.zones)
async def async_setup_multi_components(
@@ -658,166 +426,94 @@ async def async_setup_multi_components(
config: dict[str, Any],
) -> None:
"""Set up multiple domains. Log on failure."""
# Avoid creating tasks for domains that were setup in a previous stage
domains_not_yet_setup = domains - hass.config.components
futures = {
domain: hass.async_create_task(
async_setup_component(hass, domain, config),
f"setup component {domain}",
eager_start=True,
)
for domain in domains_not_yet_setup
domain: hass.async_create_task(async_setup_component(hass, domain, config))
for domain in domains
}
results = await asyncio.gather(*futures.values(), return_exceptions=True)
for idx, domain in enumerate(futures):
result = results[idx]
if isinstance(result, BaseException):
_LOGGER.error(
"Error setting up integration %s - received exception",
domain,
exc_info=(type(result), result, result.__traceback__),
)
async def _async_resolve_domains_to_setup(
hass: core.HomeAssistant, config: dict[str, Any]
) -> tuple[set[str], dict[str, loader.Integration]]:
"""Resolve all dependencies and return list of domains to set up."""
base_platforms_loaded = False
domains_to_setup = _get_domains(hass, config)
needed_requirements: set[str] = set()
platform_integrations = conf_util.extract_platform_integrations(
config, BASE_PLATFORMS
)
# Resolve all dependencies so we know all integrations
# that will have to be loaded and start right away
integration_cache: dict[str, loader.Integration] = {}
to_resolve: set[str] = domains_to_setup
while to_resolve:
old_to_resolve: set[str] = to_resolve
to_resolve = set()
if not base_platforms_loaded:
# Load base platforms right away since
# we do not require the manifest to list
# them as dependencies and we want
# to avoid the lock contention when multiple
# integrations try to resolve them at once
base_platforms_loaded = True
to_get = {*old_to_resolve, *BASE_PLATFORMS, *platform_integrations}
else:
to_get = old_to_resolve
manifest_deps: set[str] = set()
resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
integrations_to_process: list[loader.Integration] = []
for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
if not isinstance(itg, loader.Integration):
continue
integration_cache[domain] = itg
needed_requirements.update(itg.requirements)
if domain not in old_to_resolve:
continue
integrations_to_process.append(itg)
manifest_deps.update(itg.dependencies)
manifest_deps.update(itg.after_dependencies)
if not itg.all_dependencies_resolved:
resolve_dependencies_tasks.append(
create_eager_task(
itg.resolve_dependencies(),
name=f"resolve dependencies {domain}",
loop=hass.loop,
)
)
if unseen_deps := manifest_deps - integration_cache.keys():
# If there are dependencies, try to preload all
# the integrations manifest at once and add them
# to the list of requirements we need to install
# so we can try to check if they are already installed
# in a single call below which avoids each integration
# having to wait for the lock to do it individually
deps = await loader.async_get_integrations(hass, unseen_deps)
for dependant_domain, dependant_itg in deps.items():
if isinstance(dependant_itg, loader.Integration):
integration_cache[dependant_domain] = dependant_itg
needed_requirements.update(dependant_itg.requirements)
if resolve_dependencies_tasks:
await asyncio.gather(*resolve_dependencies_tasks)
for itg in integrations_to_process:
for dep in itg.all_dependencies:
if dep in domains_to_setup:
continue
domains_to_setup.add(dep)
to_resolve.add(dep)
_LOGGER.info("Domains to be set up: %s", domains_to_setup)
# Optimistically check if requirements are already installed
# ahead of setting up the integrations so we can prime the cache
# We do not wait for this since it's an optimization only
hass.async_create_background_task(
requirements.async_load_installed_versions(hass, needed_requirements),
"check installed requirements",
eager_start=True,
)
# Start loading translations for all integrations we are going to set up
# in the background so they are ready when we need them. This avoids a
# lot of waiting for the translation load lock and a thundering herd of
# tasks trying to load the same translations at the same time as each
# integration is loaded.
#
# We do not wait for this since as soon as the task runs it will
# hold the translation load lock and if anything is fast enough to
# wait for the translation load lock, loading will be done by the
# time it gets to it.
hass.async_create_background_task(
translation.async_load_integrations(
hass, {*BASE_PLATFORMS, *platform_integrations, *domains_to_setup}
),
"load translations",
eager_start=True,
)
return domains_to_setup, integration_cache
await asyncio.wait(futures.values())
errors = [domain for domain in domains if futures[domain].exception()]
for domain in errors:
exception = futures[domain].exception()
assert exception is not None
_LOGGER.error(
"Error setting up integration %s - received exception",
domain,
exc_info=(type(exception), exception, exception.__traceback__),
)
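Both variants fan out one task per domain and then report failures per domain. The newer code relies on asyncio.gather(..., return_exceptions=True), so one failing setup is returned as an exception object instead of cancelling its siblings. A runnable sketch of that pattern with a stand-in setup coroutine:
import asyncio
async def setup(domain: str) -> bool:
    """Stand-in for async_setup_component, illustrative only."""
    if domain == "broken":
        raise RuntimeError(f"{domain} failed to set up")
    return True
async def setup_all(domains: set[str]) -> None:
    tasks = {domain: asyncio.ensure_future(setup(domain)) for domain in domains}
    results = await asyncio.gather(*tasks.values(), return_exceptions=True)
    for domain, result in zip(tasks, results):
        if isinstance(result, BaseException):
            print(f"Error setting up integration {domain}: {result!r}")
asyncio.run(setup_all({"light", "broken", "switch"}))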
async def _async_set_up_integrations(
hass: core.HomeAssistant, config: dict[str, Any]
) -> None:
"""Set up all the integrations."""
setup_started: dict[str, float] = {}
hass.data[DATA_SETUP_STARTED] = setup_started
setup_time: dict[str, timedelta] = hass.data.setdefault(DATA_SETUP_TIME, {})
hass.data[DATA_SETUP_STARTED] = {}
setup_time = hass.data[DATA_SETUP_TIME] = {}
watcher = _WatchPendingSetups(hass, setup_started)
watcher.async_start()
watch_task = asyncio.create_task(_async_watch_pending_setups(hass))
domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
hass, config
)
domains_to_setup = _get_domains(hass, config)
# Initialize recorder
if "recorder" in domains_to_setup:
recorder.async_initialize_recorder(hass)
# Resolve all dependencies so we know all integrations
# that will have to be loaded and start right away
integration_cache: dict[str, loader.Integration] = {}
to_resolve = domains_to_setup
while to_resolve:
old_to_resolve = to_resolve
to_resolve = set()
pre_stage_domains: dict[str, set[str]] = {
name: domains_to_setup & domain_group
for name, domain_group in SETUP_ORDER.items()
}
integrations_to_process = [
int_or_exc
for int_or_exc in await gather_with_concurrency(
loader.MAX_LOAD_CONCURRENTLY,
*(
loader.async_get_integration(hass, domain)
for domain in old_to_resolve
),
return_exceptions=True,
)
if isinstance(int_or_exc, loader.Integration)
]
resolve_dependencies_tasks = [
itg.resolve_dependencies()
for itg in integrations_to_process
if not itg.all_dependencies_resolved
]
if resolve_dependencies_tasks:
await asyncio.gather(*resolve_dependencies_tasks)
for itg in integrations_to_process:
integration_cache[itg.domain] = itg
for dep in itg.all_dependencies:
if dep in domains_to_setup:
continue
domains_to_setup.add(dep)
to_resolve.add(dep)
_LOGGER.info("Domains to be set up: %s", domains_to_setup)
logging_domains = domains_to_setup & LOGGING_INTEGRATIONS
# Load logging as soon as possible
if logging_domains:
_LOGGER.info("Setting up logging: %s", logging_domains)
await async_setup_multi_components(hass, logging_domains, config)
# Start up debuggers. Start these first in case they want to wait.
debuggers = domains_to_setup & DEBUGGER_INTEGRATIONS
if debuggers:
_LOGGER.debug("Setting up debuggers: %s", debuggers)
await async_setup_multi_components(hass, debuggers, config)
# calculate what components to setup in what stage
stage_1_domains: set[str] = set()
stage_1_domains = set()
# Find all dependencies of any dependency of any stage 1 integration that
# we plan on loading and promote them to stage 1. This is done only to
# avoid misleading log messages
deps_promotion: set[str] = STAGE_1_INTEGRATIONS
# we plan on loading and promote them to stage 1
deps_promotion = STAGE_1_INTEGRATIONS
while deps_promotion:
old_deps_promotion = deps_promotion
deps_promotion = set()
@@ -828,21 +524,21 @@ async def _async_set_up_integrations(
stage_1_domains.add(domain)
if (dep_itg := integration_cache.get(domain)) is None:
dep_itg = integration_cache.get(domain)
if dep_itg is None:
continue
deps_promotion.update(dep_itg.all_dependencies)
stage_2_domains = domains_to_setup - stage_1_domains
stage_2_domains = domains_to_setup - logging_domains - debuggers - stage_1_domains
for name, domain_group in pre_stage_domains.items():
if domain_group:
stage_2_domains -= domain_group
_LOGGER.info("Setting up %s: %s", name, domain_group)
await async_setup_multi_components(hass, domain_group, config)
# Enables after dependencies when setting up stage 1 domains
async_set_domains_to_be_loaded(hass, stage_1_domains)
# Load the registries
await asyncio.gather(
device_registry.async_load(hass),
entity_registry.async_load(hass),
area_registry.async_load(hass),
)
# Start setup
if stage_1_domains:
@@ -852,13 +548,10 @@ async def _async_set_up_integrations(
STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
):
await async_setup_multi_components(hass, stage_1_domains, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for stage 1 waiting on %s - moving forward",
hass._active_tasks, # pylint: disable=protected-access
)
except asyncio.TimeoutError:
_LOGGER.warning("Setup timed out for stage 1 - moving forward")
# Add after dependencies when setting up stage 2 domains
# Enables after dependencies
async_set_domains_to_be_loaded(hass, stage_2_domains)
if stage_2_domains:
@@ -868,26 +561,26 @@ async def _async_set_up_integrations(
STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
):
await async_setup_multi_components(hass, stage_2_domains, config)
except TimeoutError:
_LOGGER.warning(
"Setup timed out for stage 2 waiting on %s - moving forward",
hass._active_tasks, # pylint: disable=protected-access
except asyncio.TimeoutError:
_LOGGER.warning("Setup timed out for stage 2 - moving forward")
watch_task.cancel()
async_dispatcher_send(hass, SIGNAL_BOOTSTRAP_INTEGRATONS, {})
_LOGGER.debug(
"Integration setup times: %s",
{
integration: timedelta.total_seconds()
for integration, timedelta in sorted(
setup_time.items(), key=lambda item: item[1].total_seconds() # type: ignore
)
},
)
# Wrap up startup
_LOGGER.debug("Waiting for startup to wrap up")
try:
async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
await hass.async_block_till_done()
except TimeoutError:
_LOGGER.warning(
"Setup timed out for bootstrap waiting on %s - moving forward",
hass._active_tasks, # pylint: disable=protected-access
)
watcher.async_stop()
_LOGGER.debug(
"Integration setup times: %s",
dict(sorted(setup_time.items(), key=itemgetter(1))),
)
except asyncio.TimeoutError:
_LOGGER.warning("Setup timed out for bootstrap - moving forward")

View File

@@ -1,5 +0,0 @@
{
"domain": "airthings",
"name": "Airthings",
"integrations": ["airthings", "airthings_ble"]
}

View File

@@ -1,5 +0,0 @@
{
"domain": "airvisual",
"name": "AirVisual",
"integrations": ["airvisual", "airvisual_pro"]
}

Some files were not shown because too many files have changed in this diff.