Mirror of https://github.com/home-assistant/core.git (synced 2026-01-04 23:05:26 +01:00)

Compare commits: 429 commits
.coveragerc (35 lines changed)
@@ -61,6 +61,11 @@ omit =
    homeassistant/components/coinbase.py
    homeassistant/components/sensor/coinbase.py

    homeassistant/components/cast/*
    homeassistant/components/*/cast.py

    homeassistant/components/cloudflare.py

    homeassistant/components/comfoconnect.py
    homeassistant/components/*/comfoconnect.py

@@ -97,7 +102,7 @@ omit =
    homeassistant/components/*/envisalink.py

    homeassistant/components/fritzbox.py
    homeassistant/components/*/fritzbox.py
    homeassistant/components/switch/fritzbox.py

    homeassistant/components/eufy.py
    homeassistant/components/*/eufy.py
@@ -189,18 +194,21 @@ omit =
    homeassistant/components/mychevy.py
    homeassistant/components/*/mychevy.py

    homeassistant/components/mysensors.py
    homeassistant/components/mysensors/*
    homeassistant/components/*/mysensors.py

    homeassistant/components/neato.py
    homeassistant/components/*/neato.py

    homeassistant/components/nest.py
    homeassistant/components/nest/__init__.py
    homeassistant/components/*/nest.py

    homeassistant/components/netatmo.py
    homeassistant/components/*/netatmo.py

    homeassistant/components/netgear_lte.py
    homeassistant/components/*/netgear_lte.py

    homeassistant/components/octoprint.py
    homeassistant/components/*/octoprint.py

@@ -249,6 +257,9 @@ omit =
    homeassistant/components/smappee.py
    homeassistant/components/*/smappee.py

    homeassistant/components/sonos/__init__.py
    homeassistant/components/*/sonos.py

    homeassistant/components/tado.py
    homeassistant/components/*/tado.py

@@ -311,6 +322,9 @@ omit =
    homeassistant/components/wink/*
    homeassistant/components/*/wink.py

    homeassistant/components/wirelesstag.py
    homeassistant/components/*/wirelesstag.py

    homeassistant/components/xiaomi_aqara.py
    homeassistant/components/*/xiaomi_aqara.py

@@ -329,6 +343,9 @@ omit =
    homeassistant/components/zoneminder.py
    homeassistant/components/*/zoneminder.py

    homeassistant/components/tuya.py
    homeassistant/components/*/tuya.py

    homeassistant/components/alarm_control_panel/alarmdotcom.py
    homeassistant/components/alarm_control_panel/canary.py
    homeassistant/components/alarm_control_panel/concord232.py
@@ -348,6 +365,7 @@ omit =
    homeassistant/components/binary_sensor/ping.py
    homeassistant/components/binary_sensor/rest.py
    homeassistant/components/binary_sensor/tapsaff.py
    homeassistant/components/binary_sensor/uptimerobot.py
    homeassistant/components/browser.py
    homeassistant/components/calendar/caldav.py
    homeassistant/components/calendar/todoist.py
@@ -363,6 +381,7 @@ omit =
    homeassistant/components/camera/rpi_camera.py
    homeassistant/components/camera/synology.py
    homeassistant/components/camera/xeoma.py
    homeassistant/components/camera/xiaomi.py
    homeassistant/components/camera/yi.py
    homeassistant/components/climate/econet.py
    homeassistant/components/climate/ephember.py
@@ -378,6 +397,7 @@ omit =
    homeassistant/components/climate/sensibo.py
    homeassistant/components/climate/touchline.py
    homeassistant/components/climate/venstar.py
    homeassistant/components/climate/zhong_hong.py
    homeassistant/components/cover/garadget.py
    homeassistant/components/cover/gogogate2.py
    homeassistant/components/cover/homematic.py
@@ -397,6 +417,7 @@ omit =
    homeassistant/components/device_tracker/bt_home_hub_5.py
    homeassistant/components/device_tracker/cisco_ios.py
    homeassistant/components/device_tracker/ddwrt.py
    homeassistant/components/device_tracker/freebox.py
    homeassistant/components/device_tracker/fritz.py
    homeassistant/components/device_tracker/google_maps.py
    homeassistant/components/device_tracker/gpslogger.py
@@ -462,6 +483,7 @@ omit =
    homeassistant/components/light/yeelightsunflower.py
    homeassistant/components/light/zengge.py
    homeassistant/components/lirc.py
    homeassistant/components/lock/kiwi.py
    homeassistant/components/lock/lockitron.py
    homeassistant/components/lock/nello.py
    homeassistant/components/lock/nuki.py
@@ -472,7 +494,6 @@ omit =
    homeassistant/components/media_player/aquostv.py
    homeassistant/components/media_player/bluesound.py
    homeassistant/components/media_player/braviatv.py
    homeassistant/components/media_player/cast.py
    homeassistant/components/media_player/channels.py
    homeassistant/components/media_player/clementine.py
    homeassistant/components/media_player/cmus.py
@@ -481,10 +502,12 @@ omit =
    homeassistant/components/media_player/directv.py
    homeassistant/components/media_player/dunehd.py
    homeassistant/components/media_player/emby.py
    homeassistant/components/media_player/epson.py
    homeassistant/components/media_player/firetv.py
    homeassistant/components/media_player/frontier_silicon.py
    homeassistant/components/media_player/gpmdp.py
    homeassistant/components/media_player/gstreamer.py
    homeassistant/components/media_player/horizon.py
    homeassistant/components/media_player/itunes.py
    homeassistant/components/media_player/kodi.py
    homeassistant/components/media_player/lg_netcast.py
@@ -506,7 +529,6 @@ omit =
    homeassistant/components/media_player/russound_rnet.py
    homeassistant/components/media_player/snapcast.py
    homeassistant/components/media_player/songpal.py
    homeassistant/components/media_player/sonos.py
    homeassistant/components/media_player/spotify.py
    homeassistant/components/media_player/squeezebox.py
    homeassistant/components/media_player/ue_smart_radio.py
@@ -595,6 +617,7 @@ omit =
    homeassistant/components/sensor/domain_expiry.py
    homeassistant/components/sensor/dte_energy_bridge.py
    homeassistant/components/sensor/dublin_bus_transport.py
    homeassistant/components/sensor/duke_energy.py
    homeassistant/components/sensor/dwd_weather_warnings.py
    homeassistant/components/sensor/ebox.py
    homeassistant/components/sensor/eddystone_temperature.py
@@ -644,6 +667,7 @@ omit =
    homeassistant/components/sensor/nederlandse_spoorwegen.py
    homeassistant/components/sensor/netdata.py
    homeassistant/components/sensor/neurio_energy.py
    homeassistant/components/sensor/nsw_fuel_station.py
    homeassistant/components/sensor/nut.py
    homeassistant/components/sensor/nzbget.py
    homeassistant/components/sensor/ohmconnect.py
@@ -748,6 +772,7 @@ omit =
    homeassistant/components/tts/picotts.py
    homeassistant/components/vacuum/mqtt.py
    homeassistant/components/vacuum/roomba.py
    homeassistant/components/watson_iot.py
    homeassistant/components/weather/bom.py
    homeassistant/components/weather/buienradar.py
    homeassistant/components/weather/darksky.py
.gitignore (vendored, 3 lines changed)
@@ -107,3 +107,6 @@ desktop.ini

# Secrets
.lokalise_token

# monkeytype
monkeytype.sqlite3
.isort.cfg (new file, 2 lines)
@@ -0,0 +1,2 @@
[settings]
multi_line_output=4
.travis.yml (16 lines changed)
@@ -16,11 +16,17 @@ matrix:
      env: TOXENV=py35
    - python: "3.6"
      env: TOXENV=py36
# - python: "3.6-dev"
#   env: TOXENV=py36
# allow_failures:
#   - python: "3.5"
#     env: TOXENV=typing
    - python: "3.7"
      env: TOXENV=py37
      dist: xenial
    - python: "3.8-dev"
      env: TOXENV=py38
      dist: xenial
      if: branch = dev AND type = push
  allow_failures:
    - python: "3.8-dev"
      env: TOXENV=py38
      dist: xenial

cache:
  directories:
CODEOWNERS
@@ -70,6 +70,7 @@ homeassistant/components/sensor/filter.py @dgomes
homeassistant/components/sensor/gearbest.py @HerrHofrat
homeassistant/components/sensor/irish_rail_transport.py @ttroy50
homeassistant/components/sensor/miflora.py @danielhiversen @ChristianKuehnel
homeassistant/components/sensor/nsw_fuel_station.py @nickw444
homeassistant/components/sensor/pollen.py @bachya
homeassistant/components/sensor/qnap.py @colinodell
homeassistant/components/sensor/sma.py @kellerza
homeassistant/__main__.py
@@ -241,7 +241,7 @@ def cmdline() -> List[str]:


def setup_and_run_hass(config_dir: str,
                       args: argparse.Namespace) -> Optional[int]:
                       args: argparse.Namespace) -> int:
    """Set up HASS and run."""
    from homeassistant import bootstrap

@@ -274,7 +274,7 @@ def setup_and_run_hass(config_dir: str,
                       log_no_color=args.log_no_color)

    if hass is None:
        return None
        return -1

    if args.open_ui:
        # Imported here to avoid importing asyncio before monkey patch
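The two hunks above tighten setup_and_run_hass so it always returns an integer exit code (-1 when bootstrap fails) instead of Optional[int]. A minimal sketch of how such a return value is typically consumed by the caller; the main() wiring below is illustrative and not the exact code from this commit:

```python
import argparse
import sys


def setup_and_run_hass(config_dir: str, args: argparse.Namespace) -> int:
    """Stand-in for the real function: -1 on bootstrap failure, else an exit code."""
    hass = None  # pretend bootstrap failed
    if hass is None:
        return -1
    return 0


def main() -> int:
    """Propagate the integer so the process exit status reflects failures."""
    args = argparse.Namespace(config="/config")
    return setup_and_run_hass(args.config, args)


if __name__ == '__main__':
    sys.exit(main())
```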
homeassistant/auth.py (deleted, @@ -1,503 +0,0 @@)
The removed single-file authentication module ("Provide an authentication layer for Home Assistant.") contained generate_secret, AuthProvider, the User / RefreshToken / AccessToken / Credentials / Client models, auth_manager_from_config, AuthManager and AuthStore; that code reappears, split up, in the new homeassistant/auth package listed below.
homeassistant/auth/__init__.py (new file, 242 lines)
@@ -0,0 +1,242 @@
"""Provide an authentication layer for Home Assistant."""
import asyncio
import logging
from collections import OrderedDict

from homeassistant import data_entry_flow
from homeassistant.core import callback

from . import models
from . import auth_store
from .providers import auth_provider_from_config

_LOGGER = logging.getLogger(__name__)


async def auth_manager_from_config(hass, provider_configs):
    """Initialize an auth manager from config."""
    store = auth_store.AuthStore(hass)
    if provider_configs:
        providers = await asyncio.gather(
            *[auth_provider_from_config(hass, store, config)
              for config in provider_configs])
    else:
        providers = []
    # So returned auth providers are in same order as config
    provider_hash = OrderedDict()
    for provider in providers:
        if provider is None:
            continue

        key = (provider.type, provider.id)

        if key in provider_hash:
            _LOGGER.error(
                'Found duplicate provider: %s. Please add unique IDs if you '
                'want to have the same provider twice.', key)
            continue

        provider_hash[key] = provider
    manager = AuthManager(hass, store, provider_hash)
    return manager


class AuthManager:
    """Manage the authentication for Home Assistant."""

    def __init__(self, hass, store, providers):
        """Initialize the auth manager."""
        self._store = store
        self._providers = providers
        self.login_flow = data_entry_flow.FlowManager(
            hass, self._async_create_login_flow,
            self._async_finish_login_flow)
        self._access_tokens = OrderedDict()

    @property
    def active(self):
        """Return if any auth providers are registered."""
        return bool(self._providers)

    @property
    def support_legacy(self):
        """
        Return if legacy_api_password auth providers are registered.

        Should be removed when we removed legacy_api_password auth providers.
        """
        for provider_type, _ in self._providers:
            if provider_type == 'legacy_api_password':
                return True
        return False

    @property
    def auth_providers(self):
        """Return a list of available auth providers."""
        return list(self._providers.values())

    async def async_get_users(self):
        """Retrieve all users."""
        return await self._store.async_get_users()

    async def async_get_user(self, user_id):
        """Retrieve a user."""
        return await self._store.async_get_user(user_id)

    async def async_create_system_user(self, name):
        """Create a system user."""
        return await self._store.async_create_user(
            name=name,
            system_generated=True,
            is_active=True,
        )

    async def async_create_user(self, name):
        """Create a user."""
        kwargs = {
            'name': name,
            'is_active': True,
        }

        if await self._user_should_be_owner():
            kwargs['is_owner'] = True

        return await self._store.async_create_user(**kwargs)

    async def async_get_or_create_user(self, credentials):
        """Get or create a user."""
        if not credentials.is_new:
            for user in await self._store.async_get_users():
                for creds in user.credentials:
                    if creds.id == credentials.id:
                        return user

            raise ValueError('Unable to find the user.')

        auth_provider = self._async_get_auth_provider(credentials)

        if auth_provider is None:
            raise RuntimeError('Credential with unknown provider encountered')

        info = await auth_provider.async_user_meta_for_credentials(
            credentials)

        return await self._store.async_create_user(
            credentials=credentials,
            name=info.get('name'),
        )

    async def async_link_user(self, user, credentials):
        """Link credentials to an existing user."""
        await self._store.async_link_user(user, credentials)

    async def async_remove_user(self, user):
        """Remove a user."""
        tasks = [
            self.async_remove_credentials(credentials)
            for credentials in user.credentials
        ]

        if tasks:
            await asyncio.wait(tasks)

        await self._store.async_remove_user(user)

    async def async_activate_user(self, user):
        """Activate a user."""
        await self._store.async_activate_user(user)

    async def async_deactivate_user(self, user):
        """Deactivate a user."""
        if user.is_owner:
            raise ValueError('Unable to deactive the owner')
        await self._store.async_deactivate_user(user)

    async def async_remove_credentials(self, credentials):
        """Remove credentials."""
        provider = self._async_get_auth_provider(credentials)

        if (provider is not None and
                hasattr(provider, 'async_will_remove_credentials')):
            await provider.async_will_remove_credentials(credentials)

        await self._store.async_remove_credentials(credentials)

    async def async_create_refresh_token(self, user, client_id=None):
        """Create a new refresh token for a user."""
        if not user.is_active:
            raise ValueError('User is not active')

        if user.system_generated and client_id is not None:
            raise ValueError(
                'System generated users cannot have refresh tokens connected '
                'to a client.')

        if not user.system_generated and client_id is None:
            raise ValueError('Client is required to generate a refresh token.')

        return await self._store.async_create_refresh_token(user, client_id)

    async def async_get_refresh_token(self, token):
        """Get refresh token by token."""
        return await self._store.async_get_refresh_token(token)

    @callback
    def async_create_access_token(self, refresh_token):
        """Create a new access token."""
        access_token = models.AccessToken(refresh_token=refresh_token)
        self._access_tokens[access_token.token] = access_token
        return access_token

    @callback
    def async_get_access_token(self, token):
        """Get an access token."""
        tkn = self._access_tokens.get(token)

        if tkn is None:
            _LOGGER.debug('Attempt to get non-existing access token')
            return None

        if tkn.expired or not tkn.refresh_token.user.is_active:
            if tkn.expired:
                _LOGGER.debug('Attempt to get expired access token')
            else:
                _LOGGER.debug('Attempt to get access token for inactive user')
            self._access_tokens.pop(token)
            return None

        return tkn

    async def _async_create_login_flow(self, handler, *, source, data):
        """Create a login flow."""
        auth_provider = self._providers[handler]

        return await auth_provider.async_credential_flow()

    async def _async_finish_login_flow(self, result):
        """Result of a credential login flow."""
        if result['type'] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
            return None

        auth_provider = self._providers[result['handler']]
        return await auth_provider.async_get_or_create_credentials(
            result['data'])

    @callback
    def _async_get_auth_provider(self, credentials):
        """Helper to get auth provider from a set of credentials."""
        auth_provider_key = (credentials.auth_provider_type,
                             credentials.auth_provider_id)
        return self._providers.get(auth_provider_key)

    async def _user_should_be_owner(self):
        """Determine if user should be owner.

        A user should be an owner if it is the first non-system user that is
        being created.
        """
        for user in await self._store.async_get_users():
            if not user.system_generated:
                return False

        return True
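Taken together, the new package wires up as follows. A minimal usage sketch based only on the functions shown above; the running hass instance, the provider configuration list and the client_id string are assumed inputs, not values from this commit:

```python
# Sketch of driving the new auth package from bootstrap-style code.
from homeassistant import auth


async def async_example(hass):
    # Build the manager from the auth-provider section of the user's config.
    manager = await auth.auth_manager_from_config(
        hass, [{'type': 'homeassistant'}])

    # A regular (non-system) user needs a client_id for its refresh token.
    user = await manager.async_create_user('Example User')
    refresh_token = await manager.async_create_refresh_token(
        user, client_id='example-client')

    # Short-lived access tokens are minted from the refresh token and kept
    # only in memory.
    access_token = manager.async_create_access_token(refresh_token)

    # Later, an incoming request presents the opaque token string.
    assert manager.async_get_access_token(access_token.token) is access_token
```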
homeassistant/auth/auth_store.py (new file, 240 lines)
@@ -0,0 +1,240 @@
"""Storage for auth models."""
from collections import OrderedDict
from datetime import timedelta

from homeassistant.util import dt as dt_util

from . import models

STORAGE_VERSION = 1
STORAGE_KEY = 'auth'


class AuthStore:
    """Stores authentication info.

    Any mutation to an object should happen inside the auth store.

    The auth store is lazy. It won't load the data from disk until a method is
    called that needs it.
    """

    def __init__(self, hass):
        """Initialize the auth store."""
        self.hass = hass
        self._users = None
        self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)

    async def async_get_users(self):
        """Retrieve all users."""
        if self._users is None:
            await self.async_load()

        return list(self._users.values())

    async def async_get_user(self, user_id):
        """Retrieve a user by id."""
        if self._users is None:
            await self.async_load()

        return self._users.get(user_id)

    async def async_create_user(self, name, is_owner=None, is_active=None,
                                system_generated=None, credentials=None):
        """Create a new user."""
        if self._users is None:
            await self.async_load()

        kwargs = {
            'name': name
        }

        if is_owner is not None:
            kwargs['is_owner'] = is_owner

        if is_active is not None:
            kwargs['is_active'] = is_active

        if system_generated is not None:
            kwargs['system_generated'] = system_generated

        new_user = models.User(**kwargs)

        self._users[new_user.id] = new_user

        if credentials is None:
            await self.async_save()
            return new_user

        # Saving is done inside the link.
        await self.async_link_user(new_user, credentials)
        return new_user

    async def async_link_user(self, user, credentials):
        """Add credentials to an existing user."""
        user.credentials.append(credentials)
        await self.async_save()
        credentials.is_new = False

    async def async_remove_user(self, user):
        """Remove a user."""
        self._users.pop(user.id)
        await self.async_save()

    async def async_activate_user(self, user):
        """Activate a user."""
        user.is_active = True
        await self.async_save()

    async def async_deactivate_user(self, user):
        """Activate a user."""
        user.is_active = False
        await self.async_save()

    async def async_remove_credentials(self, credentials):
        """Remove credentials."""
        for user in self._users.values():
            found = None

            for index, cred in enumerate(user.credentials):
                if cred is credentials:
                    found = index
                    break

            if found is not None:
                user.credentials.pop(found)
                break

        await self.async_save()

    async def async_create_refresh_token(self, user, client_id=None):
        """Create a new token for a user."""
        refresh_token = models.RefreshToken(user=user, client_id=client_id)
        user.refresh_tokens[refresh_token.token] = refresh_token
        await self.async_save()
        return refresh_token

    async def async_get_refresh_token(self, token):
        """Get refresh token by token."""
        if self._users is None:
            await self.async_load()

        for user in self._users.values():
            refresh_token = user.refresh_tokens.get(token)
            if refresh_token is not None:
                return refresh_token

        return None

    async def async_load(self):
        """Load the users."""
        data = await self._store.async_load()

        # Make sure that we're not overriding data if 2 loads happened at the
        # same time
        if self._users is not None:
            return

        users = OrderedDict()

        if data is None:
            self._users = users
            return

        for user_dict in data['users']:
            users[user_dict['id']] = models.User(**user_dict)

        for cred_dict in data['credentials']:
            users[cred_dict['user_id']].credentials.append(models.Credentials(
                id=cred_dict['id'],
                is_new=False,
                auth_provider_type=cred_dict['auth_provider_type'],
                auth_provider_id=cred_dict['auth_provider_id'],
                data=cred_dict['data'],
            ))

        refresh_tokens = OrderedDict()

        for rt_dict in data['refresh_tokens']:
            token = models.RefreshToken(
                id=rt_dict['id'],
                user=users[rt_dict['user_id']],
                client_id=rt_dict['client_id'],
                created_at=dt_util.parse_datetime(rt_dict['created_at']),
                access_token_expiration=timedelta(
                    seconds=rt_dict['access_token_expiration']),
                token=rt_dict['token'],
            )
            refresh_tokens[token.id] = token
            users[rt_dict['user_id']].refresh_tokens[token.token] = token

        for ac_dict in data['access_tokens']:
            refresh_token = refresh_tokens[ac_dict['refresh_token_id']]
            token = models.AccessToken(
                refresh_token=refresh_token,
                created_at=dt_util.parse_datetime(ac_dict['created_at']),
                token=ac_dict['token'],
            )
            refresh_token.access_tokens.append(token)

        self._users = users

    async def async_save(self):
        """Save users."""
        users = [
            {
                'id': user.id,
                'is_owner': user.is_owner,
                'is_active': user.is_active,
                'name': user.name,
                'system_generated': user.system_generated,
            }
            for user in self._users.values()
        ]

        credentials = [
            {
                'id': credential.id,
                'user_id': user.id,
                'auth_provider_type': credential.auth_provider_type,
                'auth_provider_id': credential.auth_provider_id,
                'data': credential.data,
            }
            for user in self._users.values()
            for credential in user.credentials
        ]

        refresh_tokens = [
            {
                'id': refresh_token.id,
                'user_id': user.id,
                'client_id': refresh_token.client_id,
                'created_at': refresh_token.created_at.isoformat(),
                'access_token_expiration':
                    refresh_token.access_token_expiration.total_seconds(),
                'token': refresh_token.token,
            }
            for user in self._users.values()
            for refresh_token in user.refresh_tokens.values()
        ]

        access_tokens = [
            {
                'id': user.id,
                'refresh_token_id': refresh_token.id,
                'created_at': access_token.created_at.isoformat(),
                'token': access_token.token,
            }
            for user in self._users.values()
            for refresh_token in user.refresh_tokens.values()
            for access_token in refresh_token.access_tokens
        ]

        data = {
            'users': users,
            'credentials': credentials,
            'access_tokens': access_tokens,
            'refresh_tokens': refresh_tokens,
        }

        await self._store.async_save(data, delay=1)
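async_save above flattens users, credentials and tokens into a single dictionary before handing it to the storage helper. Roughly, the persisted payload looks like the sketch below; the IDs, dates, token strings and credential data are made-up placeholders:

```python
# Illustrative shape of the dict built by AuthStore.async_save.
data = {
    'users': [
        {'id': 'a1b2c3', 'is_owner': True, 'is_active': True,
         'name': 'Example User', 'system_generated': False},
    ],
    'credentials': [
        {'id': 'c9d8e7', 'user_id': 'a1b2c3',
         'auth_provider_type': 'homeassistant', 'auth_provider_id': None,
         'data': {'username': 'example'}},
    ],
    'refresh_tokens': [
        {'id': 'f0e1d2', 'user_id': 'a1b2c3', 'client_id': 'example-client',
         'created_at': '2018-07-13T10:00:00+00:00',
         'access_token_expiration': 1800.0, 'token': '<hex secret>'},
    ],
    'access_tokens': [
        {'id': 'a1b2c3', 'refresh_token_id': 'f0e1d2',
         'created_at': '2018-07-13T10:00:00+00:00', 'token': '<hex secret>'},
    ],
}
```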
homeassistant/auth/const.py (new file, 4 lines)
@@ -0,0 +1,4 @@
"""Constants for the auth module."""
from datetime import timedelta

ACCESS_TOKEN_EXPIRATION = timedelta(minutes=30)
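models.py below imports generate_secret from homeassistant.auth.util, which is not included in this diff view. A sketch of that helper, based on the identical function that lived in the removed single-file auth module:

```python
import binascii
import os


def generate_secret(entropy: int = 32) -> str:
    """Generate a secret.

    Backport of secrets.token_hex from Python 3.6; event-loop friendly,
    since os.urandom does not block the loop.
    """
    return binascii.hexlify(os.urandom(entropy)).decode('ascii')
```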
homeassistant/auth/models.py (new file, 75 lines)
@@ -0,0 +1,75 @@
"""Auth models."""
from datetime import datetime, timedelta
import uuid

import attr

from homeassistant.util import dt as dt_util

from .const import ACCESS_TOKEN_EXPIRATION
from .util import generate_secret


@attr.s(slots=True)
class User:
    """A user."""

    name = attr.ib(type=str)
    id = attr.ib(type=str, default=attr.Factory(lambda: uuid.uuid4().hex))
    is_owner = attr.ib(type=bool, default=False)
    is_active = attr.ib(type=bool, default=False)
    system_generated = attr.ib(type=bool, default=False)

    # List of credentials of a user.
    credentials = attr.ib(type=list, default=attr.Factory(list), cmp=False)

    # Tokens associated with a user.
    refresh_tokens = attr.ib(type=dict, default=attr.Factory(dict), cmp=False)


@attr.s(slots=True)
class RefreshToken:
    """RefreshToken for a user to grant new access tokens."""

    user = attr.ib(type=User)
    client_id = attr.ib(type=str)
    id = attr.ib(type=str, default=attr.Factory(lambda: uuid.uuid4().hex))
    created_at = attr.ib(type=datetime, default=attr.Factory(dt_util.utcnow))
    access_token_expiration = attr.ib(type=timedelta,
                                      default=ACCESS_TOKEN_EXPIRATION)
    token = attr.ib(type=str,
                    default=attr.Factory(lambda: generate_secret(64)))
    access_tokens = attr.ib(type=list, default=attr.Factory(list), cmp=False)


@attr.s(slots=True)
class AccessToken:
    """Access token to access the API.

    These will only ever be stored in memory and not be persisted.
    """

    refresh_token = attr.ib(type=RefreshToken)
    created_at = attr.ib(type=datetime, default=attr.Factory(dt_util.utcnow))
    token = attr.ib(type=str,
                    default=attr.Factory(generate_secret))

    @property
    def expired(self):
        """Return if this token has expired."""
        expires = self.created_at + self.refresh_token.access_token_expiration
        return dt_util.utcnow() > expires


@attr.s(slots=True)
class Credentials:
    """Credentials for a user on an auth provider."""

    auth_provider_type = attr.ib(type=str)
    auth_provider_id = attr.ib(type=str)

    # Allow the auth provider to store data to represent their auth.
    data = attr.ib(type=dict)

    id = attr.ib(type=str, default=attr.Factory(lambda: uuid.uuid4().hex))
    is_new = attr.ib(type=bool, default=True)
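Because AccessToken stores only its creation time and derives its lifetime from the parent RefreshToken, expiry is a pure computation. A small sketch exercising that property (the 30-minute window comes from ACCESS_TOKEN_EXPIRATION in const.py; the names and times below are illustrative):

```python
from datetime import timedelta

from homeassistant.auth import models
from homeassistant.util import dt as dt_util

# A freshly minted token is still within its 30-minute window.
user = models.User(name='Example User', is_active=True)
refresh = models.RefreshToken(user=user, client_id='example-client')
access = models.AccessToken(refresh_token=refresh)
assert not access.expired

# A token created 31 minutes ago reports itself as expired.
stale = models.AccessToken(
    refresh_token=refresh,
    created_at=dt_util.utcnow() - timedelta(minutes=31))
assert stale.expired
```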
homeassistant/auth/providers/__init__.py (new file, 139 lines)
@@ -0,0 +1,139 @@
"""Auth providers for Home Assistant."""
import importlib
import logging

import voluptuous as vol
from voluptuous.humanize import humanize_error

from homeassistant import requirements
from homeassistant.core import callback
from homeassistant.const import CONF_TYPE, CONF_NAME, CONF_ID
from homeassistant.util.decorator import Registry

from homeassistant.auth.models import Credentials

_LOGGER = logging.getLogger(__name__)
DATA_REQS = 'auth_prov_reqs_processed'

AUTH_PROVIDERS = Registry()

AUTH_PROVIDER_SCHEMA = vol.Schema({
    vol.Required(CONF_TYPE): str,
    vol.Optional(CONF_NAME): str,
    # Specify ID if you have two auth providers for same type.
    vol.Optional(CONF_ID): str,
}, extra=vol.ALLOW_EXTRA)


async def auth_provider_from_config(hass, store, config):
    """Initialize an auth provider from a config."""
    provider_name = config[CONF_TYPE]
    module = await load_auth_provider_module(hass, provider_name)

    if module is None:
        return None

    try:
        config = module.CONFIG_SCHEMA(config)
    except vol.Invalid as err:
        _LOGGER.error('Invalid configuration for auth provider %s: %s',
                      provider_name, humanize_error(config, err))
        return None

    return AUTH_PROVIDERS[provider_name](hass, store, config)


async def load_auth_provider_module(hass, provider):
    """Load an auth provider."""
    try:
        module = importlib.import_module(
            'homeassistant.auth.providers.{}'.format(provider))
    except ImportError:
        _LOGGER.warning('Unable to find auth provider %s', provider)
        return None

    if hass.config.skip_pip or not hasattr(module, 'REQUIREMENTS'):
        return module

    processed = hass.data.get(DATA_REQS)

    if processed is None:
        processed = hass.data[DATA_REQS] = set()
    elif provider in processed:
        return module

    req_success = await requirements.async_process_requirements(
        hass, 'auth provider {}'.format(provider), module.REQUIREMENTS)

    if not req_success:
        return None

    processed.add(provider)
    return module


class AuthProvider:
    """Provider of user authentication."""

    DEFAULT_TITLE = 'Unnamed auth provider'

    def __init__(self, hass, store, config):
        """Initialize an auth provider."""
        self.hass = hass
        self.store = store
        self.config = config

    @property
    def id(self):  # pylint: disable=invalid-name
        """Return id of the auth provider.

        Optional, can be None.
        """
        return self.config.get(CONF_ID)

    @property
    def type(self):
        """Return type of the provider."""
        return self.config[CONF_TYPE]

    @property
    def name(self):
        """Return the name of the auth provider."""
        return self.config.get(CONF_NAME, self.DEFAULT_TITLE)

    async def async_credentials(self):
        """Return all credentials of this provider."""
        users = await self.store.async_get_users()
        return [
            credentials
            for user in users
            for credentials in user.credentials
            if (credentials.auth_provider_type == self.type and
                credentials.auth_provider_id == self.id)
        ]

    @callback
    def async_create_credentials(self, data):
        """Create credentials."""
        return Credentials(
            auth_provider_type=self.type,
            auth_provider_id=self.id,
            data=data,
        )

    # Implement by extending class

    async def async_credential_flow(self):
        """Return the data flow for logging in with auth provider."""
        raise NotImplementedError

    async def async_get_or_create_credentials(self, flow_result):
        """Get credentials based on the flow result."""
        raise NotImplementedError

    async def async_user_meta_for_credentials(self, credentials):
        """Return extra user metadata for credentials.

        Will be used to populate info when creating a new user.
        """
        return {}
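To make the base class contract concrete, here is a minimal sketch of a custom provider built against the module above. It is illustrative only: the 'example_token' type, the ExampleTokenAuthProvider class and the TokenLoginFlow it returns are hypothetical, while the AUTH_PROVIDERS registry, the AuthProvider base class and the overridable coroutines are the ones defined in this file.

# Hypothetical provider, for illustration only.
@AUTH_PROVIDERS.register('example_token')
class ExampleTokenAuthProvider(AuthProvider):
    """Toy provider that authenticates with a single shared token."""

    DEFAULT_TITLE = 'Example Token'

    async def async_credential_flow(self):
        """Return the data entry flow used to log in."""
        # TokenLoginFlow would be a data_entry_flow.FlowHandler asking for
        # the token; it is hypothetical and not part of this change.
        return TokenLoginFlow(self)

    async def async_get_or_create_credentials(self, flow_result):
        """Reuse credentials for a known token, otherwise create them."""
        for credential in await self.async_credentials():
            if credential.data['token'] == flow_result['token']:
                return credential
        return self.async_create_credentials({'token': flow_result['token']})

    async def async_user_meta_for_credentials(self, credentials):
        """Tokens carry no identity, so name the user after the provider."""
        return {'name': self.name}

A provider only has to decide how a completed login flow maps onto a Credentials object; user records themselves are kept in the store that auth_provider_from_config passes in.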
@@ -6,15 +6,29 @@ import hmac

import voluptuous as vol

from homeassistant import auth, data_entry_flow
from homeassistant import data_entry_flow
from homeassistant.const import CONF_ID
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util import json

from homeassistant.auth.util import generate_secret

from . import AuthProvider, AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS

STORAGE_VERSION = 1
STORAGE_KEY = 'auth_provider.homeassistant'


PATH_DATA = '.users.json'
def _disallow_id(conf):
    """Disallow ID in config."""
    if CONF_ID in conf:
        raise vol.Invalid(
            'ID is not allowed for the homeassistant auth provider.')

CONFIG_SCHEMA = auth.AUTH_PROVIDER_SCHEMA.extend({
}, extra=vol.PREVENT_EXTRA)
    return conf


CONFIG_SCHEMA = vol.All(AUTH_PROVIDER_SCHEMA, _disallow_id)


class InvalidAuth(HomeAssistantError):
@@ -31,14 +45,22 @@ class InvalidUser(HomeAssistantError):
class Data:
    """Hold the user data."""

    def __init__(self, path, data):
    def __init__(self, hass):
        """Initialize the user data store."""
        self.path = path
        self.hass = hass
        self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
        self._data = None

    async def async_load(self):
        """Load stored data."""
        data = await self._store.async_load()

        if data is None:
            data = {
                'salt': auth.generate_secret(),
                'salt': generate_secret(),
                'users': []
            }

        self._data = data

    @property
@@ -77,8 +99,8 @@ class Data:
        hashed = base64.b64encode(hashed).decode()
        return hashed

    def add_user(self, username, password):
        """Add a user."""
    def add_auth(self, username, password):
        """Add a new authenticated user/pass."""
        if any(user['username'] == username for user in self.users):
            raise InvalidUser

@@ -87,8 +109,22 @@ class Data:
            'password': self.hash_password(password, True),
        })

    @callback
    def async_remove_auth(self, username):
        """Remove authentication."""
        index = None
        for i, user in enumerate(self.users):
            if user['username'] == username:
                index = i
                break

        if index is None:
            raise InvalidUser

        self.users.pop(index)

    def change_password(self, username, new_password):
        """Update the password of a user.
        """Update the password.

        Raises InvalidUser if user cannot be found.
        """
@@ -99,34 +135,38 @@ class Data:
        else:
            raise InvalidUser

    def save(self):
    async def async_save(self):
        """Save data."""
        json.save_json(self.path, self._data)
        await self._store.async_save(self._data)


def load_data(path):
    """Load auth data."""
    return Data(path, json.load_json(path, None))


@auth.AUTH_PROVIDERS.register('homeassistant')
class HassAuthProvider(auth.AuthProvider):
@AUTH_PROVIDERS.register('homeassistant')
class HassAuthProvider(AuthProvider):
    """Auth provider based on a local storage of users in HASS config dir."""

    DEFAULT_TITLE = 'Home Assistant Local'

    data = None

    async def async_initialize(self):
        """Initialize the auth provider."""
        if self.data is not None:
            return

        self.data = Data(self.hass)
        await self.data.async_load()

    async def async_credential_flow(self):
        """Return a flow to login."""
        return LoginFlow(self)

    async def async_validate_login(self, username, password):
        """Helper to validate a username and password."""
        def validate():
            """Validate creds."""
            data = self._auth_data()
            data.validate_login(username, password)
        if self.data is None:
            await self.async_initialize()

        await self.hass.async_add_job(validate)
        await self.hass.async_add_executor_job(
            self.data.validate_login, username, password)

    async def async_get_or_create_credentials(self, flow_result):
        """Get credentials based on the flow result."""
@@ -141,9 +181,23 @@ class HassAuthProvider(auth.AuthProvider):
            'username': username
        })

    def _auth_data(self):
        """Return the auth provider data."""
        return load_data(self.hass.config.path(PATH_DATA))
    async def async_user_meta_for_credentials(self, credentials):
        """Get extra info for this credential."""
        return {
            'name': credentials.data['username']
        }

    async def async_will_remove_credentials(self, credentials):
        """When credentials get removed, also remove the auth."""
        if self.data is None:
            await self.async_initialize()

        try:
            self.data.async_remove_auth(credentials.data['username'])
            await self.data.async_save()
        except InvalidUser:
            # Can happen if somehow we didn't clean up a credential
            pass


class LoginFlow(data_entry_flow.FlowHandler):
@@ -5,9 +5,11 @@ import hmac
import voluptuous as vol

from homeassistant.exceptions import HomeAssistantError
from homeassistant import auth, data_entry_flow
from homeassistant import data_entry_flow
from homeassistant.core import callback

from . import AuthProvider, AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS


USER_SCHEMA = vol.Schema({
    vol.Required('username'): str,
@@ -16,7 +18,7 @@ USER_SCHEMA = vol.Schema({
})


CONFIG_SCHEMA = auth.AUTH_PROVIDER_SCHEMA.extend({
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend({
    vol.Required('users'): [USER_SCHEMA]
}, extra=vol.PREVENT_EXTRA)

@@ -25,8 +27,8 @@ class InvalidAuthError(HomeAssistantError):
    """Raised when submitting invalid authentication."""


@auth.AUTH_PROVIDERS.register('insecure_example')
class ExampleAuthProvider(auth.AuthProvider):
@AUTH_PROVIDERS.register('insecure_example')
class ExampleAuthProvider(AuthProvider):
    """Example auth provider based on hardcoded usernames and passwords."""

    async def async_credential_flow(self):
homeassistant/auth/providers/legacy_api_password.py
Normal file
@@ -0,0 +1,107 @@
"""
Support Legacy API password auth provider.

It will be removed when auth system production ready
"""
from collections import OrderedDict
import hmac

import voluptuous as vol

from homeassistant.exceptions import HomeAssistantError
from homeassistant import data_entry_flow
from homeassistant.core import callback

from . import AuthProvider, AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS


USER_SCHEMA = vol.Schema({
    vol.Required('username'): str,
})


CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend({
}, extra=vol.PREVENT_EXTRA)

LEGACY_USER = 'homeassistant'


class InvalidAuthError(HomeAssistantError):
    """Raised when submitting invalid authentication."""


@AUTH_PROVIDERS.register('legacy_api_password')
class LegacyApiPasswordAuthProvider(AuthProvider):
    """Example auth provider based on hardcoded usernames and passwords."""

    DEFAULT_TITLE = 'Legacy API Password'

    async def async_credential_flow(self):
        """Return a flow to login."""
        return LoginFlow(self)

    @callback
    def async_validate_login(self, password):
        """Helper to validate a username and password."""
        if not hasattr(self.hass, 'http'):
            raise ValueError('http component is not loaded')

        if self.hass.http.api_password is None:
            raise ValueError('http component is not configured using'
                             ' api_password')

        if not hmac.compare_digest(self.hass.http.api_password.encode('utf-8'),
                                   password.encode('utf-8')):
            raise InvalidAuthError

    async def async_get_or_create_credentials(self, flow_result):
        """Return LEGACY_USER always."""
        for credential in await self.async_credentials():
            if credential.data['username'] == LEGACY_USER:
                return credential

        return self.async_create_credentials({
            'username': LEGACY_USER
        })

    async def async_user_meta_for_credentials(self, credentials):
        """
        Set name as LEGACY_USER always.

        Will be used to populate info when creating a new user.
        """
        return {'name': LEGACY_USER}


class LoginFlow(data_entry_flow.FlowHandler):
    """Handler for the login flow."""

    def __init__(self, auth_provider):
        """Initialize the login flow."""
        self._auth_provider = auth_provider

    async def async_step_init(self, user_input=None):
        """Handle the step of the form."""
        errors = {}

        if user_input is not None:
            try:
                self._auth_provider.async_validate_login(
                    user_input['password'])
            except InvalidAuthError:
                errors['base'] = 'invalid_auth'

            if not errors:
                return self.async_create_entry(
                    title=self._auth_provider.name,
                    data={}
                )

        schema = OrderedDict()
        schema['password'] = str

        return self.async_show_form(
            step_id='init',
            data_schema=vol.Schema(schema),
            errors=errors,
        )
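The password check above deliberately uses hmac.compare_digest rather than ==. A small, self-contained sketch of why; the strings are made-up examples:

# hmac.compare_digest examines the full length of both inputs regardless of
# where they first differ, so response time does not leak how much of the
# password was guessed correctly.
import hmac

stored = 'correct-api-password'.encode('utf-8')

print(hmac.compare_digest(stored, 'wrong-guess'.encode('utf-8')))            # False
print(hmac.compare_digest(stored, 'correct-api-password'.encode('utf-8')))   # True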
homeassistant/auth/util.py
Normal file
@@ -0,0 +1,13 @@
"""Auth utils."""
import binascii
import os


def generate_secret(entropy: int = 32) -> str:
    """Generate a secret.

    Backport of secrets.token_hex from Python 3.6

    Event loop friendly.
    """
    return binascii.hexlify(os.urandom(entropy)).decode('ascii')
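For reference, a quick illustration of what this helper returns; the variable names are just examples:

# n bytes of entropy hex-encode to a 2*n character lowercase hex string.
salt = generate_secret()     # default 32 bytes -> 64 hex characters
token = generate_secret(16)  # 16 bytes -> 32 hex characters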
@@ -1 +0,0 @@
"""Auth providers for Home Assistant."""
@@ -1,5 +1,4 @@
|
||||
"""Provide methods to bootstrap a Home Assistant instance."""
|
||||
import asyncio
|
||||
import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
@@ -17,7 +16,7 @@ from homeassistant.components import persistent_notification
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE
|
||||
from homeassistant.setup import async_setup_component
|
||||
from homeassistant.util.logging import AsyncHandler
|
||||
from homeassistant.util.package import async_get_user_site, get_user_site
|
||||
from homeassistant.util.package import async_get_user_site, is_virtual_env
|
||||
from homeassistant.util.yaml import clear_secret_cache
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.signal import async_register_signal_handling
|
||||
@@ -29,9 +28,8 @@ ERROR_LOG_FILENAME = 'home-assistant.log'
|
||||
# hass.data key for logging information.
|
||||
DATA_LOGGING = 'logging'
|
||||
|
||||
FIRST_INIT_COMPONENT = set((
|
||||
'system_log', 'recorder', 'mqtt', 'mqtt_eventstream', 'logger',
|
||||
'introduction', 'frontend', 'history'))
|
||||
FIRST_INIT_COMPONENT = {'system_log', 'recorder', 'mqtt', 'mqtt_eventstream',
|
||||
'logger', 'introduction', 'frontend', 'history'}
|
||||
|
||||
|
||||
def from_config_dict(config: Dict[str, Any],
|
||||
@@ -53,8 +51,9 @@ def from_config_dict(config: Dict[str, Any],
|
||||
if config_dir is not None:
|
||||
config_dir = os.path.abspath(config_dir)
|
||||
hass.config.config_dir = config_dir
|
||||
hass.loop.run_until_complete(
|
||||
async_mount_local_lib_path(config_dir, hass.loop))
|
||||
if not is_virtual_env():
|
||||
hass.loop.run_until_complete(
|
||||
async_mount_local_lib_path(config_dir))
|
||||
|
||||
# run task
|
||||
hass = hass.loop.run_until_complete(
|
||||
@@ -95,7 +94,8 @@ async def async_from_config_dict(config: Dict[str, Any],
|
||||
conf_util.async_log_exception(ex, 'homeassistant', core_config, hass)
|
||||
return None
|
||||
|
||||
await hass.async_add_job(conf_util.process_ha_config_upgrade, hass)
|
||||
await hass.async_add_executor_job(
|
||||
conf_util.process_ha_config_upgrade, hass)
|
||||
|
||||
hass.config.skip_pip = skip_pip
|
||||
if skip_pip:
|
||||
@@ -123,7 +123,6 @@ async def async_from_config_dict(config: Dict[str, Any],
|
||||
components.update(hass.config_entries.async_domains())
|
||||
|
||||
# setup components
|
||||
# pylint: disable=not-an-iterable
|
||||
res = await core_components.async_setup(hass, config)
|
||||
if not res:
|
||||
_LOGGER.error("Home Assistant core failed to initialize. "
|
||||
@@ -138,7 +137,7 @@ async def async_from_config_dict(config: Dict[str, Any],
|
||||
for component in components:
|
||||
if component not in FIRST_INIT_COMPONENT:
|
||||
continue
|
||||
hass.async_add_job(async_setup_component(hass, component, config))
|
||||
hass.async_create_task(async_setup_component(hass, component, config))
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -146,7 +145,7 @@ async def async_from_config_dict(config: Dict[str, Any],
|
||||
for component in components:
|
||||
if component in FIRST_INIT_COMPONENT:
|
||||
continue
|
||||
hass.async_add_job(async_setup_component(hass, component, config))
|
||||
hass.async_create_task(async_setup_component(hass, component, config))
|
||||
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -163,7 +162,8 @@ def from_config_file(config_path: str,
|
||||
skip_pip: bool = True,
|
||||
log_rotate_days: Any = None,
|
||||
log_file: Any = None,
|
||||
log_no_color: bool = False):
|
||||
log_no_color: bool = False)\
|
||||
-> Optional[core.HomeAssistant]:
|
||||
"""Read the configuration file and try to start all the functionality.
|
||||
|
||||
Will add functionality to 'hass' parameter if given,
|
||||
@@ -188,7 +188,8 @@ async def async_from_config_file(config_path: str,
|
||||
skip_pip: bool = True,
|
||||
log_rotate_days: Any = None,
|
||||
log_file: Any = None,
|
||||
log_no_color: bool = False):
|
||||
log_no_color: bool = False)\
|
||||
-> Optional[core.HomeAssistant]:
|
||||
"""Read the configuration file and try to start all the functionality.
|
||||
|
||||
Will add functionality to 'hass' parameter.
|
||||
@@ -197,13 +198,15 @@ async def async_from_config_file(config_path: str,
|
||||
# Set config dir to directory holding config file
|
||||
config_dir = os.path.abspath(os.path.dirname(config_path))
|
||||
hass.config.config_dir = config_dir
|
||||
await async_mount_local_lib_path(config_dir, hass.loop)
|
||||
|
||||
if not is_virtual_env():
|
||||
await async_mount_local_lib_path(config_dir)
|
||||
|
||||
async_enable_logging(hass, verbose, log_rotate_days, log_file,
|
||||
log_no_color)
|
||||
|
||||
try:
|
||||
config_dict = await hass.async_add_job(
|
||||
config_dict = await hass.async_add_executor_job(
|
||||
conf_util.load_yaml_config_file, config_path)
|
||||
except HomeAssistantError as err:
|
||||
_LOGGER.error("Error loading %s: %s", config_path, err)
|
||||
@@ -211,9 +214,8 @@ async def async_from_config_file(config_path: str,
|
||||
finally:
|
||||
clear_secret_cache()
|
||||
|
||||
hass = await async_from_config_dict(
|
||||
return await async_from_config_dict(
|
||||
config_dict, hass, enable_log=False, skip_pip=skip_pip)
|
||||
return hass
|
||||
|
||||
|
||||
@core.callback
|
||||
@@ -308,23 +310,13 @@ def async_enable_logging(hass: core.HomeAssistant,
|
||||
"Unable to setup error log %s (access denied)", err_log_path)
|
||||
|
||||
|
||||
def mount_local_lib_path(config_dir: str) -> str:
|
||||
"""Add local library to Python Path."""
|
||||
deps_dir = os.path.join(config_dir, 'deps')
|
||||
lib_dir = get_user_site(deps_dir)
|
||||
if lib_dir not in sys.path:
|
||||
sys.path.insert(0, lib_dir)
|
||||
return deps_dir
|
||||
|
||||
|
||||
async def async_mount_local_lib_path(config_dir: str,
|
||||
loop: asyncio.AbstractEventLoop) -> str:
|
||||
async def async_mount_local_lib_path(config_dir: str) -> str:
|
||||
"""Add local library to Python Path.
|
||||
|
||||
This function is a coroutine.
|
||||
"""
|
||||
deps_dir = os.path.join(config_dir, 'deps')
|
||||
lib_dir = await async_get_user_site(deps_dir, loop=loop)
|
||||
lib_dir = await async_get_user_site(deps_dir)
|
||||
if lib_dir not in sys.path:
|
||||
sys.path.insert(0, lib_dir)
|
||||
return deps_dir
|
||||
|
||||
@@ -121,7 +121,7 @@ def alarm_arm_custom_bypass(hass, code=None, entity_id=None):
|
||||
@asyncio.coroutine
|
||||
def async_setup(hass, config):
|
||||
"""Track states and offer events for sensors."""
|
||||
component = EntityComponent(
|
||||
component = hass.data[DOMAIN] = EntityComponent(
|
||||
logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
|
||||
|
||||
yield from component.async_setup(config)
|
||||
@@ -154,6 +154,16 @@ def async_setup(hass, config):
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass, entry):
|
||||
"""Setup a config entry."""
|
||||
return await hass.data[DOMAIN].async_setup_entry(entry)
|
||||
|
||||
|
||||
async def async_unload_entry(hass, entry):
|
||||
"""Unload a config entry."""
|
||||
return await hass.data[DOMAIN].async_unload_entry(entry)
|
||||
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
class AlarmControlPanel(Entity):
|
||||
"""An abstract class for alarm control devices."""
|
||||
|
||||
@@ -4,15 +4,17 @@ Support for Arlo Alarm Control Panels.
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/alarm_control_panel.arlo/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.components.alarm_control_panel import (
|
||||
AlarmControlPanel, PLATFORM_SCHEMA)
|
||||
from homeassistant.components.arlo import (DATA_ARLO, CONF_ATTRIBUTION)
|
||||
from homeassistant.components.arlo import (
|
||||
DATA_ARLO, CONF_ATTRIBUTION, SIGNAL_UPDATE_ARLO)
|
||||
from homeassistant.const import (
|
||||
ATTR_ATTRIBUTION, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME,
|
||||
STATE_ALARM_DISARMED)
|
||||
@@ -36,21 +38,20 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Arlo Alarm Control Panels."""
|
||||
data = hass.data[DATA_ARLO]
|
||||
arlo = hass.data[DATA_ARLO]
|
||||
|
||||
if not data.base_stations:
|
||||
if not arlo.base_stations:
|
||||
return
|
||||
|
||||
home_mode_name = config.get(CONF_HOME_MODE_NAME)
|
||||
away_mode_name = config.get(CONF_AWAY_MODE_NAME)
|
||||
base_stations = []
|
||||
for base_station in data.base_stations:
|
||||
for base_station in arlo.base_stations:
|
||||
base_stations.append(ArloBaseStation(base_station, home_mode_name,
|
||||
away_mode_name))
|
||||
async_add_devices(base_stations, True)
|
||||
add_devices(base_stations, True)
|
||||
|
||||
|
||||
class ArloBaseStation(AlarmControlPanel):
|
||||
@@ -68,6 +69,16 @@ class ArloBaseStation(AlarmControlPanel):
|
||||
"""Return icon."""
|
||||
return ICON
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Register callbacks."""
|
||||
async_dispatcher_connect(
|
||||
self.hass, SIGNAL_UPDATE_ARLO, self._update_callback)
|
||||
|
||||
@callback
|
||||
def _update_callback(self):
|
||||
"""Call update method."""
|
||||
self.async_schedule_update_ha_state(True)
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the device."""
|
||||
@@ -75,30 +86,22 @@ class ArloBaseStation(AlarmControlPanel):
|
||||
|
||||
def update(self):
|
||||
"""Update the state of the device."""
|
||||
# PyArlo sometimes returns None for mode. So retry 3 times before
|
||||
# returning None.
|
||||
num_retries = 3
|
||||
i = 0
|
||||
while i < num_retries:
|
||||
mode = self._base_station.mode
|
||||
if mode:
|
||||
self._state = self._get_state_from_mode(mode)
|
||||
return
|
||||
i += 1
|
||||
self._state = None
|
||||
_LOGGER.debug("Updating Arlo Alarm Control Panel %s", self.name)
|
||||
mode = self._base_station.mode
|
||||
if mode:
|
||||
self._state = self._get_state_from_mode(mode)
|
||||
else:
|
||||
self._state = None
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_alarm_disarm(self, code=None):
|
||||
async def async_alarm_disarm(self, code=None):
|
||||
"""Send disarm command."""
|
||||
self._base_station.mode = DISARMED
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_alarm_arm_away(self, code=None):
|
||||
async def async_alarm_arm_away(self, code=None):
|
||||
"""Send arm away command. Uses custom mode."""
|
||||
self._base_station.mode = self._away_mode_name
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_alarm_arm_home(self, code=None):
|
||||
async def async_alarm_arm_home(self, code=None):
|
||||
"""Send arm home command. Uses custom mode."""
|
||||
self._base_station.mode = self._home_mode_name
|
||||
|
||||
@@ -125,4 +128,4 @@ class ArloBaseStation(AlarmControlPanel):
|
||||
return STATE_ALARM_ARMED_HOME
|
||||
elif mode == self._away_mode_name:
|
||||
return STATE_ALARM_ARMED_AWAY
|
||||
return None
|
||||
return mode
|
||||
|
||||
@@ -0,0 +1,88 @@
|
||||
"""
|
||||
Support for HomematicIP alarm control panel.
|
||||
|
||||
For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/alarm_control_panel.homematicip_cloud/
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from homeassistant.const import (
|
||||
STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED,
|
||||
STATE_ALARM_TRIGGERED)
|
||||
from homeassistant.components.alarm_control_panel import AlarmControlPanel
|
||||
from homeassistant.components.homematicip_cloud import (
|
||||
HomematicipGenericDevice, DOMAIN as HMIPC_DOMAIN,
|
||||
HMIPC_HAPID)
|
||||
|
||||
|
||||
DEPENDENCIES = ['homematicip_cloud']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
HMIP_OPEN = 'OPEN'
|
||||
HMIP_ZONE_AWAY = 'EXTERNAL'
|
||||
HMIP_ZONE_HOME = 'INTERNAL'
|
||||
|
||||
|
||||
async def async_setup_platform(hass, config, async_add_devices,
|
||||
discovery_info=None):
|
||||
"""Set up the HomematicIP alarm control devices."""
|
||||
pass
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_devices):
|
||||
"""Set up the HomematicIP alarm control panel from a config entry."""
|
||||
from homematicip.aio.group import AsyncSecurityZoneGroup
|
||||
|
||||
home = hass.data[HMIPC_DOMAIN][config_entry.data[HMIPC_HAPID]].home
|
||||
devices = []
|
||||
for group in home.groups:
|
||||
if isinstance(group, AsyncSecurityZoneGroup):
|
||||
devices.append(HomematicipSecurityZone(home, group))
|
||||
|
||||
if devices:
|
||||
async_add_devices(devices)
|
||||
|
||||
|
||||
class HomematicipSecurityZone(HomematicipGenericDevice, AlarmControlPanel):
|
||||
"""Representation of an HomematicIP security zone group."""
|
||||
|
||||
def __init__(self, home, device):
|
||||
"""Initialize the security zone group."""
|
||||
device.modelType = 'Group-SecurityZone'
|
||||
device.windowState = ''
|
||||
super().__init__(home, device)
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the device."""
|
||||
if self._device.active:
|
||||
if (self._device.sabotage or self._device.motionDetected or
|
||||
self._device.windowState == HMIP_OPEN):
|
||||
return STATE_ALARM_TRIGGERED
|
||||
|
||||
if self._device.label == HMIP_ZONE_HOME:
|
||||
return STATE_ALARM_ARMED_HOME
|
||||
return STATE_ALARM_ARMED_AWAY
|
||||
|
||||
return STATE_ALARM_DISARMED
|
||||
|
||||
async def async_alarm_disarm(self, code=None):
|
||||
"""Send disarm command."""
|
||||
await self._home.set_security_zones_activation(False, False)
|
||||
|
||||
async def async_alarm_arm_home(self, code=None):
|
||||
"""Send arm home command."""
|
||||
await self._home.set_security_zones_activation(True, False)
|
||||
|
||||
async def async_alarm_arm_away(self, code=None):
|
||||
"""Send arm away command."""
|
||||
await self._home.set_security_zones_activation(True, True)
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the alarm control device."""
|
||||
# The base class is loading the battery property, but device doesn't
|
||||
# have this property - base class needs clean-up.
|
||||
return None
|
||||
@@ -20,7 +20,7 @@ from homeassistant.const import (
|
||||
from homeassistant.components.mqtt import (
|
||||
CONF_AVAILABILITY_TOPIC, CONF_STATE_TOPIC, CONF_COMMAND_TOPIC,
|
||||
CONF_PAYLOAD_AVAILABLE, CONF_PAYLOAD_NOT_AVAILABLE, CONF_QOS,
|
||||
MqttAvailability)
|
||||
CONF_RETAIN, MqttAvailability)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -54,6 +54,7 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
config.get(CONF_STATE_TOPIC),
|
||||
config.get(CONF_COMMAND_TOPIC),
|
||||
config.get(CONF_QOS),
|
||||
config.get(CONF_RETAIN),
|
||||
config.get(CONF_PAYLOAD_DISARM),
|
||||
config.get(CONF_PAYLOAD_ARM_HOME),
|
||||
config.get(CONF_PAYLOAD_ARM_AWAY),
|
||||
@@ -66,9 +67,9 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
class MqttAlarm(MqttAvailability, alarm.AlarmControlPanel):
|
||||
"""Representation of a MQTT alarm status."""
|
||||
|
||||
def __init__(self, name, state_topic, command_topic, qos, payload_disarm,
|
||||
payload_arm_home, payload_arm_away, code, availability_topic,
|
||||
payload_available, payload_not_available):
|
||||
def __init__(self, name, state_topic, command_topic, qos, retain,
|
||||
payload_disarm, payload_arm_home, payload_arm_away, code,
|
||||
availability_topic, payload_available, payload_not_available):
|
||||
"""Init the MQTT Alarm Control Panel."""
|
||||
super().__init__(availability_topic, qos, payload_available,
|
||||
payload_not_available)
|
||||
@@ -77,6 +78,7 @@ class MqttAlarm(MqttAvailability, alarm.AlarmControlPanel):
|
||||
self._state_topic = state_topic
|
||||
self._command_topic = command_topic
|
||||
self._qos = qos
|
||||
self._retain = retain
|
||||
self._payload_disarm = payload_disarm
|
||||
self._payload_arm_home = payload_arm_home
|
||||
self._payload_arm_away = payload_arm_away
|
||||
@@ -134,7 +136,8 @@ class MqttAlarm(MqttAvailability, alarm.AlarmControlPanel):
|
||||
if not self._validate_code(code, 'disarming'):
|
||||
return
|
||||
mqtt.async_publish(
|
||||
self.hass, self._command_topic, self._payload_disarm, self._qos)
|
||||
self.hass, self._command_topic, self._payload_disarm, self._qos,
|
||||
self._retain)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_alarm_arm_home(self, code=None):
|
||||
@@ -145,7 +148,8 @@ class MqttAlarm(MqttAvailability, alarm.AlarmControlPanel):
|
||||
if not self._validate_code(code, 'arming home'):
|
||||
return
|
||||
mqtt.async_publish(
|
||||
self.hass, self._command_topic, self._payload_arm_home, self._qos)
|
||||
self.hass, self._command_topic, self._payload_arm_home, self._qos,
|
||||
self._retain)
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_alarm_arm_away(self, code=None):
|
||||
@@ -156,7 +160,8 @@ class MqttAlarm(MqttAvailability, alarm.AlarmControlPanel):
|
||||
if not self._validate_code(code, 'arming away'):
|
||||
return
|
||||
mqtt.async_publish(
|
||||
self.hass, self._command_topic, self._payload_arm_away, self._qos)
|
||||
self.hass, self._command_topic, self._payload_arm_away, self._qos,
|
||||
self._retain)
|
||||
|
||||
def _validate_code(self, code, state):
|
||||
"""Validate given code."""
|
||||
|
||||
@@ -107,7 +107,6 @@ class _DisplayCategory(object):
|
||||
THERMOSTAT = "THERMOSTAT"
|
||||
|
||||
# Indicates the endpoint is a television.
|
||||
# pylint: disable=invalid-name
|
||||
TV = "TV"
|
||||
|
||||
|
||||
@@ -271,11 +270,14 @@ class _AlexaInterface(object):
|
||||
"""Return properties serialized for an API response."""
|
||||
for prop in self.properties_supported():
|
||||
prop_name = prop['name']
|
||||
yield {
|
||||
'name': prop_name,
|
||||
'namespace': self.name(),
|
||||
'value': self.get_property(prop_name),
|
||||
}
|
||||
# pylint: disable=assignment-from-no-return
|
||||
prop_value = self.get_property(prop_name)
|
||||
if prop_value is not None:
|
||||
yield {
|
||||
'name': prop_name,
|
||||
'namespace': self.name(),
|
||||
'value': prop_value,
|
||||
}
|
||||
|
||||
|
||||
class _AlexaPowerController(_AlexaInterface):
|
||||
@@ -439,14 +441,17 @@ class _AlexaThermostatController(_AlexaInterface):
|
||||
unit = self.entity.attributes[CONF_UNIT_OF_MEASUREMENT]
|
||||
temp = None
|
||||
if name == 'targetSetpoint':
|
||||
temp = self.entity.attributes.get(ATTR_TEMPERATURE)
|
||||
temp = self.entity.attributes.get(climate.ATTR_TEMPERATURE)
|
||||
elif name == 'lowerSetpoint':
|
||||
temp = self.entity.attributes.get(climate.ATTR_TARGET_TEMP_LOW)
|
||||
elif name == 'upperSetpoint':
|
||||
temp = self.entity.attributes.get(climate.ATTR_TARGET_TEMP_HIGH)
|
||||
if temp is None:
|
||||
else:
|
||||
raise _UnsupportedProperty(name)
|
||||
|
||||
if temp is None:
|
||||
return None
|
||||
|
||||
return {
|
||||
'value': float(temp),
|
||||
'scale': API_TEMP_UNITS[unit],
|
||||
@@ -1474,9 +1479,6 @@ async def async_api_set_thermostat_mode(hass, config, request, entity):
|
||||
mode = mode if isinstance(mode, str) else mode['value']
|
||||
|
||||
operation_list = entity.attributes.get(climate.ATTR_OPERATION_LIST)
|
||||
# Work around a pylint false positive due to
|
||||
# https://github.com/PyCQA/pylint/issues/1830
|
||||
# pylint: disable=stop-iteration-return
|
||||
ha_mode = next(
|
||||
(k for k, v in API_THERMOSTAT_MODES.items() if v == mode),
|
||||
None
|
||||
|
||||
@@ -18,7 +18,7 @@ from homeassistant.const import (
|
||||
from homeassistant.helpers import discovery
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['amcrest==1.2.2']
|
||||
REQUIREMENTS = ['amcrest==1.2.3']
|
||||
DEPENDENCIES = ['ffmpeg']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -81,7 +81,6 @@ class APIEventStream(HomeAssistantView):
|
||||
|
||||
async def get(self, request):
|
||||
"""Provide a streaming interface for the event bus."""
|
||||
# pylint: disable=no-self-use
|
||||
hass = request.app['hass']
|
||||
stop_obj = object()
|
||||
to_write = asyncio.Queue(loop=hass.loop)
|
||||
|
||||
@@ -5,14 +5,18 @@ For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/arlo/
|
||||
"""
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
|
||||
import voluptuous as vol
|
||||
from requests.exceptions import HTTPError, ConnectTimeout
|
||||
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.const import CONF_USERNAME, CONF_PASSWORD
|
||||
from homeassistant.const import (
|
||||
CONF_USERNAME, CONF_PASSWORD, CONF_SCAN_INTERVAL)
|
||||
from homeassistant.helpers.event import track_time_interval
|
||||
from homeassistant.helpers.dispatcher import dispatcher_send
|
||||
|
||||
REQUIREMENTS = ['pyarlo==0.1.2']
|
||||
REQUIREMENTS = ['pyarlo==0.1.9']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -25,10 +29,16 @@ DOMAIN = 'arlo'
|
||||
NOTIFICATION_ID = 'arlo_notification'
|
||||
NOTIFICATION_TITLE = 'Arlo Component Setup'
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=60)
|
||||
|
||||
SIGNAL_UPDATE_ARLO = "arlo_update"
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL):
|
||||
cv.time_period,
|
||||
}),
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
@@ -38,6 +48,7 @@ def setup(hass, config):
|
||||
conf = config[DOMAIN]
|
||||
username = conf.get(CONF_USERNAME)
|
||||
password = conf.get(CONF_PASSWORD)
|
||||
scan_interval = conf.get(CONF_SCAN_INTERVAL)
|
||||
|
||||
try:
|
||||
from pyarlo import PyArlo
|
||||
@@ -45,7 +56,17 @@ def setup(hass, config):
|
||||
arlo = PyArlo(username, password, preload=False)
|
||||
if not arlo.is_connected:
|
||||
return False
|
||||
|
||||
# assign refresh period to base station thread
|
||||
arlo_base_station = next((
|
||||
station for station in arlo.base_stations), None)
|
||||
|
||||
if arlo_base_station is None:
|
||||
return False
|
||||
|
||||
arlo_base_station.refresh_rate = scan_interval.total_seconds()
|
||||
hass.data[DATA_ARLO] = arlo
|
||||
|
||||
except (ConnectTimeout, HTTPError) as ex:
|
||||
_LOGGER.error("Unable to connect to Netgear Arlo: %s", str(ex))
|
||||
hass.components.persistent_notification.create(
|
||||
@@ -55,4 +76,17 @@ def setup(hass, config):
|
||||
title=NOTIFICATION_TITLE,
|
||||
notification_id=NOTIFICATION_ID)
|
||||
return False
|
||||
|
||||
def hub_refresh(event_time):
|
||||
"""Call ArloHub to refresh information."""
|
||||
_LOGGER.info("Updating Arlo Hub component")
|
||||
hass.data[DATA_ARLO].update(update_cameras=True,
|
||||
update_base_station=True)
|
||||
dispatcher_send(hass, SIGNAL_UPDATE_ARLO)
|
||||
|
||||
# register service
|
||||
hass.services.register(DOMAIN, 'update', hub_refresh)
|
||||
|
||||
# register scan interval for ArloHub
|
||||
track_time_interval(hass, hub_refresh, scan_interval)
|
||||
return True
|
||||
|
||||
@@ -102,6 +102,7 @@ a limited expiration.
|
||||
"token_type": "Bearer"
|
||||
}
|
||||
"""
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
@@ -112,13 +113,22 @@ from homeassistant import data_entry_flow
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.data_entry_flow import (
|
||||
FlowManagerIndexView, FlowManagerResourceView)
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.http.view import HomeAssistantView
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import indieauth
|
||||
|
||||
from .client import verify_client
|
||||
|
||||
DOMAIN = 'auth'
|
||||
DEPENDENCIES = ['http']
|
||||
|
||||
WS_TYPE_CURRENT_USER = 'auth/current_user'
|
||||
SCHEMA_WS_CURRENT_USER = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({
|
||||
vol.Required('type'): WS_TYPE_CURRENT_USER,
|
||||
})
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -133,6 +143,11 @@ async def async_setup(hass, config):
|
||||
hass.http.register_view(GrantTokenView(retrieve_credentials))
|
||||
hass.http.register_view(LinkUserView(retrieve_credentials))
|
||||
|
||||
hass.components.websocket_api.async_register_command(
|
||||
WS_TYPE_CURRENT_USER, websocket_current_user,
|
||||
SCHEMA_WS_CURRENT_USER
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -143,14 +158,13 @@ class AuthProvidersView(HomeAssistantView):
|
||||
name = 'api:auth:providers'
|
||||
requires_auth = False
|
||||
|
||||
@verify_client
|
||||
async def get(self, request, client):
|
||||
async def get(self, request):
|
||||
"""Get available auth providers."""
|
||||
return self.json([{
|
||||
'name': provider.name,
|
||||
'id': provider.id,
|
||||
'type': provider.type,
|
||||
} for provider in request.app['hass'].auth.async_auth_providers])
|
||||
} for provider in request.app['hass'].auth.auth_providers])
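With @verify_client removed, the provider listing no longer requires client credentials. The response body is a JSON list built from each provider's name, id and type, roughly as follows (values are illustrative, and the endpoint path itself is not shown in this hunk):

[{"name": "Home Assistant Local", "id": null, "type": "homeassistant"}]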
|
||||
|
||||
|
||||
class LoginFlowIndexView(FlowManagerIndexView):
|
||||
@@ -164,16 +178,16 @@ class LoginFlowIndexView(FlowManagerIndexView):
|
||||
"""Do not allow index of flows in progress."""
|
||||
return aiohttp.web.Response(status=405)
|
||||
|
||||
# pylint: disable=arguments-differ
|
||||
@verify_client
|
||||
@RequestDataValidator(vol.Schema({
|
||||
vol.Required('client_id'): str,
|
||||
vol.Required('handler'): vol.Any(str, list),
|
||||
vol.Required('redirect_uri'): str,
|
||||
}))
|
||||
async def post(self, request, client, data):
|
||||
async def post(self, request, data):
|
||||
"""Create a new login flow."""
|
||||
if data['redirect_uri'] not in client.redirect_uris:
|
||||
return self.json_message('invalid redirect uri', )
|
||||
if not indieauth.verify_redirect_uri(data['client_id'],
|
||||
data['redirect_uri']):
|
||||
return self.json_message('invalid client id or redirect uri', 400)
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
return await super().post(request)
|
||||
@@ -191,16 +205,20 @@ class LoginFlowResourceView(FlowManagerResourceView):
|
||||
super().__init__(flow_mgr)
|
||||
self._store_credentials = store_credentials
|
||||
|
||||
# pylint: disable=arguments-differ
|
||||
async def get(self, request):
|
||||
async def get(self, request, flow_id):
|
||||
"""Do not allow getting status of a flow in progress."""
|
||||
return self.json_message('Invalid flow specified', 404)
|
||||
|
||||
# pylint: disable=arguments-differ
|
||||
@verify_client
|
||||
@RequestDataValidator(vol.Schema(dict), allow_empty=True)
|
||||
async def post(self, request, client, flow_id, data):
|
||||
@RequestDataValidator(vol.Schema({
|
||||
'client_id': str
|
||||
}, extra=vol.ALLOW_EXTRA))
|
||||
async def post(self, request, flow_id, data):
|
||||
"""Handle progressing a login flow request."""
|
||||
client_id = data.pop('client_id')
|
||||
|
||||
if not indieauth.verify_client_id(client_id):
|
||||
return self.json_message('Invalid client id', 400)
|
||||
|
||||
try:
|
||||
result = await self._flow_mgr.async_configure(flow_id, data)
|
||||
except data_entry_flow.UnknownFlow:
|
||||
@@ -212,7 +230,7 @@ class LoginFlowResourceView(FlowManagerResourceView):
|
||||
return self.json(self._prepare_result_json(result))
|
||||
|
||||
result.pop('data')
|
||||
result['result'] = self._store_credentials(client.id, result['result'])
|
||||
result['result'] = self._store_credentials(client_id, result['result'])
|
||||
|
||||
return self.json(result)
|
||||
|
||||
@@ -228,20 +246,26 @@ class GrantTokenView(HomeAssistantView):
|
||||
"""Initialize the grant token view."""
|
||||
self._retrieve_credentials = retrieve_credentials
|
||||
|
||||
@verify_client
|
||||
async def post(self, request, client):
|
||||
async def post(self, request):
|
||||
"""Grant a token."""
|
||||
hass = request.app['hass']
|
||||
data = await request.post()
|
||||
|
||||
client_id = data.get('client_id')
|
||||
if client_id is None or not indieauth.verify_client_id(client_id):
|
||||
return self.json({
|
||||
'error': 'invalid_request',
|
||||
'error_description': 'Invalid client id',
|
||||
}, status_code=400)
|
||||
|
||||
grant_type = data.get('grant_type')
|
||||
|
||||
if grant_type == 'authorization_code':
|
||||
return await self._async_handle_auth_code(
|
||||
hass, client.id, data)
|
||||
return await self._async_handle_auth_code(hass, client_id, data)
|
||||
|
||||
elif grant_type == 'refresh_token':
|
||||
return await self._async_handle_refresh_token(
|
||||
hass, client.id, data)
|
||||
hass, client_id, data)
|
||||
|
||||
return self.json({
|
||||
'error': 'unsupported_grant_type',
|
||||
@@ -261,9 +285,17 @@ class GrantTokenView(HomeAssistantView):
|
||||
if credentials is None:
|
||||
return self.json({
|
||||
'error': 'invalid_request',
|
||||
'error_description': 'Invalid code',
|
||||
}, status_code=400)
|
||||
|
||||
user = await hass.auth.async_get_or_create_user(credentials)
|
||||
|
||||
if not user.is_active:
|
||||
return self.json({
|
||||
'error': 'access_denied',
|
||||
'error_description': 'User is not active',
|
||||
}, status_code=403)
|
||||
|
||||
refresh_token = await hass.auth.async_create_refresh_token(user,
|
||||
client_id)
|
||||
access_token = hass.auth.async_create_access_token(refresh_token)
|
||||
@@ -340,12 +372,43 @@ def _create_cred_store():
|
||||
def store_credentials(client_id, credentials):
|
||||
"""Store credentials and return a code to retrieve it."""
|
||||
code = uuid.uuid4().hex
|
||||
temp_credentials[(client_id, code)] = credentials
|
||||
temp_credentials[(client_id, code)] = (dt_util.utcnow(), credentials)
|
||||
return code
|
||||
|
||||
@callback
|
||||
def retrieve_credentials(client_id, code):
|
||||
"""Retrieve credentials."""
|
||||
return temp_credentials.pop((client_id, code), None)
|
||||
key = (client_id, code)
|
||||
|
||||
if key not in temp_credentials:
|
||||
return None
|
||||
|
||||
created, credentials = temp_credentials.pop(key)
|
||||
|
||||
# OAuth 4.2.1
|
||||
# The authorization code MUST expire shortly after it is issued to
|
||||
# mitigate the risk of leaks. A maximum authorization code lifetime of
|
||||
# 10 minutes is RECOMMENDED.
|
||||
if dt_util.utcnow() - created < timedelta(minutes=10):
|
||||
return credentials
|
||||
|
||||
return None
|
||||
|
||||
return store_credentials, retrieve_credentials
|
||||
|
||||
|
||||
@callback
|
||||
def websocket_current_user(hass, connection, msg):
|
||||
"""Return the current user."""
|
||||
user = connection.request.get('hass_user')
|
||||
|
||||
if user is None:
|
||||
connection.to_write.put_nowait(websocket_api.error_message(
|
||||
msg['id'], 'no_user', 'Not authenticated as a user'))
|
||||
return
|
||||
|
||||
connection.to_write.put_nowait(websocket_api.result_message(msg['id'], {
|
||||
'id': user.id,
|
||||
'name': user.name,
|
||||
'is_owner': user.is_owner,
|
||||
}))
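For context, a rough sketch of the websocket exchange this handler produces; the id and user values are illustrative, and the envelope is assumed to follow the websocket_api result/error message format:

# Client -> server:
#     {"id": 5, "type": "auth/current_user"}
# Server -> client (authenticated connection):
#     {"id": 5, "type": "result", "success": true,
#      "result": {"id": "abc123", "name": "Example User", "is_owner": true}}
# Server -> client (no user on the connection):
#     {"id": 5, "type": "result", "success": false,
#      "error": {"code": "no_user", "message": "Not authenticated as a user"}}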
|
||||
|
||||
@@ -1,79 +0,0 @@
|
||||
"""Helpers to resolve client ID/secret."""
|
||||
import base64
|
||||
from functools import wraps
|
||||
import hmac
|
||||
|
||||
import aiohttp.hdrs
|
||||
|
||||
|
||||
def verify_client(method):
|
||||
"""Decorator to verify client id/secret on requests."""
|
||||
@wraps(method)
|
||||
async def wrapper(view, request, *args, **kwargs):
|
||||
"""Verify client id/secret before doing request."""
|
||||
client = await _verify_client(request)
|
||||
|
||||
if client is None:
|
||||
return view.json({
|
||||
'error': 'invalid_client',
|
||||
}, status_code=401)
|
||||
|
||||
return await method(
|
||||
view, request, *args, **kwargs, client=client)
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
async def _verify_client(request):
|
||||
"""Method to verify the client id/secret in consistent time.
|
||||
|
||||
By using a consistent time for looking up client id and comparing the
|
||||
secret, we prevent attacks by malicious actors trying different client ids
|
||||
and are able to derive from the time it takes to process the request if
|
||||
they guessed the client id correctly.
|
||||
"""
|
||||
if aiohttp.hdrs.AUTHORIZATION not in request.headers:
|
||||
return None
|
||||
|
||||
auth_type, auth_value = \
|
||||
request.headers.get(aiohttp.hdrs.AUTHORIZATION).split(' ', 1)
|
||||
|
||||
if auth_type != 'Basic':
|
||||
return None
|
||||
|
||||
decoded = base64.b64decode(auth_value).decode('utf-8')
|
||||
try:
|
||||
client_id, client_secret = decoded.split(':', 1)
|
||||
except ValueError:
|
||||
# If no ':' in decoded
|
||||
client_id, client_secret = decoded, None
|
||||
|
||||
return await async_secure_get_client(
|
||||
request.app['hass'], client_id, client_secret)
|
||||
|
||||
|
||||
async def async_secure_get_client(hass, client_id, client_secret):
|
||||
"""Get a client id/secret in consistent time."""
|
||||
client = await hass.auth.async_get_client(client_id)
|
||||
|
||||
if client is None:
|
||||
if client_secret is not None:
|
||||
# Still do a compare so we run same time as if a client was found.
|
||||
hmac.compare_digest(client_secret.encode('utf-8'),
|
||||
client_secret.encode('utf-8'))
|
||||
return None
|
||||
|
||||
if client.secret is None:
|
||||
return client
|
||||
|
||||
elif client_secret is None:
|
||||
# Still do a compare so we run same time as if a secret was passed.
|
||||
hmac.compare_digest(client.secret.encode('utf-8'),
|
||||
client.secret.encode('utf-8'))
|
||||
return None
|
||||
|
||||
elif hmac.compare_digest(client_secret.encode('utf-8'),
|
||||
client.secret.encode('utf-8')):
|
||||
return client
|
||||
|
||||
return None
|
||||
homeassistant/components/auth/indieauth.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""Helpers to resolve client ID/secret."""
|
||||
from ipaddress import ip_address, ip_network
|
||||
from urllib.parse import urlparse
|
||||
|
||||
# IP addresses of loopback interfaces
|
||||
ALLOWED_IPS = (
|
||||
ip_address('127.0.0.1'),
|
||||
ip_address('::1'),
|
||||
)
|
||||
|
||||
# RFC1918 - Address allocation for Private Internets
|
||||
ALLOWED_NETWORKS = (
|
||||
ip_network('10.0.0.0/8'),
|
||||
ip_network('172.16.0.0/12'),
|
||||
ip_network('192.168.0.0/16'),
|
||||
)
|
||||
|
||||
|
||||
def verify_redirect_uri(client_id, redirect_uri):
|
||||
"""Verify that the client and redirect uri match."""
|
||||
try:
|
||||
client_id_parts = _parse_client_id(client_id)
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
redirect_parts = _parse_url(redirect_uri)
|
||||
|
||||
# IndieAuth 4.2.2 allows for redirect_uri to be on different domain
|
||||
# but needs to be specified in link tag when fetching `client_id`.
|
||||
# This is not implemented.
|
||||
|
||||
# Verify redirect url and client url have same scheme and domain.
|
||||
return (
|
||||
client_id_parts.scheme == redirect_parts.scheme and
|
||||
client_id_parts.netloc == redirect_parts.netloc
|
||||
)
|
||||
|
||||
|
||||
def verify_client_id(client_id):
|
||||
"""Verify that the client id is valid."""
|
||||
try:
|
||||
_parse_client_id(client_id)
|
||||
return True
|
||||
except ValueError:
|
||||
return False
|
||||
|
||||
|
||||
def _parse_url(url):
|
||||
"""Parse a url in parts and canonicalize according to IndieAuth."""
|
||||
parts = urlparse(url)
|
||||
|
||||
# Canonicalize a url according to IndieAuth 3.2.
|
||||
|
||||
# SHOULD convert the hostname to lowercase
|
||||
parts = parts._replace(netloc=parts.netloc.lower())
|
||||
|
||||
# If a URL with no path component is ever encountered,
|
||||
# it MUST be treated as if it had the path /.
|
||||
if parts.path == '':
|
||||
parts = parts._replace(path='/')
|
||||
|
||||
return parts
|
||||
|
||||
|
||||
def _parse_client_id(client_id):
|
||||
"""Test if client id is a valid URL according to IndieAuth section 3.2.
|
||||
|
||||
https://indieauth.spec.indieweb.org/#client-identifier
|
||||
"""
|
||||
parts = _parse_url(client_id)
|
||||
|
||||
# Client identifier URLs
|
||||
# MUST have either an https or http scheme
|
||||
if parts.scheme not in ('http', 'https'):
|
||||
raise ValueError()
|
||||
|
||||
# MUST contain a path component
|
||||
# Handled by url canonicalization.
|
||||
|
||||
# MUST NOT contain single-dot or double-dot path segments
|
||||
if any(segment in ('.', '..') for segment in parts.path.split('/')):
|
||||
raise ValueError(
|
||||
'Client ID cannot contain single-dot or double-dot path segments')
|
||||
|
||||
# MUST NOT contain a fragment component
|
||||
if parts.fragment != '':
|
||||
raise ValueError('Client ID cannot contain a fragment')
|
||||
|
||||
# MUST NOT contain a username or password component
|
||||
if parts.username is not None:
|
||||
raise ValueError('Client ID cannot contain username')
|
||||
|
||||
if parts.password is not None:
|
||||
raise ValueError('Client ID cannot contain password')
|
||||
|
||||
# MAY contain a port
|
||||
try:
|
||||
# parts raises ValueError when port cannot be parsed as int
|
||||
parts.port
|
||||
except ValueError:
|
||||
raise ValueError('Client ID contains invalid port')
|
||||
|
||||
# Additionally, hostnames
|
||||
# MUST be domain names or a loopback interface and
|
||||
# MUST NOT be IPv4 or IPv6 addresses except for IPv4 127.0.0.1
|
||||
# or IPv6 [::1]
|
||||
|
||||
# We are not going to follow the spec here. We are going to allow
|
||||
# any internal network IP to be used inside a client id.
|
||||
|
||||
address = None
|
||||
|
||||
try:
|
||||
netloc = parts.netloc
|
||||
|
||||
# Strip the [, ] from ipv6 addresses before parsing
|
||||
if netloc[0] == '[' and netloc[-1] == ']':
|
||||
netloc = netloc[1:-1]
|
||||
|
||||
address = ip_address(netloc)
|
||||
except ValueError:
|
||||
# Not an ip address
|
||||
pass
|
||||
|
||||
if (address is None or
|
||||
address in ALLOWED_IPS or
|
||||
any(address in network for network in ALLOWED_NETWORKS)):
|
||||
return parts
|
||||
|
||||
raise ValueError('Hostname should be a domain name or local IP address')
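To make the rules above concrete, here is a small illustrative set of checks against verify_client_id and verify_redirect_uri; the URLs are made-up examples, not values used anywhere in Home Assistant:

assert verify_client_id('https://example.com/')             # domain name: accepted
assert verify_client_id('http://192.168.0.10/')             # private-network IP: accepted
assert not verify_client_id('https://8.8.8.8/')             # public IP: rejected
assert not verify_client_id('https://example.com/a/../b')   # dot segments: rejected

# redirect_uri must share scheme and host with client_id
assert verify_redirect_uri('https://example.com/', 'https://example.com/callback')
assert not verify_redirect_uri('https://example.com/', 'http://example.com/callback')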
|
||||
@@ -145,7 +145,7 @@ def request_configuration(hass, config, name, host, serialnumber):
|
||||
|
||||
def setup(hass, config):
|
||||
"""Set up for Axis devices."""
|
||||
def _shutdown(call): # pylint: disable=unused-argument
|
||||
def _shutdown(call):
|
||||
"""Stop the event stream on shutdown."""
|
||||
for serialnumber, device in AXIS_DEVICES.items():
|
||||
_LOGGER.info("Stopping event stream for %s.", serialnumber)
|
||||
@@ -272,8 +272,7 @@ class AxisDeviceEvent(Entity):
|
||||
|
||||
def _update_callback(self):
|
||||
"""Update the sensor's state, if needed."""
|
||||
self.update()
|
||||
self.schedule_update_ha_state()
|
||||
self.schedule_update_ha_state(True)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
||||
@@ -16,7 +16,6 @@ _LOGGER = logging.getLogger(__name__)
|
||||
DOMAIN = 'bbb_gpio'
|
||||
|
||||
|
||||
# pylint: disable=no-member
|
||||
def setup(hass, config):
|
||||
"""Set up the BeagleBone Black GPIO component."""
|
||||
# pylint: disable=import-error
|
||||
@@ -34,41 +33,39 @@ def setup(hass, config):
|
||||
return True
|
||||
|
||||
|
||||
# noqa: F821
|
||||
|
||||
def setup_output(pin):
|
||||
"""Set up a GPIO as output."""
|
||||
# pylint: disable=import-error,undefined-variable
|
||||
# pylint: disable=import-error
|
||||
import Adafruit_BBIO.GPIO as GPIO
|
||||
GPIO.setup(pin, GPIO.OUT)
|
||||
|
||||
|
||||
def setup_input(pin, pull_mode):
|
||||
"""Set up a GPIO as input."""
|
||||
# pylint: disable=import-error,undefined-variable
|
||||
# pylint: disable=import-error
|
||||
import Adafruit_BBIO.GPIO as GPIO
|
||||
GPIO.setup(pin, GPIO.IN, # noqa: F821
|
||||
GPIO.PUD_DOWN if pull_mode == 'DOWN' # noqa: F821
|
||||
else GPIO.PUD_UP) # noqa: F821
|
||||
GPIO.setup(pin, GPIO.IN,
|
||||
GPIO.PUD_DOWN if pull_mode == 'DOWN'
|
||||
else GPIO.PUD_UP)
|
||||
|
||||
|
||||
def write_output(pin, value):
|
||||
"""Write a value to a GPIO."""
|
||||
# pylint: disable=import-error,undefined-variable
|
||||
# pylint: disable=import-error
|
||||
import Adafruit_BBIO.GPIO as GPIO
|
||||
GPIO.output(pin, value)
|
||||
|
||||
|
||||
def read_input(pin):
|
||||
"""Read a value from a GPIO."""
|
||||
# pylint: disable=import-error,undefined-variable
|
||||
# pylint: disable=import-error
|
||||
import Adafruit_BBIO.GPIO as GPIO
|
||||
return GPIO.input(pin) is GPIO.HIGH
|
||||
|
||||
|
||||
def edge_detect(pin, event_callback, bounce):
|
||||
"""Add detection for RISING and FALLING events."""
|
||||
# pylint: disable=import-error,undefined-variable
|
||||
# pylint: disable=import-error
|
||||
import Adafruit_BBIO.GPIO as GPIO
|
||||
GPIO.add_event_detect(
|
||||
pin, GPIO.BOTH, callback=event_callback, bouncetime=bounce)
|
||||
|
||||
@@ -67,7 +67,6 @@ async def async_unload_entry(hass, entry):
|
||||
return await hass.data[DOMAIN].async_unload_entry(entry)
|
||||
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
class BinarySensorDevice(Entity):
|
||||
"""Represent a binary sensor."""
|
||||
|
||||
|
||||
@@ -124,11 +124,11 @@ class BMWConnectedDriveSensor(BinarySensorDevice):
|
||||
result['check_control_messages'] = check_control_messages
|
||||
elif self._attribute == 'charging_status':
|
||||
result['charging_status'] = vehicle_state.charging_status.value
|
||||
# pylint: disable=W0212
|
||||
# pylint: disable=protected-access
|
||||
result['last_charging_end_result'] = \
|
||||
vehicle_state._attributes['lastChargingEndResult']
|
||||
if self._attribute == 'connection_status':
|
||||
# pylint: disable=W0212
|
||||
# pylint: disable=protected-access
|
||||
result['connection_status'] = \
|
||||
vehicle_state._attributes['connectionStatus']
|
||||
|
||||
@@ -166,7 +166,7 @@ class BMWConnectedDriveSensor(BinarySensorDevice):
|
||||
# device class plug: On means device is plugged in,
|
||||
# Off means device is unplugged
|
||||
if self._attribute == 'connection_status':
|
||||
# pylint: disable=W0212
|
||||
# pylint: disable=protected-access
|
||||
self._state = (vehicle_state._attributes['connectionStatus'] ==
|
||||
'CONNECTED')
|
||||
|
||||
|
||||
@@ -35,7 +35,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Command line Binary Sensor."""
|
||||
name = config.get(CONF_NAME)
|
||||
|
||||
@@ -5,9 +5,9 @@ For more details about this component, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.deconz/
|
||||
"""
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
from homeassistant.components.deconz import (
|
||||
CONF_ALLOW_CLIP_SENSOR, DOMAIN as DATA_DECONZ, DATA_DECONZ_ID,
|
||||
DATA_DECONZ_UNSUB)
|
||||
from homeassistant.components.deconz.const import (
|
||||
ATTR_DARK, ATTR_ON, CONF_ALLOW_CLIP_SENSOR, DOMAIN as DATA_DECONZ,
|
||||
DATA_DECONZ_ID, DATA_DECONZ_UNSUB)
|
||||
from homeassistant.const import ATTR_BATTERY_LEVEL
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
@@ -62,7 +62,8 @@ class DeconzBinarySensor(BinarySensorDevice):
|
||||
"""
|
||||
if reason['state'] or \
|
||||
'reachable' in reason['attr'] or \
|
||||
'battery' in reason['attr']:
|
||||
'battery' in reason['attr'] or \
|
||||
'on' in reason['attr']:
|
||||
self.async_schedule_update_ha_state()
|
||||
|
||||
@property
|
||||
@@ -107,6 +108,8 @@ class DeconzBinarySensor(BinarySensorDevice):
|
||||
attr = {}
|
||||
if self._sensor.battery:
|
||||
attr[ATTR_BATTERY_LEVEL] = self._sensor.battery
|
||||
if self._sensor.on is not None:
|
||||
attr[ATTR_ON] = self._sensor.on
|
||||
if self._sensor.type in PRESENCE and self._sensor.dark is not None:
|
||||
attr['dark'] = self._sensor.dark
|
||||
attr[ATTR_DARK] = self._sensor.dark
|
||||
return attr
|
||||
|
||||
@@ -14,7 +14,8 @@ from homeassistant.components.binary_sensor import (
|
||||
from homeassistant.components.digital_ocean import (
|
||||
CONF_DROPLETS, ATTR_CREATED_AT, ATTR_DROPLET_ID, ATTR_DROPLET_NAME,
|
||||
ATTR_FEATURES, ATTR_IPV4_ADDRESS, ATTR_IPV6_ADDRESS, ATTR_MEMORY,
|
||||
ATTR_REGION, ATTR_VCPUS, DATA_DIGITAL_OCEAN)
|
||||
ATTR_REGION, ATTR_VCPUS, CONF_ATTRIBUTION, DATA_DIGITAL_OCEAN)
|
||||
from homeassistant.const import ATTR_ATTRIBUTION
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -75,6 +76,7 @@ class DigitalOceanBinarySensor(BinarySensorDevice):
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the Digital Ocean droplet."""
|
||||
return {
|
||||
ATTR_ATTRIBUTION: CONF_ATTRIBUTION,
|
||||
ATTR_CREATED_AT: self.data.created_at,
|
||||
ATTR_DROPLET_ID: self.data.id,
|
||||
ATTR_DROPLET_NAME: self.data.name,
|
||||
|
||||
@@ -5,7 +5,6 @@ For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.eight_sleep/
|
||||
"""
|
||||
import logging
|
||||
import asyncio
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
from homeassistant.components.eight_sleep import (
|
||||
@@ -16,8 +15,8 @@ _LOGGER = logging.getLogger(__name__)
|
||||
DEPENDENCIES = ['eight_sleep']
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
async def async_setup_platform(hass, config, async_add_devices,
|
||||
discovery_info=None):
|
||||
"""Set up the eight sleep binary sensor."""
|
||||
if discovery_info is None:
|
||||
return
|
||||
@@ -63,7 +62,6 @@ class EightHeatSensor(EightSleepHeatEntity, BinarySensorDevice):
|
||||
"""Return true if the binary sensor is on."""
|
||||
return self._state
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_update(self):
|
||||
async def async_update(self):
|
||||
"""Retrieve latest state."""
|
||||
self._state = self._usrobj.bed_presence
|
||||
|
||||
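The eight_sleep hunks above are part of the project-wide move from the generator-based @asyncio.coroutine / yield from style to native coroutines. A minimal before/after sketch (function names are illustrative; the old form needs a Python version where asyncio.coroutine still exists):

import asyncio

# old style, as removed in the hunks above
@asyncio.coroutine
def async_update_old():
    yield from asyncio.sleep(0)   # stand-in for real I/O
    return 'done'

# new style, as added
async def async_update_new():
    await asyncio.sleep(0)
    return 'done'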
@@ -16,7 +16,7 @@ from homeassistant.const import (
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDevice, PLATFORM_SCHEMA)
|
||||
|
||||
REQUIREMENTS = ['https://github.com/soldag/pyflic/archive/0.4.zip#pyflic==0.4']
|
||||
REQUIREMENTS = ['pyflic-homeassistant==0.4.dev0']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -23,7 +23,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the GC100 devices."""
|
||||
binary_sensors = []
|
||||
@@ -40,7 +39,6 @@ class GC100BinarySensor(BinarySensorDevice):
|
||||
|
||||
def __init__(self, name, port_addr, gc100):
|
||||
"""Initialize the GC100 binary sensor."""
|
||||
# pylint: disable=no-member
|
||||
self._name = name or DEVICE_DEFAULT_NAME
|
||||
self._port_addr = port_addr
|
||||
self._gc100 = gc100
|
||||
|
||||
@@ -9,8 +9,8 @@ import logging
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
from homeassistant.components.homematicip_cloud import (
|
||||
HomematicipGenericDevice, DOMAIN as HOMEMATICIP_CLOUD_DOMAIN,
|
||||
ATTR_HOME_ID)
|
||||
HomematicipGenericDevice, DOMAIN as HMIPC_DOMAIN,
|
||||
HMIPC_HAPID)
|
||||
|
||||
DEPENDENCIES = ['homematicip_cloud']
|
||||
|
||||
@@ -21,17 +21,18 @@ ATTR_EVENT_DELAY = 'event_delay'
|
||||
ATTR_MOTION_DETECTED = 'motion_detected'
|
||||
ATTR_ILLUMINATION = 'illumination'
|
||||
|
||||
HMIP_OPEN = 'open'
|
||||
|
||||
|
||||
async def async_setup_platform(hass, config, async_add_devices,
|
||||
discovery_info=None):
|
||||
"""Set up the HomematicIP binary sensor devices."""
|
||||
"""Set up the binary sensor devices."""
|
||||
pass
|
||||
|
||||
|
||||
async def async_setup_entry(hass, config_entry, async_add_devices):
|
||||
"""Set up the HomematicIP binary sensor from a config entry."""
|
||||
from homematicip.device import (ShutterContact, MotionDetectorIndoor)
|
||||
|
||||
if discovery_info is None:
|
||||
return
|
||||
home = hass.data[HOMEMATICIP_CLOUD_DOMAIN][discovery_info[ATTR_HOME_ID]]
|
||||
home = hass.data[HMIPC_DOMAIN][config_entry.data[HMIPC_HAPID]].home
|
||||
devices = []
|
||||
for device in home.devices:
|
||||
if isinstance(device, ShutterContact):
|
||||
@@ -58,11 +59,13 @@ class HomematicipShutterContact(HomematicipGenericDevice, BinarySensorDevice):
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if the shutter contact is on/open."""
|
||||
from homematicip.base.enums import WindowState
|
||||
|
||||
if self._device.sabotage:
|
||||
return True
|
||||
if self._device.windowState is None:
|
||||
return None
|
||||
return self._device.windowState.lower() == HMIP_OPEN
|
||||
return self._device.windowState == WindowState.OPEN
|
||||
|
||||
|
||||
class HomematicipMotionDetector(HomematicipGenericDevice, BinarySensorDevice):
|
||||
|
||||
@@ -8,7 +8,7 @@ https://home-assistant.io/components/binary_sensor.isy994/
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
from typing import Callable # noqa
|
||||
from typing import Callable
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice, DOMAIN
|
||||
@@ -28,7 +28,6 @@ ISY_DEVICE_TYPES = {
|
||||
}
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config: ConfigType,
|
||||
add_devices: Callable[[list], None], discovery_info=None):
|
||||
"""Set up the ISY994 binary sensor platform."""
|
||||
@@ -299,7 +298,6 @@ class ISYBinarySensorHeartbeat(ISYDevice, BinarySensorDevice):
|
||||
# No heartbeat timer is active
|
||||
pass
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
@callback
|
||||
def timer_elapsed(now) -> None:
|
||||
"""Heartbeat missed; set state to indicate dead battery."""
|
||||
@@ -314,7 +312,6 @@ class ISYBinarySensorHeartbeat(ISYDevice, BinarySensorDevice):
|
||||
self._heartbeat_timer = async_track_point_in_utc_time(
|
||||
self.hass, timer_elapsed, point_in_time)
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def on_update(self, event: object) -> None:
|
||||
"""Ignore node status updates.
|
||||
|
||||
|
||||
@@ -115,7 +115,6 @@ class KNXBinarySensor(BinarySensorDevice):
|
||||
"""Register callbacks to update hass after device was changed."""
|
||||
async def after_update_callback(device):
|
||||
"""Call after device was updated."""
|
||||
# pylint: disable=unused-argument
|
||||
await self.async_update_ha_state()
|
||||
self.device.register_device_updated_cb(after_update_callback)
|
||||
|
||||
|
||||
@@ -52,19 +52,18 @@ class LinodeBinarySensor(BinarySensorDevice):
|
||||
self._node_id = node_id
|
||||
self._state = None
|
||||
self.data = None
|
||||
self._attrs = {}
|
||||
self._name = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
if self.data is not None:
|
||||
return self.data.label
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return true if the binary sensor is on."""
|
||||
if self.data is not None:
|
||||
return self.data.status == 'running'
|
||||
return False
|
||||
return self._state
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
@@ -74,8 +73,18 @@ class LinodeBinarySensor(BinarySensorDevice):
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the Linode Node."""
|
||||
if self.data:
|
||||
return {
|
||||
return self._attrs
|
||||
|
||||
def update(self):
|
||||
"""Update state of sensor."""
|
||||
self._linode.update()
|
||||
if self._linode.data is not None:
|
||||
for node in self._linode.data:
|
||||
if node.id == self._node_id:
|
||||
self.data = node
|
||||
if self.data is not None:
|
||||
self._state = self.data.status == 'running'
|
||||
self._attrs = {
|
||||
ATTR_CREATED: self.data.created,
|
||||
ATTR_NODE_ID: self.data.id,
|
||||
ATTR_NODE_NAME: self.data.label,
|
||||
@@ -85,12 +94,4 @@ class LinodeBinarySensor(BinarySensorDevice):
|
||||
ATTR_REGION: self.data.region.country,
|
||||
ATTR_VCPUS: self.data.specs.vcpus,
|
||||
}
|
||||
return {}
|
||||
|
||||
def update(self):
|
||||
"""Update state of sensor."""
|
||||
self._linode.update()
|
||||
if self._linode.data is not None:
|
||||
for node in self._linode.data:
|
||||
if node.id == self._node_id:
|
||||
self.data = node
|
||||
self._name = self.data.label
|
||||
|
||||
@@ -6,6 +6,7 @@ https://home-assistant.io/components/binary_sensor.mqtt/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -24,7 +25,7 @@ import homeassistant.helpers.config_validation as cv
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = 'MQTT Binary sensor'
|
||||
|
||||
CONF_UNIQUE_ID = 'unique_id'
|
||||
DEFAULT_PAYLOAD_OFF = 'OFF'
|
||||
DEFAULT_PAYLOAD_ON = 'ON'
|
||||
DEFAULT_FORCE_UPDATE = False
|
||||
@@ -37,6 +38,9 @@ PLATFORM_SCHEMA = mqtt.MQTT_RO_PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
|
||||
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
|
||||
vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean,
|
||||
# Integrations should never expose unique_id through configuration;
|
||||
# this here is an exception because MQTT is a msg transport, not a protocol
|
||||
vol.Optional(CONF_UNIQUE_ID): cv.string,
|
||||
}).extend(mqtt.MQTT_AVAILABILITY_SCHEMA.schema)
|
||||
|
||||
|
||||
@@ -61,7 +65,8 @@ def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
config.get(CONF_PAYLOAD_OFF),
|
||||
config.get(CONF_PAYLOAD_AVAILABLE),
|
||||
config.get(CONF_PAYLOAD_NOT_AVAILABLE),
|
||||
value_template
|
||||
value_template,
|
||||
config.get(CONF_UNIQUE_ID),
|
||||
)])
|
||||
|
||||
|
||||
@@ -70,7 +75,8 @@ class MqttBinarySensor(MqttAvailability, BinarySensorDevice):
|
||||
|
||||
def __init__(self, name, state_topic, availability_topic, device_class,
|
||||
qos, force_update, payload_on, payload_off, payload_available,
|
||||
payload_not_available, value_template):
|
||||
payload_not_available, value_template,
|
||||
unique_id: Optional[str]):
|
||||
"""Initialize the MQTT binary sensor."""
|
||||
super().__init__(availability_topic, qos, payload_available,
|
||||
payload_not_available)
|
||||
@@ -83,6 +89,7 @@ class MqttBinarySensor(MqttAvailability, BinarySensorDevice):
|
||||
self._qos = qos
|
||||
self._force_update = force_update
|
||||
self._template = value_template
|
||||
self._unique_id = unique_id
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_added_to_hass(self):
|
||||
@@ -134,3 +141,8 @@ class MqttBinarySensor(MqttAvailability, BinarySensorDevice):
|
||||
def force_update(self):
|
||||
"""Force update."""
|
||||
return self._force_update
|
||||
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return a unique ID."""
|
||||
return self._unique_id
|
||||
|
||||
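The MQTT hunks above thread an optional unique_id from the platform schema into the entity. A minimal sketch of that pattern (the class and names below are illustrative, not the actual MQTT implementation):

from typing import Optional

class ExampleBinarySensor:
    """Entity that only reports a unique ID when the user configured one."""

    def __init__(self, name: str, unique_id: Optional[str] = None) -> None:
        self._name = name
        self._unique_id = unique_id

    @property
    def unique_id(self) -> Optional[str]:
        # None keeps the entity out of the entity registry
        return self._unique_id

sensor = ExampleBinarySensor('door contact', unique_id='mqtt-door-0001')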
@@ -29,7 +29,8 @@ async def async_setup_platform(
|
||||
async_add_devices=async_add_devices)
|
||||
|
||||
|
||||
class MySensorsBinarySensor(mysensors.MySensorsEntity, BinarySensorDevice):
|
||||
class MySensorsBinarySensor(
|
||||
mysensors.device.MySensorsEntity, BinarySensorDevice):
|
||||
"""Representation of a MySensors Binary Sensor child node."""
|
||||
|
||||
@property
|
||||
|
||||
@@ -29,6 +29,7 @@ class MyStromView(HomeAssistantView):
|
||||
|
||||
url = '/api/mystrom'
|
||||
name = 'api:mystrom'
|
||||
supported_actions = ['single', 'double', 'long', 'touch']
|
||||
|
||||
def __init__(self, add_devices):
|
||||
"""Initialize the myStrom URL endpoint."""
|
||||
@@ -44,16 +45,18 @@ class MyStromView(HomeAssistantView):
|
||||
@asyncio.coroutine
|
||||
def _handle(self, hass, data):
|
||||
"""Handle requests to the myStrom endpoint."""
|
||||
button_action = list(data.keys())[0]
|
||||
button_id = data[button_action]
|
||||
entity_id = '{}.{}_{}'.format(DOMAIN, button_id, button_action)
|
||||
button_action = next((
|
||||
parameter for parameter in data
|
||||
if parameter in self.supported_actions), None)
|
||||
|
||||
if button_action not in ['single', 'double', 'long', 'touch']:
|
||||
if button_action is None:
|
||||
_LOGGER.error(
|
||||
"Received unidentified message from myStrom button: %s", data)
|
||||
return ("Received unidentified message: {}".format(data),
|
||||
HTTP_UNPROCESSABLE_ENTITY)
|
||||
|
||||
button_id = data[button_action]
|
||||
entity_id = '{}.{}_{}'.format(DOMAIN, button_id, button_action)
|
||||
if entity_id not in self.buttons:
|
||||
_LOGGER.info("New myStrom button/action detected: %s/%s",
|
||||
button_id, button_action)
|
||||
|
||||
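The rewritten _handle above replaces the blind list(data.keys())[0] lookup with a search over supported_actions; a quick illustration of how that behaves:

supported_actions = ['single', 'double', 'long', 'touch']

def pick_action(data):
    """Return the first supported action present in the request data."""
    return next(
        (parameter for parameter in data
         if parameter in supported_actions), None)

print(pick_action({'double': '3d1a4f'}))   # 'double'
print(pick_action({'unknown': 'x'}))       # None -> request is rejected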
@@ -8,7 +8,8 @@ from itertools import chain
|
||||
import logging
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
from homeassistant.components.nest import DATA_NEST, NestSensorDevice
|
||||
from homeassistant.components.nest import (
|
||||
DATA_NEST, DATA_NEST_CONFIG, CONF_BINARY_SENSORS, NestSensorDevice)
|
||||
from homeassistant.const import CONF_MONITORED_CONDITIONS
|
||||
|
||||
DEPENDENCIES = ['nest']
|
||||
@@ -30,12 +31,10 @@ CAMERA_BINARY_TYPES = {
|
||||
|
||||
STRUCTURE_BINARY_TYPES = {
|
||||
'away': None,
|
||||
# 'security_state', # pending python-nest update
|
||||
}
|
||||
|
||||
STRUCTURE_BINARY_STATE_MAP = {
|
||||
'away': {'away': True, 'home': False},
|
||||
'security_state': {'deter': True, 'ok': False},
|
||||
}
|
||||
|
||||
_BINARY_TYPES_DEPRECATED = [
|
||||
@@ -56,12 +55,19 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Nest binary sensors."""
|
||||
if discovery_info is None:
|
||||
return
|
||||
"""Set up the Nest binary sensors.
|
||||
|
||||
No longer used.
|
||||
"""
|
||||
|
||||
|
||||
async def async_setup_entry(hass, entry, async_add_devices):
|
||||
"""Set up a Nest binary sensor based on a config entry."""
|
||||
nest = hass.data[DATA_NEST]
|
||||
|
||||
discovery_info = \
|
||||
hass.data.get(DATA_NEST_CONFIG, {}).get(CONF_BINARY_SENSORS, {})
|
||||
|
||||
# Add all available binary sensors if no Nest binary sensor config is set
|
||||
if discovery_info == {}:
|
||||
conditions = _VALID_BINARY_SENSOR_TYPES
|
||||
@@ -76,32 +82,37 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"for valid options.")
|
||||
_LOGGER.error(wstr)
|
||||
|
||||
sensors = []
|
||||
for structure in nest.structures():
|
||||
sensors += [NestBinarySensor(structure, None, variable)
|
||||
for variable in conditions
|
||||
if variable in STRUCTURE_BINARY_TYPES]
|
||||
device_chain = chain(nest.thermostats(),
|
||||
nest.smoke_co_alarms(),
|
||||
nest.cameras())
|
||||
for structure, device in device_chain:
|
||||
sensors += [NestBinarySensor(structure, device, variable)
|
||||
for variable in conditions
|
||||
if variable in BINARY_TYPES]
|
||||
sensors += [NestBinarySensor(structure, device, variable)
|
||||
for variable in conditions
|
||||
if variable in CLIMATE_BINARY_TYPES
|
||||
and device.is_thermostat]
|
||||
|
||||
if device.is_camera:
|
||||
def get_binary_sensors():
|
||||
"""Get the Nest binary sensors."""
|
||||
sensors = []
|
||||
for structure in nest.structures():
|
||||
sensors += [NestBinarySensor(structure, None, variable)
|
||||
for variable in conditions
|
||||
if variable in STRUCTURE_BINARY_TYPES]
|
||||
device_chain = chain(nest.thermostats(),
|
||||
nest.smoke_co_alarms(),
|
||||
nest.cameras())
|
||||
for structure, device in device_chain:
|
||||
sensors += [NestBinarySensor(structure, device, variable)
|
||||
for variable in conditions
|
||||
if variable in CAMERA_BINARY_TYPES]
|
||||
for activity_zone in device.activity_zones:
|
||||
sensors += [NestActivityZoneSensor(structure,
|
||||
device,
|
||||
activity_zone)]
|
||||
add_devices(sensors, True)
|
||||
if variable in BINARY_TYPES]
|
||||
sensors += [NestBinarySensor(structure, device, variable)
|
||||
for variable in conditions
|
||||
if variable in CLIMATE_BINARY_TYPES
|
||||
and device.is_thermostat]
|
||||
|
||||
if device.is_camera:
|
||||
sensors += [NestBinarySensor(structure, device, variable)
|
||||
for variable in conditions
|
||||
if variable in CAMERA_BINARY_TYPES]
|
||||
for activity_zone in device.activity_zones:
|
||||
sensors += [NestActivityZoneSensor(structure,
|
||||
device,
|
||||
activity_zone)]
|
||||
|
||||
return sensors
|
||||
|
||||
async_add_devices(await hass.async_add_job(get_binary_sensors), True)
|
||||
|
||||
|
||||
class NestBinarySensor(NestSensorDevice, BinarySensorDevice):
|
||||
@@ -122,7 +133,7 @@ class NestBinarySensor(NestSensorDevice, BinarySensorDevice):
|
||||
value = getattr(self.device, self.variable)
|
||||
if self.variable in STRUCTURE_BINARY_TYPES:
|
||||
self._state = bool(STRUCTURE_BINARY_STATE_MAP
|
||||
[self.variable][value])
|
||||
[self.variable].get(value))
|
||||
else:
|
||||
self._state = bool(value)
|
||||
|
||||
|
||||
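The small change from [value] to .get(value) in NestBinarySensor.update above prevents a KeyError when the API reports a structure state the map does not know about; for example:

STRUCTURE_BINARY_STATE_MAP = {
    'away': {'away': True, 'home': False},
}

value = 'auto-away'                      # not present in the map
state = bool(STRUCTURE_BINARY_STATE_MAP['away'].get(value))
# .get() returns None for unknown values, so state is False;
# the old [...] lookup would have raised KeyError here.
print(state)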
@@ -57,7 +57,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the access to Netatmo binary sensor."""
|
||||
netatmo = hass.components.netatmo
|
||||
@@ -68,12 +67,12 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
|
||||
module_name = None
|
||||
|
||||
import lnetatmo
|
||||
import pyatmo
|
||||
try:
|
||||
data = CameraData(netatmo.NETATMO_AUTH, home)
|
||||
if not data.get_camera_names():
|
||||
return None
|
||||
except lnetatmo.NoDevice:
|
||||
except pyatmo.NoDevice:
|
||||
return None
|
||||
|
||||
welcome_sensors = config.get(
|
||||
|
||||
@@ -33,7 +33,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the available OctoPrint binary sensors."""
|
||||
octoprint_api = hass.data[DOMAIN]["api"]
|
||||
|
||||
@@ -44,7 +44,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up Pilight Binary Sensor."""
|
||||
disarm = config.get(CONF_DISARM_AFTER_TRIGGER)
|
||||
|
||||
homeassistant/components/binary_sensor/rachio.py (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
"""
|
||||
Integration with the Rachio Iro sprinkler system controller.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.rachio/
|
||||
"""
|
||||
from abc import abstractmethod
|
||||
import logging
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
from homeassistant.components.rachio import (DOMAIN as DOMAIN_RACHIO,
|
||||
KEY_DEVICE_ID,
|
||||
KEY_STATUS,
|
||||
KEY_SUBTYPE,
|
||||
SIGNAL_RACHIO_CONTROLLER_UPDATE,
|
||||
STATUS_OFFLINE,
|
||||
STATUS_ONLINE,
|
||||
SUBTYPE_OFFLINE,
|
||||
SUBTYPE_ONLINE,)
|
||||
from homeassistant.helpers.dispatcher import dispatcher_connect
|
||||
|
||||
DEPENDENCIES = ['rachio']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Rachio binary sensors."""
|
||||
devices = []
|
||||
for controller in hass.data[DOMAIN_RACHIO].controllers:
|
||||
devices.append(RachioControllerOnlineBinarySensor(hass, controller))
|
||||
|
||||
add_devices(devices)
|
||||
_LOGGER.info("%d Rachio binary sensor(s) added", len(devices))
|
||||
|
||||
|
||||
class RachioControllerBinarySensor(BinarySensorDevice):
|
||||
"""Represent a binary sensor that reflects a Rachio state."""
|
||||
|
||||
def __init__(self, hass, controller, poll=True):
|
||||
"""Set up a new Rachio controller binary sensor."""
|
||||
self._controller = controller
|
||||
|
||||
if poll:
|
||||
self._state = self._poll_update()
|
||||
else:
|
||||
self._state = None
|
||||
|
||||
dispatcher_connect(hass, SIGNAL_RACHIO_CONTROLLER_UPDATE,
|
||||
self._handle_any_update)
|
||||
|
||||
@property
|
||||
def should_poll(self) -> bool:
|
||||
"""Declare that this entity pushes its state to HA."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return whether the sensor has a 'true' value."""
|
||||
return self._state
|
||||
|
||||
def _handle_any_update(self, *args, **kwargs) -> None:
|
||||
"""Determine whether an update event applies to this device."""
|
||||
if args[0][KEY_DEVICE_ID] != self._controller.controller_id:
|
||||
# For another device
|
||||
return
|
||||
|
||||
# For this device
|
||||
self._handle_update()
|
||||
|
||||
@abstractmethod
|
||||
def _poll_update(self, data=None) -> bool:
|
||||
"""Request the state from the API."""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def _handle_update(self, *args, **kwargs) -> None:
|
||||
"""Handle an update to the state of this sensor."""
|
||||
pass
|
||||
|
||||
|
||||
class RachioControllerOnlineBinarySensor(RachioControllerBinarySensor):
|
||||
"""Represent a binary sensor that reflects if the controller is online."""
|
||||
|
||||
def __init__(self, hass, controller):
|
||||
"""Set up a new Rachio controller online binary sensor."""
|
||||
super().__init__(hass, controller, poll=False)
|
||||
self._state = self._poll_update(controller.init_data)
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of this sensor including the controller name."""
|
||||
return "{} online".format(self._controller.name)
|
||||
|
||||
@property
|
||||
def device_class(self) -> str:
|
||||
"""Return the class of this device, from component DEVICE_CLASSES."""
|
||||
return 'connectivity'
|
||||
|
||||
@property
|
||||
def icon(self) -> str:
|
||||
"""Return the name of an icon for this sensor."""
|
||||
return 'mdi:wifi-strength-4' if self.is_on\
|
||||
else 'mdi:wifi-strength-off-outline'
|
||||
|
||||
def _poll_update(self, data=None) -> bool:
|
||||
"""Request the state from the API."""
|
||||
if data is None:
|
||||
data = self._controller.rachio.device.get(
|
||||
self._controller.controller_id)[1]
|
||||
|
||||
if data[KEY_STATUS] == STATUS_ONLINE:
|
||||
return True
|
||||
elif data[KEY_STATUS] == STATUS_OFFLINE:
|
||||
return False
|
||||
else:
|
||||
_LOGGER.warning('"%s" reported in unknown state "%s"', self.name,
|
||||
data[KEY_STATUS])
|
||||
|
||||
def _handle_update(self, *args, **kwargs) -> None:
|
||||
"""Handle an update to the state of this sensor."""
|
||||
if args[0][KEY_SUBTYPE] == SUBTYPE_ONLINE:
|
||||
self._state = True
|
||||
elif args[0][KEY_SUBTYPE] == SUBTYPE_OFFLINE:
|
||||
self._state = False
|
||||
|
||||
self.schedule_update_ha_state()
|
||||
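The new rachio.py above is push-driven: each entity subscribes with dispatcher_connect and filters on its own controller id, while the component side fires one signal for every webhook. A hedged sketch of the sending side (the payload shape and helper name are assumptions, not the actual rachio component code):

from homeassistant.helpers.dispatcher import dispatcher_send

SIGNAL_RACHIO_CONTROLLER_UPDATE = 'rachio_controller_update'

def handle_webhook(hass, payload):
    """Fan a Rachio webhook payload out to all subscribed entities."""
    # Each entity compares the device id in the payload against its own
    # controller id in _handle_any_update() and ignores other controllers.
    dispatcher_send(hass, SIGNAL_RACHIO_CONTROLLER_UPDATE, payload)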
@@ -8,7 +8,7 @@ import logging
|
||||
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
from homeassistant.components.rainmachine import (
|
||||
BINARY_SENSORS, DATA_RAINMACHINE, DATA_UPDATE_TOPIC, TYPE_FREEZE,
|
||||
BINARY_SENSORS, DATA_RAINMACHINE, SENSOR_UPDATE_TOPIC, TYPE_FREEZE,
|
||||
TYPE_FREEZE_PROTECTION, TYPE_HOT_DAYS, TYPE_HOURLY, TYPE_MONTH,
|
||||
TYPE_RAINDELAY, TYPE_RAINSENSOR, TYPE_WEEKDAY, RainMachineEntity)
|
||||
from homeassistant.const import CONF_MONITORED_CONDITIONS
|
||||
@@ -20,7 +20,8 @@ DEPENDENCIES = ['rainmachine']
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
async def async_setup_platform(
|
||||
hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up the RainMachine Switch platform."""
|
||||
if discovery_info is None:
|
||||
return
|
||||
@@ -33,7 +34,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
binary_sensors.append(
|
||||
RainMachineBinarySensor(rainmachine, sensor_type, name, icon))
|
||||
|
||||
add_devices(binary_sensors, True)
|
||||
async_add_devices(binary_sensors, True)
|
||||
|
||||
|
||||
class RainMachineBinarySensor(RainMachineEntity, BinarySensorDevice):
|
||||
@@ -70,16 +71,16 @@ class RainMachineBinarySensor(RainMachineEntity, BinarySensorDevice):
|
||||
self.rainmachine.device_mac.replace(':', ''), self._sensor_type)
|
||||
|
||||
@callback
|
||||
def update_data(self):
|
||||
def _update_data(self):
|
||||
"""Update the state."""
|
||||
self.async_schedule_update_ha_state(True)
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Register callbacks."""
|
||||
async_dispatcher_connect(self.hass, DATA_UPDATE_TOPIC,
|
||||
self.update_data)
|
||||
async_dispatcher_connect(
|
||||
self.hass, SENSOR_UPDATE_TOPIC, self._update_data)
|
||||
|
||||
def update(self):
|
||||
async def async_update(self):
|
||||
"""Update the state."""
|
||||
if self._sensor_type == TYPE_FREEZE:
|
||||
self._state = self.rainmachine.restrictions['current']['freeze']
|
||||
|
||||
@@ -42,7 +42,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the raspihats binary_sensor devices."""
|
||||
I2CHatBinarySensor.I2C_HATS_MANAGER = hass.data[I2C_HATS_MANAGER]
|
||||
|
||||
@@ -23,7 +23,7 @@ DEPENDENCIES = ['ring']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=5)
|
||||
SCAN_INTERVAL = timedelta(seconds=10)
|
||||
|
||||
# Sensor types: Name, category, device_class
|
||||
SENSOR_TYPES = {
|
||||
|
||||
@@ -39,7 +39,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Raspberry PI GPIO devices."""
|
||||
pull_mode = config.get(CONF_PULL_MODE)
|
||||
@@ -59,7 +58,6 @@ class RPiGPIOBinarySensor(BinarySensorDevice):
|
||||
|
||||
def __init__(self, name, port, pull_mode, bouncetime, invert_logic):
|
||||
"""Initialize the RPi binary sensor."""
|
||||
# pylint: disable=no-member
|
||||
self._name = name or DEVICE_DEFAULT_NAME
|
||||
self._port = port
|
||||
self._pull_mode = pull_mode
|
||||
|
||||
@@ -94,4 +94,4 @@ class SkybellBinarySensor(SkybellDevice, BinarySensorDevice):
|
||||
|
||||
self._state = bool(event and event.get('id') != self._event.get('id'))
|
||||
|
||||
self._event = event
|
||||
self._event = event or {}
|
||||
|
||||
@@ -23,7 +23,7 @@ from homeassistant.helpers.entity import generate_entity_id
|
||||
from homeassistant.helpers.event import async_track_state_change
|
||||
from homeassistant.util import utcnow
|
||||
|
||||
REQUIREMENTS = ['numpy==1.14.3']
|
||||
REQUIREMENTS = ['numpy==1.14.5']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -57,7 +57,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the trend sensors."""
|
||||
sensors = []
|
||||
|
||||
homeassistant/components/binary_sensor/uptimerobot.py (new file, 92 lines)
@@ -0,0 +1,92 @@
|
||||
"""
|
||||
A platform to monitor Uptime Robot monitors.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://www.home-assistant.io/components/binary_sensor.uptimerobot/
|
||||
"""
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
PLATFORM_SCHEMA, BinarySensorDevice)
|
||||
from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
REQUIREMENTS = ['pyuptimerobot==0.0.5']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_TARGET = 'target'
|
||||
|
||||
CONF_ATTRIBUTION = "Data provided by Uptime Robot"
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_API_KEY): cv.string,
|
||||
})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Uptime Robot binary_sensors."""
|
||||
from pyuptimerobot import UptimeRobot
|
||||
|
||||
up_robot = UptimeRobot()
|
||||
api_key = config.get(CONF_API_KEY)
|
||||
monitors = up_robot.getMonitors(api_key)
|
||||
|
||||
devices = []
|
||||
if not monitors or monitors.get('stat') != 'ok':
|
||||
_LOGGER.error("Error connecting to Uptime Robot")
|
||||
return
|
||||
|
||||
for monitor in monitors['monitors']:
|
||||
devices.append(UptimeRobotBinarySensor(
|
||||
api_key, up_robot, monitor['id'], monitor['friendly_name'],
|
||||
monitor['url']))
|
||||
|
||||
add_devices(devices, True)
|
||||
|
||||
|
||||
class UptimeRobotBinarySensor(BinarySensorDevice):
|
||||
"""Representation of a Uptime Robot binary sensor."""
|
||||
|
||||
def __init__(self, api_key, up_robot, monitor_id, name, target):
|
||||
"""Initialize Uptime Robot the binary sensor."""
|
||||
self._api_key = api_key
|
||||
self._monitor_id = str(monitor_id)
|
||||
self._name = name
|
||||
self._target = target
|
||||
self._up_robot = up_robot
|
||||
self._state = None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the binary sensor."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return the state of the binary sensor."""
|
||||
return self._state
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Return the class of this device, from component DEVICE_CLASSES."""
|
||||
return 'connectivity'
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes of the binary sensor."""
|
||||
return {
|
||||
ATTR_ATTRIBUTION: CONF_ATTRIBUTION,
|
||||
ATTR_TARGET: self._target,
|
||||
}
|
||||
|
||||
def update(self):
|
||||
"""Get the latest state of the binary sensor."""
|
||||
monitor = self._up_robot.getMonitors(self._api_key, self._monitor_id)
|
||||
if not monitor or monitor.get('stat') != 'ok':
|
||||
_LOGGER.warning("Failed to get new state")
|
||||
return
|
||||
status = monitor['monitors'][0]['status']
|
||||
self._state = 1 if status == 2 else 0
|
||||
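In update() above, only Uptime Robot status code 2 ('up') maps to an on state; note it stores 1/0 rather than a boolean, which is_on passes through unchanged. The mapping in isolation:

STATUS_UP = 2  # the status code treated as 'up' in the code above

def monitor_is_up(api_response):
    """Return True when the first monitor in the response reports 'up'."""
    status = api_response['monitors'][0]['status']
    return status == STATUS_UP

print(monitor_is_up({'stat': 'ok', 'monitors': [{'status': 2}]}))  # True
print(monitor_is_up({'stat': 'ok', 'monitors': [{'status': 9}]}))  # False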
@@ -19,8 +19,8 @@ _LOGGER = logging.getLogger(__name__)
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Perform the setup for Vera controller devices."""
|
||||
add_devices(
|
||||
VeraBinarySensor(device, hass.data[VERA_CONTROLLER])
|
||||
for device in hass.data[VERA_DEVICES]['binary_sensor'])
|
||||
[VeraBinarySensor(device, hass.data[VERA_CONTROLLER])
|
||||
for device in hass.data[VERA_DEVICES]['binary_sensor']], True)
|
||||
|
||||
|
||||
class VeraBinarySensor(VeraDevice, BinarySensorDevice):
|
||||
|
||||
@@ -54,6 +54,7 @@ class VerisureDoorWindowSensor(BinarySensorDevice):
|
||||
"$.doorWindow.doorWindowDevice[?(@.deviceLabel=='%s')]",
|
||||
self._device_label) is not None
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
def update(self):
|
||||
"""Update the state of the sensor."""
|
||||
hub.update_overview()
|
||||
|
||||
@@ -13,7 +13,6 @@ DEPENDENCIES = ['wemo']
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# pylint: disable=unused-argument, too-many-function-args
|
||||
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
|
||||
"""Register discovered WeMo binary sensors."""
|
||||
import pywemo.discovery as discovery
|
||||
|
||||
homeassistant/components/binary_sensor/wirelesstag.py (new file, 214 lines)
@@ -0,0 +1,214 @@
|
||||
"""
|
||||
Binary sensor support for Wireless Sensor Tags.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.wirelesstag/
|
||||
"""
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDevice, PLATFORM_SCHEMA)
|
||||
from homeassistant.components.wirelesstag import (
|
||||
DOMAIN as WIRELESSTAG_DOMAIN,
|
||||
WIRELESSTAG_TYPE_13BIT, WIRELESSTAG_TYPE_WATER,
|
||||
WIRELESSTAG_TYPE_ALSPRO,
|
||||
WIRELESSTAG_TYPE_WEMO_DEVICE,
|
||||
SIGNAL_BINARY_EVENT_UPDATE,
|
||||
WirelessTagBaseSensor)
|
||||
from homeassistant.const import (
|
||||
CONF_MONITORED_CONDITIONS, STATE_ON, STATE_OFF)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
DEPENDENCIES = ['wirelesstag']
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# On means in range, Off means out of range
|
||||
SENSOR_PRESENCE = 'presence'
|
||||
|
||||
# On means motion detected, Off means clear
|
||||
SENSOR_MOTION = 'motion'
|
||||
|
||||
# On means open, Off means closed
|
||||
SENSOR_DOOR = 'door'
|
||||
|
||||
# On means temperature has become too cold, Off means normal
|
||||
SENSOR_COLD = 'cold'
|
||||
|
||||
# On means hot, Off means normal
|
||||
SENSOR_HEAT = 'heat'
|
||||
|
||||
# On means too dry (humidity), Off means normal
|
||||
SENSOR_DRY = 'dry'
|
||||
|
||||
# On means too wet (humidity), Off means normal
|
||||
SENSOR_WET = 'wet'
|
||||
|
||||
# On means light detected, Off means no light
|
||||
SENSOR_LIGHT = 'light'
|
||||
|
||||
# On means moisture detected (wet), Off means no moisture (dry)
|
||||
SENSOR_MOISTURE = 'moisture'
|
||||
|
||||
# On means tag battery is low, Off means normal
|
||||
SENSOR_BATTERY = 'low_battery'
|
||||
|
||||
# Sensor types: Name, device_class, push notification type representing 'on',
|
||||
# attr to check
|
||||
SENSOR_TYPES = {
|
||||
SENSOR_PRESENCE: ['Presence', 'presence', 'is_in_range', {
|
||||
"on": "oor",
|
||||
"off": "back_in_range"
|
||||
}, 2],
|
||||
SENSOR_MOTION: ['Motion', 'motion', 'is_moved', {
|
||||
"on": "motion_detected",
|
||||
}, 5],
|
||||
SENSOR_DOOR: ['Door', 'door', 'is_door_open', {
|
||||
"on": "door_opened",
|
||||
"off": "door_closed"
|
||||
}, 5],
|
||||
SENSOR_COLD: ['Cold', 'cold', 'is_cold', {
|
||||
"on": "temp_toolow",
|
||||
"off": "temp_normal"
|
||||
}, 4],
|
||||
SENSOR_HEAT: ['Heat', 'heat', 'is_heat', {
|
||||
"on": "temp_toohigh",
|
||||
"off": "temp_normal"
|
||||
}, 4],
|
||||
SENSOR_DRY: ['Too dry', 'dry', 'is_too_dry', {
|
||||
"on": "too_dry",
|
||||
"off": "cap_normal"
|
||||
}, 2],
|
||||
SENSOR_WET: ['Too wet', 'wet', 'is_too_humid', {
|
||||
"on": "too_humid",
|
||||
"off": "cap_normal"
|
||||
}, 2],
|
||||
SENSOR_LIGHT: ['Light', 'light', 'is_light_on', {
|
||||
"on": "too_bright",
|
||||
"off": "light_normal"
|
||||
}, 1],
|
||||
SENSOR_MOISTURE: ['Leak', 'moisture', 'is_leaking', {
|
||||
"on": "water_detected",
|
||||
"off": "water_dried",
|
||||
}, 1],
|
||||
SENSOR_BATTERY: ['Low Battery', 'battery', 'is_battery_low', {
|
||||
"on": "low_battery"
|
||||
}, 3]
|
||||
}
|
||||
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_MONITORED_CONDITIONS, default=[]):
|
||||
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
|
||||
})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the platform for a WirelessTags."""
|
||||
platform = hass.data.get(WIRELESSTAG_DOMAIN)
|
||||
|
||||
sensors = []
|
||||
tags = platform.tags
|
||||
for tag in tags.values():
|
||||
allowed_sensor_types = WirelessTagBinarySensor.allowed_sensors(tag)
|
||||
for sensor_type in config.get(CONF_MONITORED_CONDITIONS):
|
||||
if sensor_type in allowed_sensor_types:
|
||||
sensors.append(WirelessTagBinarySensor(platform, tag,
|
||||
sensor_type))
|
||||
|
||||
add_devices(sensors, True)
|
||||
hass.add_job(platform.install_push_notifications, sensors)
|
||||
|
||||
|
||||
class WirelessTagBinarySensor(WirelessTagBaseSensor, BinarySensorDevice):
|
||||
"""A binary sensor implementation for WirelessTags."""
|
||||
|
||||
@classmethod
|
||||
def allowed_sensors(cls, tag):
|
||||
"""Return list of allowed sensor types for specific tag type."""
|
||||
sensors_map = {
|
||||
# 13-bit tag - allows everything except light and moisture
|
||||
WIRELESSTAG_TYPE_13BIT: [
|
||||
SENSOR_PRESENCE, SENSOR_BATTERY,
|
||||
SENSOR_MOTION, SENSOR_DOOR,
|
||||
SENSOR_COLD, SENSOR_HEAT,
|
||||
SENSOR_DRY, SENSOR_WET],
|
||||
|
||||
# Moisture/water sensor - temperature and moisture only
|
||||
WIRELESSTAG_TYPE_WATER: [
|
||||
SENSOR_PRESENCE, SENSOR_BATTERY,
|
||||
SENSOR_COLD, SENSOR_HEAT,
|
||||
SENSOR_MOISTURE],
|
||||
|
||||
# ALS Pro - allows everything except moisture
|
||||
WIRELESSTAG_TYPE_ALSPRO: [
|
||||
SENSOR_PRESENCE, SENSOR_BATTERY,
|
||||
SENSOR_MOTION, SENSOR_DOOR,
|
||||
SENSOR_COLD, SENSOR_HEAT,
|
||||
SENSOR_DRY, SENSOR_WET,
|
||||
SENSOR_LIGHT],
|
||||
|
||||
# Wemo are power switches.
|
||||
WIRELESSTAG_TYPE_WEMO_DEVICE: [SENSOR_PRESENCE]
|
||||
}
|
||||
|
||||
# allow everything if tag type is unknown
|
||||
# (a full catalog of tag types is not available)
|
||||
tag_type = tag.tag_type
|
||||
fullset = SENSOR_TYPES.keys()
|
||||
return sensors_map[tag_type] if tag_type in sensors_map else fullset
|
||||
|
||||
def __init__(self, api, tag, sensor_type):
|
||||
"""Initialize a binary sensor for a Wireless Sensor Tags."""
|
||||
super().__init__(api, tag)
|
||||
self._sensor_type = sensor_type
|
||||
self._name = '{0} {1}'.format(self._tag.name,
|
||||
SENSOR_TYPES[self._sensor_type][0])
|
||||
self._device_class = SENSOR_TYPES[self._sensor_type][1]
|
||||
self._tag_attr = SENSOR_TYPES[self._sensor_type][2]
|
||||
self.binary_spec = SENSOR_TYPES[self._sensor_type][3]
|
||||
self.tag_id_index_template = SENSOR_TYPES[self._sensor_type][4]
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Register callbacks."""
|
||||
tag_id = self.tag_id
|
||||
event_type = self.device_class
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
SIGNAL_BINARY_EVENT_UPDATE.format(tag_id, event_type),
|
||||
self._on_binary_event_callback)
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
"""Return True if the binary sensor is on."""
|
||||
return self._state == STATE_ON
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Return the class of the binary sensor."""
|
||||
return self._device_class
|
||||
|
||||
@property
|
||||
def principal_value(self):
|
||||
"""Return value of tag.
|
||||
|
||||
Subclasses need to override this based on the type of sensor.
|
||||
"""
|
||||
return (
|
||||
STATE_ON if getattr(self._tag, self._tag_attr, False)
|
||||
else STATE_OFF)
|
||||
|
||||
def updated_state_value(self):
|
||||
"""Use raw princial value."""
|
||||
return self.principal_value
|
||||
|
||||
@callback
|
||||
def _on_binary_event_callback(self, event):
|
||||
"""Update state from arrive push notification."""
|
||||
# state should be 'on' or 'off'
|
||||
self._state = event.data.get('state')
|
||||
self.async_schedule_update_ha_state()
|
||||
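setup_platform above intersects the user's monitored_conditions with allowed_sensors(tag) for each tag; for a water tag the effect looks like this:

monitored_conditions = ['motion', 'door', 'moisture', 'low_battery']
allowed_for_water_tag = ['presence', 'low_battery', 'cold', 'heat', 'moisture']

selected = [sensor_type for sensor_type in monitored_conditions
            if sensor_type in allowed_for_water_tag]
print(selected)  # ['moisture', 'low_battery']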
@@ -28,7 +28,11 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
if model in ['motion', 'sensor_motion', 'sensor_motion.aq2']:
|
||||
devices.append(XiaomiMotionSensor(device, hass, gateway))
|
||||
elif model in ['magnet', 'sensor_magnet', 'sensor_magnet.aq2']:
|
||||
devices.append(XiaomiDoorSensor(device, gateway))
|
||||
if 'proto' not in device or int(device['proto'][0:1]) == 1:
|
||||
data_key = 'status'
|
||||
else:
|
||||
data_key = 'window_status'
|
||||
devices.append(XiaomiDoorSensor(device, data_key, gateway))
|
||||
elif model == 'sensor_wleak.aq1':
|
||||
devices.append(XiaomiWaterLeakSensor(device, gateway))
|
||||
elif model in ['smoke', 'sensor_smoke']:
|
||||
@@ -43,10 +47,10 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
data_key = 'channel_0'
|
||||
devices.append(XiaomiButton(device, 'Switch', data_key,
|
||||
hass, gateway))
|
||||
elif model in ['86sw1', 'sensor_86sw1.aq1']:
|
||||
elif model in ['86sw1', 'sensor_86sw1', 'sensor_86sw1.aq1']:
|
||||
devices.append(XiaomiButton(device, 'Wall Switch', 'channel_0',
|
||||
hass, gateway))
|
||||
elif model in ['86sw2', 'sensor_86sw2.aq1']:
|
||||
elif model in ['86sw2', 'sensor_86sw2', 'sensor_86sw2.aq1']:
|
||||
devices.append(XiaomiButton(device, 'Wall Switch (Left)',
|
||||
'channel_0', hass, gateway))
|
||||
devices.append(XiaomiButton(device, 'Wall Switch (Right)',
|
||||
@@ -190,11 +194,11 @@ class XiaomiMotionSensor(XiaomiBinarySensor):
|
||||
class XiaomiDoorSensor(XiaomiBinarySensor):
|
||||
"""Representation of a XiaomiDoorSensor."""
|
||||
|
||||
def __init__(self, device, xiaomi_hub):
|
||||
def __init__(self, device, data_key, xiaomi_hub):
|
||||
"""Initialize the XiaomiDoorSensor."""
|
||||
self._open_since = 0
|
||||
XiaomiBinarySensor.__init__(self, device, 'Door Window Sensor',
|
||||
xiaomi_hub, 'status', 'opening')
|
||||
xiaomi_hub, data_key, 'opening')
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
@@ -330,7 +334,7 @@ class XiaomiButton(XiaomiBinarySensor):
|
||||
click_type = 'both'
|
||||
elif value == 'shake':
|
||||
click_type = 'shake'
|
||||
elif value == 'long_click':
|
||||
elif value in ['long_click', 'long_both_click']:
|
||||
return False
|
||||
else:
|
||||
_LOGGER.warning("Unsupported click_type detected: %s", value)
|
||||
|
||||
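The door sensor hunk above picks the state key based on the gateway protocol version; the selection in isolation:

def door_data_key(device):
    """Return the key holding the contact state for this device."""
    if 'proto' not in device or int(device['proto'][0:1]) == 1:
        return 'status'          # protocol v1 gateways
    return 'window_status'       # protocol v2 gateways

print(door_data_key({'model': 'sensor_magnet.aq2'}))           # 'status'
print(door_data_key({'model': 'magnet', 'proto': '2.0.1'}))    # 'window_status'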
@@ -187,8 +187,8 @@ class Switch(zha.Entity, BinarySensorDevice):
|
||||
if args[0] == 0xff:
|
||||
rate = 10 # Should read default move rate
|
||||
self._entity.move_level(-rate if args[0] else rate)
|
||||
elif command_id == 0x0002: # step
|
||||
# Step (technically shouldn't change on/off)
|
||||
elif command_id in (0x0002, 0x0006): # step, -with_on_off
|
||||
# Step (technically may change on/off)
|
||||
self._entity.move_level(-args[1] if args[0] else args[1])
|
||||
|
||||
def attribute_update(self, attrid, value):
|
||||
|
||||
@@ -34,7 +34,6 @@ CONFIG_SCHEMA = vol.Schema({
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup(hass, config):
|
||||
"""Set up the BloomSky component."""
|
||||
api_key = config[DOMAIN][CONF_API_KEY]
|
||||
|
||||
@@ -4,11 +4,12 @@ Support for Google Calendar event device sensors.
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/calendar/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
import re
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
from homeassistant.components.google import (
|
||||
CONF_OFFSET, CONF_DEVICE_ID, CONF_NAME)
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
@@ -18,23 +19,33 @@ from homeassistant.helpers.entity import Entity, generate_entity_id
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.template import DATE_STR_FORMAT
|
||||
from homeassistant.util import dt
|
||||
from homeassistant.components import http
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = 'calendar'
|
||||
|
||||
DEPENDENCIES = ['http']
|
||||
|
||||
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=60)
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def async_setup(hass, config):
|
||||
async def async_setup(hass, config):
|
||||
"""Track states and offer events for calendars."""
|
||||
component = EntityComponent(
|
||||
_LOGGER, DOMAIN, hass, SCAN_INTERVAL, DOMAIN)
|
||||
|
||||
yield from component.async_setup(config)
|
||||
hass.http.register_view(CalendarListView(component))
|
||||
hass.http.register_view(CalendarEventView(component))
|
||||
|
||||
# Doesn't work in prod builds of the frontend: home-assistant-polymer#1289
|
||||
# await hass.components.frontend.async_register_built_in_panel(
|
||||
# 'calendar', 'calendar', 'hass:calendar')
|
||||
|
||||
await component.async_setup(config)
|
||||
return True
|
||||
|
||||
|
||||
@@ -42,7 +53,14 @@ DEFAULT_CONF_TRACK_NEW = True
|
||||
DEFAULT_CONF_OFFSET = '!!'
|
||||
|
||||
|
||||
# pylint: disable=too-many-instance-attributes
|
||||
def get_date(date):
|
||||
"""Get the dateTime from date or dateTime as a local."""
|
||||
if 'date' in date:
|
||||
return dt.start_of_local_day(dt.dt.datetime.combine(
|
||||
dt.parse_date(date['date']), dt.dt.time.min))
|
||||
return dt.as_local(dt.parse_datetime(date['dateTime']))
|
||||
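The previously nested _get_date helper is promoted above to a module-level get_date so platforms and the new HTTP views can share it; it accepts either an all-day or a timed event date. A short usage sketch, assuming get_date as defined above:

all_day = get_date({'date': '2018-06-01'})
timed = get_date({'dateTime': '2018-06-01T09:30:00+00:00'})
print(all_day.isoformat())   # start of the local day
print(timed.isoformat())     # the same instant, converted to local time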
|
||||
|
||||
class CalendarEventDevice(Entity):
|
||||
"""A calendar event device."""
|
||||
|
||||
@@ -50,7 +68,6 @@ class CalendarEventDevice(Entity):
|
||||
# with an update() method
|
||||
data = None
|
||||
|
||||
# pylint: disable=too-many-arguments
|
||||
def __init__(self, hass, data):
|
||||
"""Create the Calendar Event Device."""
|
||||
self._name = data.get(CONF_NAME)
|
||||
@@ -144,15 +161,8 @@ class CalendarEventDevice(Entity):
|
||||
self.cleanup()
|
||||
return
|
||||
|
||||
def _get_date(date):
|
||||
"""Get the dateTime from date or dateTime as a local."""
|
||||
if 'date' in date:
|
||||
return dt.start_of_local_day(dt.dt.datetime.combine(
|
||||
dt.parse_date(date['date']), dt.dt.time.min))
|
||||
return dt.as_local(dt.parse_datetime(date['dateTime']))
|
||||
|
||||
start = _get_date(self.data.event['start'])
|
||||
end = _get_date(self.data.event['end'])
|
||||
start = get_date(self.data.event['start'])
|
||||
end = get_date(self.data.event['end'])
|
||||
|
||||
summary = self.data.event.get('summary', '')
|
||||
|
||||
@@ -176,10 +186,61 @@ class CalendarEventDevice(Entity):
|
||||
|
||||
# cleanup the string so we don't have a bunch of double+ spaces
|
||||
self._cal_data['message'] = re.sub(' +', '', summary).strip()
|
||||
|
||||
self._cal_data['offset_time'] = offset_time
|
||||
self._cal_data['location'] = self.data.event.get('location', '')
|
||||
self._cal_data['description'] = self.data.event.get('description', '')
|
||||
self._cal_data['start'] = start
|
||||
self._cal_data['end'] = end
|
||||
self._cal_data['all_day'] = 'date' in self.data.event['start']
|
||||
|
||||
|
||||
class CalendarEventView(http.HomeAssistantView):
|
||||
"""View to retrieve calendar content."""
|
||||
|
||||
url = '/api/calendars/{entity_id}'
|
||||
name = 'api:calendars:calendar'
|
||||
|
||||
def __init__(self, component):
|
||||
"""Initialize calendar view."""
|
||||
self.component = component
|
||||
|
||||
async def get(self, request, entity_id):
|
||||
"""Return calendar events."""
|
||||
entity = self.component.get_entity(entity_id)
|
||||
start = request.query.get('start')
|
||||
end = request.query.get('end')
|
||||
if None in (start, end, entity):
|
||||
return web.Response(status=400)
|
||||
try:
|
||||
start_date = dt.parse_datetime(start)
|
||||
end_date = dt.parse_datetime(end)
|
||||
except (ValueError, AttributeError):
|
||||
return web.Response(status=400)
|
||||
event_list = await entity.async_get_events(
|
||||
request.app['hass'], start_date, end_date)
|
||||
return self.json(event_list)
|
||||
|
||||
|
||||
class CalendarListView(http.HomeAssistantView):
|
||||
"""View to retrieve calendar list."""
|
||||
|
||||
url = '/api/calendars'
|
||||
name = "api:calendars"
|
||||
|
||||
def __init__(self, component):
|
||||
"""Initialize calendar view."""
|
||||
self.component = component
|
||||
|
||||
async def get(self, request):
|
||||
"""Retrieve calendar list."""
|
||||
get_state = request.app['hass'].states.get
|
||||
calendar_list = []
|
||||
|
||||
for entity in self.component.entities:
|
||||
state = get_state(entity.entity_id)
|
||||
calendar_list.append({
|
||||
"name": state.name,
|
||||
"entity_id": entity.entity_id,
|
||||
})
|
||||
|
||||
return self.json(sorted(calendar_list, key=lambda x: x['name']))
|
||||
|
||||
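The two views registered above add simple read-only endpoints. A hedged client-side example; the host and the auth header are placeholders that depend on your installation:

import requests

BASE = 'http://localhost:8123'
HEADERS = {'x-ha-access': 'YOUR_API_PASSWORD'}  # placeholder auth

calendars = requests.get(BASE + '/api/calendars', headers=HEADERS).json()
events = requests.get(
    BASE + '/api/calendars/' + calendars[0]['entity_id'],
    params={'start': '2018-06-01T00:00:00Z', 'end': '2018-06-08T00:00:00Z'},
    headers=HEADERS).json()
# Missing start/end or an unknown entity_id returns HTTP 400, per the view.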
@@ -11,7 +11,7 @@ import re
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.calendar import (
|
||||
PLATFORM_SCHEMA, CalendarEventDevice)
|
||||
PLATFORM_SCHEMA, CalendarEventDevice, get_date)
|
||||
from homeassistant.const import (
|
||||
CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
@@ -92,7 +92,7 @@ def setup_platform(hass, config, add_devices, disc_info=None):
|
||||
if not config.get(CONF_CUSTOM_CALENDARS):
|
||||
device_data = {
|
||||
CONF_NAME: calendar.name,
|
||||
CONF_DEVICE_ID: calendar.name
|
||||
CONF_DEVICE_ID: calendar.name,
|
||||
}
|
||||
calendar_devices.append(
|
||||
WebDavCalendarEventDevice(hass, device_data, calendar)
|
||||
@@ -120,6 +120,10 @@ class WebDavCalendarEventDevice(CalendarEventDevice):
|
||||
attributes = super().device_state_attributes
|
||||
return attributes
|
||||
|
||||
async def async_get_events(self, hass, start_date, end_date):
|
||||
"""Get all events in a specific time frame."""
|
||||
return await self.data.async_get_events(hass, start_date, end_date)
|
||||
|
||||
|
||||
class WebDavCalendarData(object):
|
||||
"""Class to utilize the calendar dav client object to get next event."""
|
||||
@@ -131,6 +135,33 @@ class WebDavCalendarData(object):
|
||||
self.search = search
|
||||
self.event = None
|
||||
|
||||
async def async_get_events(self, hass, start_date, end_date):
|
||||
"""Get all events in a specific time frame."""
|
||||
# Get event list from the current calendar
|
||||
vevent_list = await hass.async_add_job(self.calendar.date_search,
|
||||
start_date, end_date)
|
||||
event_list = []
|
||||
for event in vevent_list:
|
||||
vevent = event.instance.vevent
|
||||
uid = None
|
||||
if hasattr(vevent, 'uid'):
|
||||
uid = vevent.uid.value
|
||||
data = {
|
||||
"uid": uid,
|
||||
"title": vevent.summary.value,
|
||||
"start": self.get_hass_date(vevent.dtstart.value),
|
||||
"end": self.get_hass_date(self.get_end_date(vevent)),
|
||||
"location": self.get_attr_value(vevent, "location"),
|
||||
"description": self.get_attr_value(vevent, "description"),
|
||||
}
|
||||
|
||||
data['start'] = get_date(data['start']).isoformat()
|
||||
data['end'] = get_date(data['end']).isoformat()
|
||||
|
||||
event_list.append(data)
|
||||
|
||||
return event_list
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data."""
|
||||
|
||||
@@ -4,8 +4,10 @@ Demo platform that has two fake binary sensors.
|
||||
For more details about this platform, please refer to the documentation
|
||||
https://home-assistant.io/components/demo/
|
||||
"""
|
||||
import copy
|
||||
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.components.calendar import CalendarEventDevice
|
||||
from homeassistant.components.calendar import CalendarEventDevice, get_date
|
||||
from homeassistant.components.google import CONF_DEVICE_ID, CONF_NAME
|
||||
|
||||
|
||||
@@ -15,13 +17,13 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
calendar_data_current = DemoGoogleCalendarDataCurrent()
|
||||
add_devices([
|
||||
DemoGoogleCalendar(hass, calendar_data_future, {
|
||||
CONF_NAME: 'Future Event',
|
||||
CONF_DEVICE_ID: 'future_event',
|
||||
CONF_NAME: 'Calendar 1',
|
||||
CONF_DEVICE_ID: 'calendar_1',
|
||||
}),
|
||||
|
||||
DemoGoogleCalendar(hass, calendar_data_current, {
|
||||
CONF_NAME: 'Current Event',
|
||||
CONF_DEVICE_ID: 'current_event',
|
||||
CONF_NAME: 'Calendar 2',
|
||||
CONF_DEVICE_ID: 'calendar_2',
|
||||
}),
|
||||
])
|
||||
|
||||
@@ -29,11 +31,21 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
class DemoGoogleCalendarData(object):
|
||||
"""Representation of a Demo Calendar element."""
|
||||
|
||||
event = {}
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
def update(self):
|
||||
"""Return true so entity knows we have new data."""
|
||||
return True
|
||||
|
||||
async def async_get_events(self, hass, start_date, end_date):
|
||||
"""Get all events in a specific time frame."""
|
||||
event = copy.copy(self.event)
|
||||
event['title'] = event['summary']
|
||||
event['start'] = get_date(event['start']).isoformat()
|
||||
event['end'] = get_date(event['end']).isoformat()
|
||||
return [event]
|
||||
|
||||
|
||||
class DemoGoogleCalendarDataFuture(DemoGoogleCalendarData):
|
||||
"""Representation of a Demo Calendar for a future event."""
|
||||
@@ -80,3 +92,7 @@ class DemoGoogleCalendar(CalendarEventDevice):
|
||||
"""Initialize Google Calendar but without the API calls."""
|
||||
self.data = calendar_data
|
||||
super().__init__(hass, data)
|
||||
|
||||
async def async_get_events(self, hass, start_date, end_date):
|
||||
"""Get all events in a specific time frame."""
|
||||
return await self.data.async_get_events(hass, start_date, end_date)
|
||||
|
||||
@@ -4,7 +4,6 @@ Support for Google Calendar Search binary sensors.
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/binary_sensor.google_calendar/
|
||||
"""
|
||||
# pylint: disable=import-error
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
|
||||
@@ -51,6 +50,10 @@ class GoogleCalendarEventDevice(CalendarEventDevice):
|
||||
|
||||
super().__init__(hass, data)
|
||||
|
||||
async def async_get_events(self, hass, start_date, end_date):
|
||||
"""Get all events in a specific time frame."""
|
||||
return await self.data.async_get_events(hass, start_date, end_date)
|
||||
|
||||
|
||||
class GoogleCalendarData(object):
|
||||
"""Class to utilize calendar service object to get next event."""
|
||||
@@ -64,9 +67,7 @@ class GoogleCalendarData(object):
|
||||
self.ignore_availability = ignore_availability
|
||||
self.event = None
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data."""
|
||||
def _prepare_query(self):
|
||||
from httplib2 import ServerNotFoundError
|
||||
|
||||
try:
|
||||
@@ -74,14 +75,40 @@ class GoogleCalendarData(object):
|
||||
except ServerNotFoundError:
|
||||
_LOGGER.warning("Unable to connect to Google, using cached data")
|
||||
return False
|
||||
|
||||
params = dict(DEFAULT_GOOGLE_SEARCH_PARAMS)
|
||||
params['timeMin'] = dt.now().isoformat('T')
|
||||
params['calendarId'] = self.calendar_id
|
||||
if self.search:
|
||||
params['q'] = self.search
|
||||
|
||||
events = service.events() # pylint: disable=no-member
|
||||
return service, params
|
||||
|
||||
async def async_get_events(self, hass, start_date, end_date):
|
||||
"""Get all events in a specific time frame."""
|
||||
service, params = await hass.async_add_job(self._prepare_query)
|
||||
params['timeMin'] = start_date.isoformat('T')
|
||||
params['timeMax'] = end_date.isoformat('T')
|
||||
|
||||
events = await hass.async_add_job(service.events)
|
||||
result = await hass.async_add_job(events.list(**params).execute)
|
||||
|
||||
items = result.get('items', [])
|
||||
event_list = []
|
||||
for item in items:
|
||||
if (not self.ignore_availability
|
||||
and 'transparency' in item.keys()):
|
||||
if item['transparency'] == 'opaque':
|
||||
event_list.append(item)
|
||||
else:
|
||||
event_list.append(item)
|
||||
return event_list
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data."""
|
||||
service, params = self._prepare_query()
|
||||
params['timeMin'] = dt.now().isoformat('T')
|
||||
|
||||
events = service.events()
|
||||
result = events.list(**params).execute()
|
||||
|
||||
items = result.get('items', [])
|
||||
|
||||
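The new async_get_events above drops free ('transparent') events unless ignore_availability is set; run against sample items the filter keeps these:

items = [
    {'summary': 'Busy meeting', 'transparency': 'opaque'},
    {'summary': 'FYI entry', 'transparency': 'transparent'},
    {'summary': 'No transparency field'},
]
ignore_availability = False

event_list = []
for item in items:
    if not ignore_availability and 'transparency' in item.keys():
        if item['transparency'] == 'opaque':
            event_list.append(item)
    else:
        event_list.append(item)
# event_list keeps 'Busy meeting' and 'No transparency field'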
@@ -257,6 +257,10 @@ class TodoistProjectDevice(CalendarEventDevice):
|
||||
super().cleanup()
|
||||
self._cal_data[ALL_TASKS] = []
|
||||
|
||||
async def async_get_events(self, hass, start_date, end_date):
|
||||
"""Get all events in a specific time frame."""
|
||||
return await self.data.async_get_events(hass, start_date, end_date)
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the device state attributes."""
|
||||
@@ -485,6 +489,31 @@ class TodoistProjectData(object):
|
||||
continue
|
||||
return event
|
||||
|
||||
async def async_get_events(self, hass, start_date, end_date):
|
||||
"""Get all tasks in a specific time frame."""
|
||||
if self._id is None:
|
||||
project_task_data = [
|
||||
task for task in self._api.state[TASKS]
|
||||
if not self._project_id_whitelist or
|
||||
task[PROJECT_ID] in self._project_id_whitelist]
|
||||
else:
|
||||
project_task_data = self._api.projects.get_data(self._id)[TASKS]
|
||||
|
||||
events = []
|
||||
time_format = '%a %d %b %Y %H:%M:%S %z'
|
||||
for task in project_task_data:
|
||||
due_date = datetime.strptime(task['due_date_utc'], time_format)
|
||||
if due_date > start_date and due_date < end_date:
|
||||
event = {
|
||||
'uid': task['id'],
|
||||
'title': task['content'],
|
||||
'start': due_date.isoformat(),
|
||||
'end': due_date.isoformat(),
|
||||
'allDay': True,
|
||||
}
|
||||
events.append(event)
|
||||
return events
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_UPDATES)
|
||||
def update(self):
|
||||
"""Get the latest data."""
|
||||
|
||||
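The Todoist async_get_events above turns a task into an event only when its due date falls inside the requested window; the check in isolation:

from datetime import datetime, timezone

time_format = '%a %d %b %Y %H:%M:%S %z'  # the due_date_utc format used above
due_date = datetime.strptime('Fri 01 Jun 2018 17:00:00 +0000', time_format)

start_date = datetime(2018, 6, 1, tzinfo=timezone.utc)
end_date = datetime(2018, 6, 2, tzinfo=timezone.utc)
print(start_date < due_date < end_date)  # True -> task becomes an event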
@@ -1,4 +1,3 @@
|
||||
# pylint: disable=too-many-lines
|
||||
"""
|
||||
Component to interface with cameras.
|
||||
|
||||
@@ -67,8 +66,8 @@ CAMERA_SERVICE_SNAPSHOT = CAMERA_SERVICE_SCHEMA.extend({
|
||||
|
||||
WS_TYPE_CAMERA_THUMBNAIL = 'camera_thumbnail'
|
||||
SCHEMA_WS_CAMERA_THUMBNAIL = websocket_api.BASE_COMMAND_MESSAGE_SCHEMA.extend({
|
||||
-    'type': WS_TYPE_CAMERA_THUMBNAIL,
-    'entity_id': cv.entity_id
+    vol.Required('type'): WS_TYPE_CAMERA_THUMBNAIL,
+    vol.Required('entity_id'): cv.entity_id
|
||||
})
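A websocket message satisfying the tightened schema carries both required keys. A sketch of the message shape only; the id is supplied by the websocket client and the entity id is a placeholder:

    # Example camera_thumbnail command as a plain dict.
    THUMBNAIL_REQUEST = {
        'id': 12,
        'type': 'camera_thumbnail',
        'entity_id': 'camera.demo_camera',
    }
    print(THUMBNAIL_REQUEST)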
|
||||
|
||||
|
||||
@@ -97,6 +96,7 @@ def disable_motion_detection(hass, entity_id=None):
|
||||
|
||||
|
||||
@bind_hass
|
||||
@callback
|
||||
def async_snapshot(hass, filename, entity_id=None):
|
||||
"""Make a snapshot from a camera."""
|
||||
data = {ATTR_ENTITY_ID: entity_id} if entity_id else {}
|
||||
@@ -129,8 +129,7 @@ async def async_get_image(hass, entity_id, timeout=10):
|
||||
raise HomeAssistantError('Unable to get image')
|
||||
|
||||
|
||||
-@asyncio.coroutine
-def async_setup(hass, config):
+async def async_setup(hass, config):
|
||||
"""Set up the camera component."""
|
||||
component = hass.data[DOMAIN] = \
|
||||
EntityComponent(_LOGGER, DOMAIN, hass, SCAN_INTERVAL)
|
||||
@@ -142,7 +141,7 @@ def async_setup(hass, config):
|
||||
SCHEMA_WS_CAMERA_THUMBNAIL
|
||||
)
|
||||
|
||||
-yield from component.async_setup(config)
+await component.async_setup(config)
|
||||
|
||||
@callback
|
||||
def update_tokens(time):
|
||||
@@ -154,27 +153,25 @@ def async_setup(hass, config):
|
||||
hass.helpers.event.async_track_time_interval(
|
||||
update_tokens, TOKEN_CHANGE_INTERVAL)
|
||||
|
||||
-@asyncio.coroutine
-def async_handle_camera_service(service):
+async def async_handle_camera_service(service):
|
||||
"""Handle calls to the camera services."""
|
||||
target_cameras = component.async_extract_from_service(service)
|
||||
|
||||
update_tasks = []
|
||||
for camera in target_cameras:
|
||||
if service.service == SERVICE_ENABLE_MOTION:
|
||||
-yield from camera.async_enable_motion_detection()
+await camera.async_enable_motion_detection()
|
||||
elif service.service == SERVICE_DISABLE_MOTION:
|
||||
-yield from camera.async_disable_motion_detection()
+await camera.async_disable_motion_detection()
|
||||
|
||||
if not camera.should_poll:
|
||||
continue
|
||||
update_tasks.append(camera.async_update_ha_state(True))
|
||||
|
||||
if update_tasks:
|
||||
-yield from asyncio.wait(update_tasks, loop=hass.loop)
+await asyncio.wait(update_tasks, loop=hass.loop)
|
||||
|
||||
-@asyncio.coroutine
-def async_handle_snapshot_service(service):
+async def async_handle_snapshot_service(service):
|
||||
"""Handle snapshot services calls."""
|
||||
target_cameras = component.async_extract_from_service(service)
|
||||
filename = service.data[ATTR_FILENAME]
|
||||
@@ -190,7 +187,7 @@ def async_setup(hass, config):
|
||||
"Can't write %s, no access to path!", snapshot_file)
|
||||
continue
|
||||
|
||||
-image = yield from camera.async_camera_image()
+image = await camera.async_camera_image()
|
||||
|
||||
def _write_image(to_file, image_data):
|
||||
"""Executor helper to write image."""
|
||||
@@ -198,7 +195,7 @@ def async_setup(hass, config):
|
||||
img_file.write(image_data)
|
||||
|
||||
try:
|
||||
-yield from hass.async_add_job(
+await hass.async_add_job(
|
||||
_write_image, snapshot_file, image)
|
||||
except OSError as err:
|
||||
_LOGGER.error("Can't write image to file: %s", err)
|
||||
@@ -216,6 +213,16 @@ def async_setup(hass, config):
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass, entry):
|
||||
"""Setup a config entry."""
|
||||
return await hass.data[DOMAIN].async_setup_entry(entry)
|
||||
|
||||
|
||||
async def async_unload_entry(hass, entry):
|
||||
"""Unload a config entry."""
|
||||
return await hass.data[DOMAIN].async_unload_entry(entry)
|
||||
|
||||
|
||||
class Camera(Entity):
|
||||
"""The base class for camera entities."""
|
||||
|
||||
@@ -265,6 +272,7 @@ class Camera(Entity):
|
||||
"""Return bytes of camera image."""
|
||||
raise NotImplementedError()
|
||||
|
||||
@callback
|
||||
def async_camera_image(self):
|
||||
"""Return bytes of camera image.
|
||||
|
||||
@@ -314,6 +322,7 @@ class Camera(Entity):
|
||||
except asyncio.CancelledError:
|
||||
_LOGGER.debug("Stream closed by frontend.")
|
||||
response = None
|
||||
raise
|
||||
|
||||
finally:
|
||||
if response is not None:
|
||||
@@ -388,8 +397,7 @@ class CameraView(HomeAssistantView):
|
||||
"""Initialize a basic camera view."""
|
||||
self.component = component
|
||||
|
||||
-@asyncio.coroutine
-def get(self, request, entity_id):
+async def get(self, request, entity_id):
|
||||
"""Start a GET request."""
|
||||
camera = self.component.get_entity(entity_id)
|
||||
|
||||
@@ -403,11 +411,10 @@ class CameraView(HomeAssistantView):
|
||||
if not authenticated:
|
||||
return web.Response(status=401)
|
||||
|
||||
-response = yield from self.handle(request, camera)
+response = await self.handle(request, camera)
|
||||
return response
|
||||
|
||||
-@asyncio.coroutine
-def handle(self, request, camera):
+async def handle(self, request, camera):
|
||||
"""Handle the camera request."""
|
||||
raise NotImplementedError()
|
||||
|
||||
@@ -418,12 +425,11 @@ class CameraImageView(CameraView):
|
||||
url = '/api/camera_proxy/{entity_id}'
|
||||
name = 'api:camera:image'
|
||||
|
||||
-@asyncio.coroutine
-def handle(self, request, camera):
+async def handle(self, request, camera):
|
||||
"""Serve camera image."""
|
||||
with suppress(asyncio.CancelledError, asyncio.TimeoutError):
|
||||
with async_timeout.timeout(10, loop=request.app['hass'].loop):
|
||||
-image = yield from camera.async_camera_image()
+image = await camera.async_camera_image()
|
||||
|
||||
if image:
|
||||
return web.Response(body=image,
|
||||
|
||||
@@ -4,23 +4,22 @@ Support for Netgear Arlo IP cameras.
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/camera.arlo/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import timedelta
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.core import callback
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
-from homeassistant.components.arlo import DEFAULT_BRAND, DATA_ARLO
+from homeassistant.components.arlo import (
+    DEFAULT_BRAND, DATA_ARLO, SIGNAL_UPDATE_ARLO)
|
||||
from homeassistant.components.camera import Camera, PLATFORM_SCHEMA
|
||||
from homeassistant.components.ffmpeg import DATA_FFMPEG
|
||||
from homeassistant.const import ATTR_BATTERY_LEVEL
|
||||
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=90)
|
||||
|
||||
ARLO_MODE_ARMED = 'armed'
|
||||
ARLO_MODE_DISARMED = 'disarmed'
|
||||
|
||||
@@ -44,22 +43,19 @@ POWERSAVE_MODE_MAPPING = {
|
||||
}
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
-vol.Optional(CONF_FFMPEG_ARGUMENTS):
-    cv.string,
+vol.Optional(CONF_FFMPEG_ARGUMENTS): cv.string,
|
||||
})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up an Arlo IP Camera."""
|
||||
-arlo = hass.data.get(DATA_ARLO)
-if not arlo:
-    return False
+arlo = hass.data[DATA_ARLO]
|
||||
|
||||
cameras = []
|
||||
for camera in arlo.cameras:
|
||||
cameras.append(ArloCam(hass, camera, config))
|
||||
|
||||
-add_devices(cameras, True)
+add_devices(cameras)
|
||||
|
||||
|
||||
class ArloCam(Camera):
|
||||
@@ -74,31 +70,41 @@ class ArloCam(Camera):
|
||||
self._ffmpeg = hass.data[DATA_FFMPEG]
|
||||
self._ffmpeg_arguments = device_info.get(CONF_FFMPEG_ARGUMENTS)
|
||||
self._last_refresh = None
|
||||
if self._camera.base_station:
|
||||
self._camera.base_station.refresh_rate = \
|
||||
SCAN_INTERVAL.total_seconds()
|
||||
self.attrs = {}
|
||||
|
||||
def camera_image(self):
|
||||
"""Return a still image response from the camera."""
|
||||
-return self._camera.last_image
+return self._camera.last_image_from_cache
|
||||
|
||||
-@asyncio.coroutine
-def handle_async_mjpeg_stream(self, request):
+async def async_added_to_hass(self):
|
||||
"""Register callbacks."""
|
||||
async_dispatcher_connect(
|
||||
self.hass, SIGNAL_UPDATE_ARLO, self._update_callback)
|
||||
|
||||
@callback
|
||||
def _update_callback(self):
|
||||
"""Call update method."""
|
||||
self.async_schedule_update_ha_state()
|
||||
|
||||
async def handle_async_mjpeg_stream(self, request):
|
||||
"""Generate an HTTP MJPEG stream from the camera."""
|
||||
from haffmpeg import CameraMjpeg
|
||||
video = self._camera.last_video
|
||||
if not video:
|
||||
error_msg = \
|
||||
'Video not found for {0}. Is it older than {1} days?'.format(
|
||||
self.name, self._camera.min_days_vdo_cache)
|
||||
_LOGGER.error(error_msg)
|
||||
return
|
||||
|
||||
stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
|
||||
-yield from stream.open_camera(
+await stream.open_camera(
|
||||
video.video_url, extra_cmd=self._ffmpeg_arguments)
|
||||
|
||||
-yield from async_aiohttp_proxy_stream(
+await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
-yield from stream.close()
+await stream.close()
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -132,11 +138,6 @@ class ArloCam(Camera):
|
||||
"""Return the camera brand."""
|
||||
return DEFAULT_BRAND
|
||||
|
||||
@property
|
||||
def should_poll(self):
|
||||
"""Camera should poll periodically."""
|
||||
return True
|
||||
|
||||
@property
|
||||
def motion_detection_enabled(self):
|
||||
"""Return the camera motion detection status."""
|
||||
@@ -164,7 +165,3 @@ class ArloCam(Camera):
|
||||
"""Disable the motion detection in base station (Disarm)."""
|
||||
self._motion_status = False
|
||||
self.set_base_station_mode(ARLO_MODE_DISARMED)
|
||||
|
||||
def update(self):
|
||||
"""Add an attribute-update task to the executor pool."""
|
||||
self._camera.update()
|
||||
|
||||
@@ -13,7 +13,6 @@ from homeassistant.components.camera import Camera
|
||||
DEPENDENCIES = ['bloomsky']
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up access to BloomSky cameras."""
|
||||
bloomsky = hass.components.bloomsky
|
||||
|
||||
@@ -12,9 +12,10 @@ from homeassistant.components.camera import Camera
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
-def setup_platform(hass, config, add_devices, discovery_info=None):
+async def async_setup_platform(hass, config, async_add_devices,
+                               discovery_info=None):
"""Set up the Demo camera platform."""
-add_devices([
+async_add_devices([
|
||||
DemoCamera(hass, config, 'Demo camera')
|
||||
])
|
||||
|
||||
|
||||
@@ -17,9 +17,9 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
DEPENDENCIES = ['doorbird']
|
||||
|
||||
_CAMERA_LAST_VISITOR = "DoorBird Last Ring"
|
||||
_CAMERA_LAST_MOTION = "DoorBird Last Motion"
|
||||
_CAMERA_LIVE = "DoorBird Live"
|
||||
_CAMERA_LAST_VISITOR = "{} Last Ring"
|
||||
_CAMERA_LAST_MOTION = "{} Last Motion"
|
||||
_CAMERA_LIVE = "{} Live"
|
||||
_LAST_VISITOR_INTERVAL = datetime.timedelta(minutes=1)
|
||||
_LAST_MOTION_INTERVAL = datetime.timedelta(minutes=1)
|
||||
_LIVE_INTERVAL = datetime.timedelta(seconds=1)
|
||||
@@ -30,16 +30,22 @@ _TIMEOUT = 10 # seconds
|
||||
@asyncio.coroutine
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up the DoorBird camera platform."""
|
||||
device = hass.data.get(DOORBIRD_DOMAIN)
|
||||
async_add_devices([
|
||||
DoorBirdCamera(device.live_image_url, _CAMERA_LIVE, _LIVE_INTERVAL),
|
||||
DoorBirdCamera(
|
||||
device.history_image_url(1, 'doorbell'), _CAMERA_LAST_VISITOR,
|
||||
_LAST_VISITOR_INTERVAL),
|
||||
DoorBirdCamera(
|
||||
device.history_image_url(1, 'motionsensor'), _CAMERA_LAST_MOTION,
|
||||
_LAST_MOTION_INTERVAL),
|
||||
])
|
||||
for doorstation in hass.data[DOORBIRD_DOMAIN]:
|
||||
device = doorstation.device
|
||||
async_add_devices([
|
||||
DoorBirdCamera(
|
||||
device.live_image_url,
|
||||
_CAMERA_LIVE.format(doorstation.name),
|
||||
_LIVE_INTERVAL),
|
||||
DoorBirdCamera(
|
||||
device.history_image_url(1, 'doorbell'),
|
||||
_CAMERA_LAST_VISITOR.format(doorstation.name),
|
||||
_LAST_VISITOR_INTERVAL),
|
||||
DoorBirdCamera(
|
||||
device.history_image_url(1, 'motionsensor'),
|
||||
_CAMERA_LAST_MOTION.format(doorstation.name),
|
||||
_LAST_MOTION_INTERVAL),
|
||||
])
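Camera names are now derived from each station's name via the templates above. A quick illustration with invented station names:

    CAMERA_LIVE = "{} Live"
    CAMERA_LAST_VISITOR = "{} Last Ring"
    CAMERA_LAST_MOTION = "{} Last Motion"

    for station_name in ('Front Door', 'Garage'):
        print(CAMERA_LIVE.format(station_name),
              CAMERA_LAST_VISITOR.format(station_name),
              CAMERA_LAST_MOTION.format(station_name), sep=' | ')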
|
||||
|
||||
|
||||
class DoorBirdCamera(Camera):
|
||||
|
||||
@@ -33,7 +33,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up a Foscam IP Camera."""
|
||||
add_devices([FoscamCam(config)])
|
||||
|
||||
@@ -46,7 +46,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
# pylint: disable=unused-argument
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up a generic IP Camera."""
|
||||
async_add_devices([GenericCamera(hass, config)])
|
||||
|
||||
@@ -42,7 +42,6 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
# pylint: disable=unused-argument
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up a MJPEG IP Camera."""
|
||||
if discovery_info:
|
||||
|
||||
@@ -10,12 +10,13 @@ from datetime import timedelta
|
||||
from homeassistant.components.camera import Camera
|
||||
from homeassistant.components.neato import (
|
||||
NEATO_MAP_DATA, NEATO_ROBOTS, NEATO_LOGIN)
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEPENDENCIES = ['neato']
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=10)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up the Neato Camera."""
|
||||
@@ -45,7 +46,6 @@ class NeatoCleaningMap(Camera):
|
||||
self.update()
|
||||
return self._image
|
||||
|
||||
@Throttle(timedelta(seconds=10))
|
||||
def update(self):
|
||||
"""Check the contents of the map list."""
|
||||
self.neato.update_robots()
|
||||
|
||||
@@ -23,14 +23,19 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up a Nest Cam."""
|
||||
if discovery_info is None:
|
||||
return
|
||||
"""Set up a Nest Cam.
|
||||
|
||||
camera_devices = hass.data[nest.DATA_NEST].cameras()
|
||||
No longer in use.
|
||||
"""
|
||||
|
||||
|
||||
async def async_setup_entry(hass, entry, async_add_devices):
|
||||
"""Set up a Nest sensor based on a config entry."""
|
||||
camera_devices = \
|
||||
await hass.async_add_job(hass.data[nest.DATA_NEST].cameras)
|
||||
cameras = [NestCamera(structure, device)
|
||||
for structure, device in camera_devices]
|
||||
add_devices(cameras, True)
|
||||
async_add_devices(cameras, True)
|
||||
|
||||
|
||||
class NestCamera(Camera):
|
||||
|
||||
@@ -29,13 +29,12 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
"""Set up access to Netatmo cameras."""
|
||||
netatmo = hass.components.netatmo
|
||||
home = config.get(CONF_HOME)
|
||||
verify_ssl = config.get(CONF_VERIFY_SSL, True)
|
||||
-import lnetatmo
+import pyatmo
|
||||
try:
|
||||
data = CameraData(netatmo.NETATMO_AUTH, home)
|
||||
for camera_name in data.get_camera_names():
|
||||
@@ -46,7 +45,7 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
continue
|
||||
add_devices([NetatmoCamera(data, camera_name, home,
|
||||
camera_type, verify_ssl)])
|
||||
-except lnetatmo.NoDevice:
+except pyatmo.NoDevice:
|
||||
return None
|
||||
|
||||
|
||||
|
||||
@@ -25,9 +25,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
REQUIREMENTS = ['onvif-py3==0.1.3',
|
||||
'suds-py3==1.3.3.0',
|
||||
-'http://github.com/tgaugry/suds-passworddigest-py3'
-'/archive/86fc50e39b4d2b8997481967d6a7fe1c57118999.zip'
-'#suds-passworddigest-py3==0.1.2a']
+'suds-passworddigest-homeassistant==0.1.2a0.dev0']
|
||||
DEPENDENCIES = ['ffmpeg']
|
||||
DEFAULT_NAME = 'ONVIF Camera'
|
||||
DEFAULT_PORT = 5000
|
||||
|
||||
@@ -233,6 +233,7 @@ class ProxyCamera(Camera):
|
||||
_LOGGER.debug("Stream closed by frontend.")
|
||||
req.close()
|
||||
response = None
|
||||
raise
|
||||
|
||||
finally:
|
||||
if response is not None:
|
||||
|
||||
homeassistant/components/camera/push.py (new file, 170 lines)
@@ -0,0 +1,170 @@
|
||||
"""
|
||||
Camera platform that receives images through HTTP POST.
|
||||
|
||||
For more details about this platform, please refer to the documentation
|
||||
https://home-assistant.io/components/camera.push/
|
||||
"""
|
||||
import logging
|
||||
|
||||
from collections import deque
|
||||
from datetime import timedelta
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.camera import Camera, PLATFORM_SCHEMA,\
|
||||
STATE_IDLE, STATE_RECORDING
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.components.http.view import HomeAssistantView
|
||||
from homeassistant.const import CONF_NAME, CONF_TIMEOUT, HTTP_BAD_REQUEST
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.event import async_track_point_in_utc_time
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_BUFFER_SIZE = 'buffer'
|
||||
CONF_IMAGE_FIELD = 'field'
|
||||
|
||||
DEFAULT_NAME = "Push Camera"
|
||||
|
||||
ATTR_FILENAME = 'filename'
|
||||
ATTR_LAST_TRIP = 'last_trip'
|
||||
|
||||
PUSH_CAMERA_DATA = 'push_camera'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_BUFFER_SIZE, default=1): cv.positive_int,
|
||||
vol.Optional(CONF_TIMEOUT, default=timedelta(seconds=5)): vol.All(
|
||||
cv.time_period, cv.positive_timedelta),
|
||||
vol.Optional(CONF_IMAGE_FIELD, default='image'): cv.string,
|
||||
})
|
||||
|
||||
|
||||
async def async_setup_platform(hass, config, async_add_devices,
|
||||
discovery_info=None):
|
||||
"""Set up the Push Camera platform."""
|
||||
if PUSH_CAMERA_DATA not in hass.data:
|
||||
hass.data[PUSH_CAMERA_DATA] = {}
|
||||
|
||||
cameras = [PushCamera(config[CONF_NAME],
|
||||
config[CONF_BUFFER_SIZE],
|
||||
config[CONF_TIMEOUT])]
|
||||
|
||||
hass.http.register_view(CameraPushReceiver(hass,
|
||||
config[CONF_IMAGE_FIELD]))
|
||||
|
||||
async_add_devices(cameras)
|
||||
|
||||
|
||||
class CameraPushReceiver(HomeAssistantView):
|
||||
"""Handle pushes from remote camera."""
|
||||
|
||||
url = "/api/camera_push/{entity_id}"
|
||||
name = 'api:camera_push:camera_entity'
|
||||
|
||||
def __init__(self, hass, image_field):
|
||||
"""Initialize CameraPushReceiver with camera entity."""
|
||||
self._cameras = hass.data[PUSH_CAMERA_DATA]
|
||||
self._image = image_field
|
||||
|
||||
async def post(self, request, entity_id):
|
||||
"""Accept the POST from Camera."""
|
||||
_camera = self._cameras.get(entity_id)
|
||||
|
||||
if _camera is None:
|
||||
_LOGGER.error("Unknown %s", entity_id)
|
||||
return self.json_message('Unknown {}'.format(entity_id),
|
||||
HTTP_BAD_REQUEST)
|
||||
|
||||
try:
|
||||
data = await request.post()
|
||||
_LOGGER.debug("Received Camera push: %s", data[self._image])
|
||||
await _camera.update_image(data[self._image].file.read(),
|
||||
data[self._image].filename)
|
||||
except ValueError as value_error:
|
||||
_LOGGER.error("Unknown value %s", value_error)
|
||||
return self.json_message('Invalid POST', HTTP_BAD_REQUEST)
|
||||
except KeyError as key_error:
|
||||
_LOGGER.error('In your POST message %s', key_error)
|
||||
return self.json_message('{} missing'.format(self._image),
|
||||
HTTP_BAD_REQUEST)
|
||||
|
||||
|
||||
class PushCamera(Camera):
|
||||
"""The representation of a Push camera."""
|
||||
|
||||
def __init__(self, name, buffer_size, timeout):
|
||||
"""Initialize push camera component."""
|
||||
super().__init__()
|
||||
self._name = name
|
||||
self._last_trip = None
|
||||
self._filename = None
|
||||
self._expired_listener = None
|
||||
self._state = STATE_IDLE
|
||||
self._timeout = timeout
|
||||
self.queue = deque([], buffer_size)
|
||||
self._current_image = None
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Call when entity is added to hass."""
|
||||
self.hass.data[PUSH_CAMERA_DATA][self.entity_id] = self
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Current state of the camera."""
|
||||
return self._state
|
||||
|
||||
async def update_image(self, image, filename):
|
||||
"""Update the camera image."""
|
||||
if self._state == STATE_IDLE:
|
||||
self._state = STATE_RECORDING
|
||||
self._last_trip = dt_util.utcnow()
|
||||
self.queue.clear()
|
||||
|
||||
self._filename = filename
|
||||
self.queue.appendleft(image)
|
||||
|
||||
@callback
|
||||
def reset_state(now):
|
||||
"""Set state to idle after no new images for a period of time."""
|
||||
self._state = STATE_IDLE
|
||||
self._expired_listener = None
|
||||
_LOGGER.debug("Reset state")
|
||||
self.async_schedule_update_ha_state()
|
||||
|
||||
if self._expired_listener:
|
||||
self._expired_listener()
|
||||
|
||||
self._expired_listener = async_track_point_in_utc_time(
|
||||
self.hass, reset_state, dt_util.utcnow() + self._timeout)
|
||||
|
||||
self.async_schedule_update_ha_state()
|
||||
|
||||
async def async_camera_image(self):
|
||||
"""Return a still image response."""
|
||||
if self.queue:
|
||||
if self._state == STATE_IDLE:
|
||||
self.queue.rotate(1)
|
||||
self._current_image = self.queue[0]
|
||||
|
||||
return self._current_image
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of this camera."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def motion_detection_enabled(self):
|
||||
"""Camera Motion Detection Status."""
|
||||
return False
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
name: value for name, value in (
|
||||
(ATTR_LAST_TRIP, self._last_trip),
|
||||
(ATTR_FILENAME, self._filename),
|
||||
) if value is not None
|
||||
}
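A device or script feeds this camera by POSTing a multipart form to /api/camera_push/<entity_id>, with the upload in the configured field (default 'image'). A minimal client sketch with the requests library; the URL, entity id and file name are placeholders, and the instance's normal HTTP authentication still applies:

    import requests

    URL = 'http://hass.local:8123/api/camera_push/camera.push_camera'

    with open('snapshot.jpg', 'rb') as image:
        # The field name must match the platform's 'field' option (default: image).
        response = requests.post(URL, files={'image': image})

    print(response.status_code)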
|
||||
@@ -13,6 +13,7 @@ import voluptuous as vol
|
||||
from homeassistant.const import CONF_PORT
|
||||
from homeassistant.components.camera import Camera, PLATFORM_SCHEMA
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
|
||||
REQUIREMENTS = ['uvcclient==0.10.1']
|
||||
|
||||
@@ -41,25 +42,26 @@ def setup_platform(hass, config, add_devices, discovery_info=None):
|
||||
port = config[CONF_PORT]
|
||||
|
||||
from uvcclient import nvr
|
||||
nvrconn = nvr.UVCRemote(addr, port, key)
|
||||
try:
|
||||
# Exceptions may be raised in all method calls to the nvr library.
|
||||
nvrconn = nvr.UVCRemote(addr, port, key)
|
||||
cameras = nvrconn.index()
|
||||
|
||||
identifier = 'id' if nvrconn.server_version >= (3, 2, 0) else 'uuid'
|
||||
# Filter out airCam models, which are not supported in the latest
|
||||
# version of UnifiVideo and which are EOL by Ubiquiti
|
||||
cameras = [
|
||||
camera for camera in cameras
|
||||
if 'airCam' not in nvrconn.get_camera(camera[identifier])['model']]
|
||||
except nvr.NotAuthorized:
|
||||
_LOGGER.error("Authorization failure while connecting to NVR")
|
||||
return False
|
||||
except nvr.NvrError:
|
||||
_LOGGER.error("NVR refuses to talk to me")
|
||||
return False
|
||||
except nvr.NvrError as ex:
|
||||
_LOGGER.error("NVR refuses to talk to me: %s", str(ex))
|
||||
raise PlatformNotReady
|
||||
except requests.exceptions.ConnectionError as ex:
|
||||
_LOGGER.error("Unable to connect to NVR: %s", str(ex))
|
||||
return False
|
||||
|
||||
identifier = 'id' if nvrconn.server_version >= (3, 2, 0) else 'uuid'
|
||||
# Filter out airCam models, which are not supported in the latest
|
||||
# version of UnifiVideo and which are EOL by Ubiquiti
|
||||
cameras = [
|
||||
camera for camera in cameras
|
||||
if 'airCam' not in nvrconn.get_camera(camera[identifier])['model']]
|
||||
raise PlatformNotReady
|
||||
|
||||
add_devices([UnifiVideoCamera(nvrconn,
|
||||
camera[identifier],
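The comprehension above drops unsupported airCam models and picks the camera identifier key from the NVR server version. The same selection, isolated into a small function with stubbed data for illustration:

    def supported_cameras(cameras, get_camera, server_version):
        """Filter out EOL airCam models, mirroring the comprehension above."""
        identifier = 'id' if server_version >= (3, 2, 0) else 'uuid'
        return [camera for camera in cameras
                if 'airCam' not in get_camera(camera[identifier])['model']]

    cameras = [{'id': 'a1', 'uuid': 'u1'}, {'id': 'a2', 'uuid': 'u2'}]
    models = {'a1': {'model': 'UVC G3'}, 'a2': {'model': 'airCam Mini'}}
    print(supported_cameras(cameras, lambda cam: models[cam], (3, 2, 0)))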
|
||||
|
||||
@@ -67,8 +67,6 @@ async def async_setup_platform(hass, config, async_add_devices,
|
||||
]
|
||||
|
||||
for cam in config.get(CONF_CAMERAS, []):
|
||||
# https://github.com/PyCQA/pylint/issues/1830
|
||||
# pylint: disable=stop-iteration-return
|
||||
camera = next(
|
||||
(dc for dc in discovered_cameras
|
||||
if dc[CONF_IMAGE_NAME] == cam[CONF_IMAGE_NAME]), None)
|
||||
|
||||
homeassistant/components/camera/xiaomi.py (new file, 164 lines)
@@ -0,0 +1,164 @@
|
||||
"""
|
||||
This component provides support for Xiaomi Cameras.
|
||||
|
||||
For more details about this platform, please refer to the documentation at
|
||||
https://home-assistant.io/components/camera.xiaomi/
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.camera import Camera, PLATFORM_SCHEMA
|
||||
from homeassistant.components.ffmpeg import DATA_FFMPEG
|
||||
from homeassistant.const import (CONF_HOST, CONF_NAME, CONF_PATH,
|
||||
CONF_PASSWORD, CONF_PORT, CONF_USERNAME)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
|
||||
|
||||
DEPENDENCIES = ['ffmpeg']
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_BRAND = 'Xiaomi Home Camera'
|
||||
DEFAULT_PATH = '/media/mmcblk0p1/record'
|
||||
DEFAULT_PORT = 21
|
||||
DEFAULT_USERNAME = 'root'
|
||||
|
||||
CONF_FFMPEG_ARGUMENTS = 'ffmpeg_arguments'
|
||||
CONF_MODEL = 'model'
|
||||
|
||||
MODEL_YI = 'yi'
|
||||
MODEL_XIAOFANG = 'xiaofang'
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_NAME): cv.string,
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_MODEL): vol.Any(MODEL_YI,
|
||||
MODEL_XIAOFANG),
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.string,
|
||||
vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
|
||||
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_FFMPEG_ARGUMENTS): cv.string
|
||||
})
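The schema requires name, host, model and password, with FTP defaults for the rest. A small sketch exercising an equivalent voluptuous schema directly; the keys mirror the constants above, and the values are placeholders rather than a real camera:

    import voluptuous as vol

    SCHEMA = vol.Schema({
        vol.Required('name'): str,
        vol.Required('host'): str,
        vol.Required('model'): vol.Any('yi', 'xiaofang'),
        vol.Required('password'): str,
        vol.Optional('port', default='21'): str,
        vol.Optional('path', default='/media/mmcblk0p1/record'): str,
        vol.Optional('username', default='root'): str,
    })

    print(SCHEMA({'name': 'yard', 'host': '192.168.1.50',
                  'model': 'yi', 'password': 'secret'}))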
|
||||
|
||||
|
||||
async def async_setup_platform(hass,
|
||||
config,
|
||||
async_add_devices,
|
||||
discovery_info=None):
|
||||
"""Set up a Xiaomi Camera."""
|
||||
_LOGGER.debug('Received configuration for model %s', config[CONF_MODEL])
|
||||
async_add_devices([XiaomiCamera(hass, config)])
|
||||
|
||||
|
||||
class XiaomiCamera(Camera):
|
||||
"""Define an implementation of a Xiaomi Camera."""
|
||||
|
||||
def __init__(self, hass, config):
|
||||
"""Initialize."""
|
||||
super().__init__()
|
||||
self._extra_arguments = config.get(CONF_FFMPEG_ARGUMENTS)
|
||||
self._last_image = None
|
||||
self._last_url = None
|
||||
self._manager = hass.data[DATA_FFMPEG]
|
||||
self._name = config[CONF_NAME]
|
||||
self.host = config[CONF_HOST]
|
||||
self._model = config[CONF_MODEL]
|
||||
self.port = config[CONF_PORT]
|
||||
self.path = config[CONF_PATH]
|
||||
self.user = config[CONF_USERNAME]
|
||||
self.passwd = config[CONF_PASSWORD]
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of this camera."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def brand(self):
|
||||
"""Return the camera brand."""
|
||||
return DEFAULT_BRAND
|
||||
|
||||
@property
|
||||
def model(self):
|
||||
"""Return the camera model."""
|
||||
return self._model
|
||||
|
||||
def get_latest_video_url(self):
|
||||
"""Retrieve the latest video file from the Xiaomi Camera FTP server."""
|
||||
from ftplib import FTP, error_perm
|
||||
|
||||
ftp = FTP(self.host)
|
||||
try:
|
||||
ftp.login(self.user, self.passwd)
|
||||
except error_perm as exc:
|
||||
_LOGGER.error('Camera login failed: %s', exc)
|
||||
return False
|
||||
|
||||
try:
|
||||
ftp.cwd(self.path)
|
||||
except error_perm as exc:
|
||||
_LOGGER.error('Unable to find path: %s - %s', self.path, exc)
|
||||
return False
|
||||
|
||||
dirs = [d for d in ftp.nlst() if '.' not in d]
|
||||
if not dirs:
|
||||
_LOGGER.warning("There don't appear to be any folders")
|
||||
return False
|
||||
|
||||
first_dir = dirs[-1]
|
||||
try:
|
||||
ftp.cwd(first_dir)
|
||||
except error_perm as exc:
|
||||
_LOGGER.error('Unable to find path: %s - %s', first_dir, exc)
|
||||
return False
|
||||
|
||||
if self._model == MODEL_XIAOFANG:
|
||||
dirs = [d for d in ftp.nlst() if '.' not in d]
|
||||
if not dirs:
|
||||
_LOGGER.warning("There don't appear to be any uploaded videos")
|
||||
return False
|
||||
|
||||
latest_dir = dirs[-1]
|
||||
ftp.cwd(latest_dir)
|
||||
|
||||
videos = [v for v in ftp.nlst() if '.tmp' not in v]
|
||||
if not videos:
|
||||
_LOGGER.info('Video folder "%s" is empty; delaying', latest_dir)
|
||||
return False
|
||||
|
||||
if self._model == MODEL_XIAOFANG:
|
||||
video = videos[-2]
|
||||
else:
|
||||
video = videos[-1]
|
||||
|
||||
return 'ftp://{0}:{1}@{2}:{3}{4}/{5}'.format(
|
||||
self.user, self.passwd, self.host, self.port, ftp.pwd(), video)
|
||||
|
||||
async def async_camera_image(self):
|
||||
"""Return a still image response from the camera."""
|
||||
from haffmpeg import ImageFrame, IMAGE_JPEG
|
||||
|
||||
url = await self.hass.async_add_job(self.get_latest_video_url)
|
||||
if url != self._last_url:
|
||||
ffmpeg = ImageFrame(self._manager.binary, loop=self.hass.loop)
|
||||
self._last_image = await asyncio.shield(ffmpeg.get_image(
|
||||
url, output_format=IMAGE_JPEG,
|
||||
extra_cmd=self._extra_arguments), loop=self.hass.loop)
|
||||
self._last_url = url
|
||||
|
||||
return self._last_image
|
||||
|
||||
async def handle_async_mjpeg_stream(self, request):
|
||||
"""Generate an HTTP MJPEG stream from the camera."""
|
||||
from haffmpeg import CameraMjpeg
|
||||
|
||||
stream = CameraMjpeg(self._manager.binary, loop=self.hass.loop)
|
||||
await stream.open_camera(
|
||||
self._last_url, extra_cmd=self._extra_arguments)
|
||||
|
||||
await async_aiohttp_proxy_stream(
|
||||
self.hass, request, stream,
|
||||
'multipart/x-mixed-replace;boundary=ffserver')
|
||||
await stream.close()
|
||||
@@ -11,11 +11,13 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components.camera import Camera, PLATFORM_SCHEMA
|
||||
from homeassistant.components.ffmpeg import DATA_FFMPEG
|
||||
from homeassistant.const import (CONF_HOST, CONF_NAME, CONF_PATH,
|
||||
CONF_PASSWORD, CONF_PORT, CONF_USERNAME)
|
||||
from homeassistant.const import (
|
||||
CONF_HOST, CONF_NAME, CONF_PATH, CONF_PASSWORD, CONF_PORT, CONF_USERNAME)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
|
||||
REQUIREMENTS = ['aioftp==0.10.1']
|
||||
DEPENDENCIES = ['ffmpeg']
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -38,12 +40,9 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
})
|
||||
|
||||
|
||||
async def async_setup_platform(hass,
|
||||
config,
|
||||
async_add_devices,
|
||||
discovery_info=None):
|
||||
async def async_setup_platform(
|
||||
hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up a Yi Camera."""
|
||||
_LOGGER.debug('Received configuration: %s', config)
|
||||
async_add_devices([YiCamera(hass, config)], True)
|
||||
|
||||
|
||||
@@ -57,68 +56,72 @@ class YiCamera(Camera):
|
||||
self._last_image = None
|
||||
self._last_url = None
|
||||
self._manager = hass.data[DATA_FFMPEG]
|
||||
self._name = config.get(CONF_NAME)
|
||||
self.host = config.get(CONF_HOST)
|
||||
self.port = config.get(CONF_PORT)
|
||||
self.path = config.get(CONF_PATH)
|
||||
self.user = config.get(CONF_USERNAME)
|
||||
self.passwd = config.get(CONF_PASSWORD)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of this camera."""
|
||||
return self._name
|
||||
self._name = config[CONF_NAME]
|
||||
self.host = config[CONF_HOST]
|
||||
self.port = config[CONF_PORT]
|
||||
self.path = config[CONF_PATH]
|
||||
self.user = config[CONF_USERNAME]
|
||||
self.passwd = config[CONF_PASSWORD]
|
||||
|
||||
@property
|
||||
def brand(self):
|
||||
"""Camera brand."""
|
||||
return DEFAULT_BRAND
|
||||
|
||||
def get_latest_video_url(self):
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of this camera."""
|
||||
return self._name
|
||||
|
||||
async def _get_latest_video_url(self):
|
||||
"""Retrieve the latest video file from the customized Yi FTP server."""
|
||||
from ftplib import FTP, error_perm
|
||||
from aioftp import Client, StatusCodeError
|
||||
|
||||
ftp = FTP(self.host)
|
||||
ftp = Client(loop=self.hass.loop)
|
||||
try:
|
||||
ftp.login(self.user, self.passwd)
|
||||
except error_perm as exc:
|
||||
_LOGGER.error('There was an error while logging into the camera')
|
||||
_LOGGER.debug(exc)
|
||||
return False
|
||||
await ftp.connect(self.host)
|
||||
await ftp.login(self.user, self.passwd)
|
||||
except StatusCodeError as err:
|
||||
raise PlatformNotReady(err)
|
||||
|
||||
try:
|
||||
ftp.cwd(self.path)
|
||||
except error_perm as exc:
|
||||
_LOGGER.error('Unable to find path: %s', self.path)
|
||||
_LOGGER.debug(exc)
|
||||
return False
|
||||
await ftp.change_directory(self.path)
|
||||
dirs = []
|
||||
for path, attrs in await ftp.list():
|
||||
if attrs['type'] == 'dir' and '.' not in str(path):
|
||||
dirs.append(path)
|
||||
latest_dir = dirs[-1]
|
||||
await ftp.change_directory(latest_dir)
|
||||
|
||||
dirs = [d for d in ftp.nlst() if '.' not in d]
|
||||
if not dirs:
|
||||
_LOGGER.warning("There don't appear to be any uploaded videos")
|
||||
return False
|
||||
videos = []
|
||||
for path, _ in await ftp.list():
|
||||
videos.append(path)
|
||||
if not videos:
|
||||
_LOGGER.info('Video folder "%s" empty; delaying', latest_dir)
|
||||
return None
|
||||
|
||||
latest_dir = dirs[-1]
|
||||
ftp.cwd(latest_dir)
|
||||
videos = ftp.nlst()
|
||||
if not videos:
|
||||
_LOGGER.info('Video folder "%s" is empty; delaying', latest_dir)
|
||||
return False
|
||||
await ftp.quit()
|
||||
|
||||
return 'ftp://{0}:{1}@{2}:{3}{4}/{5}/{6}'.format(
|
||||
self.user, self.passwd, self.host, self.port, self.path,
|
||||
latest_dir, videos[-1])
|
||||
return 'ftp://{0}:{1}@{2}:{3}{4}/{5}/{6}'.format(
|
||||
self.user, self.passwd, self.host, self.port, self.path,
|
||||
latest_dir, videos[-1])
|
||||
except (ConnectionRefusedError, StatusCodeError) as err:
|
||||
_LOGGER.error('Error while fetching video: %s', err)
|
||||
return None
|
||||
|
||||
async def async_camera_image(self):
|
||||
"""Return a still image response from the camera."""
|
||||
from haffmpeg import ImageFrame, IMAGE_JPEG
|
||||
|
||||
url = await self.hass.async_add_job(self.get_latest_video_url)
|
||||
url = await self._get_latest_video_url()
|
||||
if url != self._last_url:
|
||||
ffmpeg = ImageFrame(self._manager.binary, loop=self.hass.loop)
|
||||
self._last_image = await asyncio.shield(ffmpeg.get_image(
|
||||
url, output_format=IMAGE_JPEG,
|
||||
extra_cmd=self._extra_arguments), loop=self.hass.loop)
|
||||
self._last_image = await asyncio.shield(
|
||||
ffmpeg.get_image(
|
||||
url,
|
||||
output_format=IMAGE_JPEG,
|
||||
extra_cmd=self._extra_arguments),
|
||||
loop=self.hass.loop)
|
||||
self._last_url = url
|
||||
|
||||
return self._last_image
|
||||
|
||||
@@ -49,7 +49,6 @@ def _get_image_url(hass, monitor, mode):
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
# pylint: disable=unused-argument
|
||||
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
|
||||
"""Set up the ZoneMinder cameras."""
|
||||
cameras = []
|
||||
|
||||
homeassistant/components/cast/.translations/ca.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "No s'han trobat dispositius de Google Cast a la xarxa.",
|
||||
"single_instance_allowed": "Nom\u00e9s cal una \u00fanica configuraci\u00f3 de Google Cast."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Voleu configurar Google Cast?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/cs.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "V s\u00edti nebyly nalezena \u017e\u00e1dn\u00e1 za\u0159\u00edzen\u00ed Google Cast.",
|
||||
"single_instance_allowed": "Pouze jedin\u00e1 konfigurace Google Cast je nezbytn\u00e1."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Chcete nastavit Google Cast?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/de.json (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "Keine Google Cast Ger\u00e4te im Netzwerk gefunden."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "M\u00f6chten Sie Google Cast einrichten?",
|
||||
"title": ""
|
||||
}
|
||||
},
|
||||
"title": ""
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/en.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "No Google Cast devices found on the network.",
|
||||
"single_instance_allowed": "Only a single configuration of Google Cast is necessary."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Do you want to setup Google Cast?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/hu.json (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "Nem tal\u00e1lhat\u00f3k Google Cast eszk\u00f6z\u00f6k a h\u00e1l\u00f3zaton."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Be szeretn\u00e9d \u00e1ll\u00edtani a Google Cast szolg\u00e1ltat\u00e1st?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/it.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "Nessun dispositivo Google Cast trovato in rete.",
|
||||
"single_instance_allowed": "\u00c8 necessaria una sola configurazione di Google Cast."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Vuoi configurare Google Cast?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/ko.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "Googgle Cast \uc7a5\uce58\uac00 \ub124\ud2b8\uc6cc\ud06c\uc5d0\uc11c \ubc1c\uacac\ub418\uc9c0 \uc54a\uc558\uc2b5\ub2c8\ub2e4.",
|
||||
"single_instance_allowed": "Google Cast\uc758 \ub2e8\uc77c \uad6c\uc131 \ub9cc \ud544\uc694\ud569\ub2c8\ub2e4."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Google Cast\ub97c \uc124\uc815 \ud558\uc2dc\uaca0\uc2b5\ub2c8\uae4c?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/lb.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "Keng Google Cast Apparater am Netzwierk fonnt.",
|
||||
"single_instance_allowed": "N\u00ebmmen eng eenzeg Konfiguratioun vun Google Cast ass n\u00e9ideg."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Soll Google Cast konfigur\u00e9iert ginn?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/nl.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "Geen Google Cast-apparaten gevonden op het netwerk.",
|
||||
"single_instance_allowed": "Er is slechts \u00e9\u00e9n configuratie van Google Cast nodig."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Wilt u Google Cast instellen?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/no.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "Ingen Google Cast enheter funnet p\u00e5 nettverket.",
|
||||
"single_instance_allowed": "Kun en enkelt konfigurasjon av Google Cast er n\u00f8dvendig."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "\u00d8nsker du \u00e5 sette opp Google Cast?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
homeassistant/components/cast/.translations/pl.json (new file, 15 lines)
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"no_devices_found": "Nie znaleziono w sieci urz\u0105dze\u0144 Google Cast.",
|
||||
"single_instance_allowed": "Wymagana jest tylko jedna konfiguracja Google Cast."
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Czy chcesz skonfigurowa\u0107 Google Cast?",
|
||||
"title": "Google Cast"
|
||||
}
|
||||
},
|
||||
"title": "Google Cast"
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.