Compare commits

...

237 Commits

Author SHA1 Message Date
Koushik Dutta
711eb222ed postbeta 2023-07-26 17:35:07 -07:00
Koushik Dutta
19f8bfb74a unifi-protect: older controller fix for doorbells 2023-07-24 15:07:10 -07:00
Koushik Dutta
08a8428d6e openvino: update dep 2023-07-23 18:50:21 -07:00
Koushik Dutta
4feeeda904 openvino: restart if detection times out 2023-07-23 18:48:07 -07:00
Koushik Dutta
753373a691 server: validate local address 2023-07-22 18:54:23 -07:00
Koushik Dutta
2f3529b822 rebroadcast: simplify prebuffer sync frame search, remove dead code... 2023-07-21 18:16:13 -07:00
Koushik Dutta
2501d1460b sdk: fix frame generator signature 2023-07-21 18:12:51 -07:00
Koushik Dutta
e063637100 rebroadcast: Fix prebuffer sync frame search lol 2023-07-21 18:12:37 -07:00
Koushik Dutta
5ec0bf4bf3 Merge branch 'main' of github.com:koush/scrypted 2023-07-20 19:53:47 -07:00
Koushik Dutta
0c05b59121 tensorflow-lite: temp hack to fix wide angle detection 2023-07-20 19:53:43 -07:00
Koushik Dutta
cbbfa0b525 homekit: readme 2023-07-20 13:25:24 -07:00
Koushik Dutta
28835b1ccc unifi-protect: use new isDoorbell flag 2023-07-18 12:24:54 -07:00
Koushik Dutta
0585e7bbaf unifi: log characteristics 2023-07-18 11:42:53 -07:00
Koushik Dutta
b2040ea2c8 videoanalysis: fix motion suspend/timeout/resume interaction 2023-07-18 10:05:36 -07:00
Koushik Dutta
2fd2151b4f Merge branch 'main' of github.com:koush/scrypted 2023-07-18 09:30:19 -07:00
Koushik Dutta
4c7974519d homekit/webrtc: fix broken stapa handling on unifi 2023-07-18 09:30:15 -07:00
Brett Jia
d91c919558 arlo: upstreaming changes for versions 0.8.5 - 0.8.11 (#956)
* cancel motion and audio events after 60s

* retry on imap errors

* bump 0.8.5 for beta

* better detection of sse shutdown to avoid thrashing

* restart plugin on unrecoverable login error

* bump 0.8.6 for beta

* more error handling + bump curl-cffi

* bump 0.8.7 for release

* delay motion and audio event end triggers by 10s

* transfer sip ffmpeg params to stream signaling code

* bump 0.8.8 for beta

* allow customizing imap sender address

* bump 0.8.9 for beta

* bump 0.8.10 for release

* docs, imap backoff, use bs4 to parse 2fa email

* bump 0.8.11 for release
2023-07-16 14:46:48 -07:00
Koushik Dutta
7a297761bc homekit: fix mdns names? 2023-07-15 09:35:32 -07:00
Koushik Dutta
c15e10e5cf rtp: disable jitter buffer spam 2023-07-15 09:12:01 -07:00
Koushik Dutta
3494106857 python-client: fix message queues 2023-07-15 08:52:24 -07:00
Koushik Dutta
7d3dfb16f0 predict: rev model downloads for label normalization 2023-07-14 12:13:18 -07:00
Koushik Dutta
63fc223036 docker: move deadsnakes ppa to docker only. tf no longer supported local install. 2023-07-12 21:57:26 -07:00
Koushik Dutta
6736379858 ring: support reload login between auth and code 2023-07-11 19:02:43 -07:00
Koushik Dutta
7a811b2b22 ring: publish auth fixes 2023-07-11 16:59:54 -07:00
Koushik Dutta
dd5cb432c9 google-device-access: hack comment 2023-07-11 14:49:47 -07:00
Koushik Dutta
ab3a71ab49 Merge branch 'main' of github.com:koush/scrypted 2023-07-11 14:46:13 -07:00
Koushik Dutta
b5c9382180 google-device-access: fix webrtc negotiation via hack 2023-07-11 14:46:07 -07:00
Koushik Dutta
81682678ac Update README.md 2023-07-11 11:18:44 -07:00
Koushik Dutta
dec184629e Update README.md (#946)
* Update README.md

* Update README.md
2023-07-11 11:15:35 -07:00
dignabbit
f33bb53138 docker: improve management of avahi (#940)
Co-authored-by: Dignabbit <test@example.com>
2023-07-11 11:15:13 -07:00
Koushik Dutta
2d3957e086 Update README.md 2023-07-10 23:57:49 -07:00
Koushik Dutta
d16ed9e54f Update README.md 2023-07-10 23:57:01 -07:00
Koushik Dutta
d7e8052498 ring: remove push receiver token shim 2023-07-10 11:27:02 -07:00
Koushik Dutta
48cd3830a5 webrtc: pass through single packet stapa 2023-07-10 07:59:05 -07:00
Koushik Dutta
ce138d1a17 videoanalysis: log when object detection is zone filtered. 2023-07-08 11:29:15 -07:00
Koushik Dutta
7b4919fba9 Merge branch 'main' of github.com:koush/scrypted 2023-07-08 09:17:07 -07:00
Koushik Dutta
0b3dee3a03 ring: support custom controlCenterDisplayName 2023-07-08 09:16:54 -07:00
Raman Gupta
4cef09540b sdk: fix typing (#938) 2023-07-08 09:11:55 -07:00
Koushik Dutta
92583e568a ring: fix erroneous polling 2023-07-07 14:39:36 -07:00
Koushik Dutta
67aaa08c31 Merge branch 'main' of github.com:koush/scrypted 2023-07-06 08:12:59 -07:00
Koushik Dutta
2e9f618f6f snapshot: fix default behavior when snapshot url is provided on cameras without a Camera interface 2023-07-06 08:12:54 -07:00
Raman Gupta
bf4d39d6af sdk: Improve python generation (#931)
* Improve python generation

* tweak

* tweak

* Move classes to other
2023-07-06 08:04:19 -07:00
Raman Gupta
c31e68f720 Update connect_scrypted_client (#932) 2023-07-05 17:38:43 -07:00
Koushik Dutta
6d8b3c1ce7 ring: publish with more auth fixes 2023-07-05 12:47:05 -07:00
Koushik Dutta
106fef95b4 webrtc: notify track startup failure 2023-07-04 23:52:19 -07:00
Koushik Dutta
488d68ee1c python-client: initial implementation 2023-07-04 14:02:51 -07:00
Koushik Dutta
f7e35fb1ee Merge branch 'main' of github.com:koush/scrypted 2023-07-04 09:07:09 -07:00
Koushik Dutta
b1bf897bdb ring: notification fixes 2023-07-04 09:07:05 -07:00
Brett Jia
8eb533c220 arlo: update to 0.8.4 beta (#923)
* basestation debugging output

* faster 2 way startup

* fix type annotation

* separate thread for logging server + bump scrypted-arlo-go

* update backup auth hosts

* bump 0.8.1 for release

* further optimize 2 way startup latency

* bump 0.8.2 for release

* skip pings on battery doorbell

* bump 0.8.3 for beta

* more docs

* try fix cloudflare 403 with curl-cffi

* bump 0.8.4 for beta
2023-07-02 17:14:29 -07:00
Koushik Dutta
f10cdfbced opencv: handle frame size changes 2023-07-02 14:08:00 -07:00
Koushik Dutta
8f5e9e5a8c rebroadcast: keep trying to restart rtsp server 2023-07-02 12:53:19 -07:00
Koushik Dutta
cc0283ef39 videoanalysis: add pipeline hang logging 2023-07-02 08:47:10 -07:00
Koushik Dutta
5c7b67c973 videoanalysis: restart motion detection on stopped streams 2023-07-02 08:38:43 -07:00
Koushik Dutta
d1be0f1b4c Merge branch 'main' of github.com:koush/scrypted 2023-06-30 19:11:41 -07:00
Koushik Dutta
55d58d1e44 reolink: use new client per event listener 2023-06-30 19:11:37 -07:00
Koushik Dutta
d9dccf36a3 mail: readme 2023-06-30 17:58:23 -07:00
Koushik Dutta
33477fdf80 reolink/onvif: fix listener destroy throw error 2023-06-30 11:49:20 -07:00
Koushik Dutta
e6ece3aa3e videoanalysis: add anayze mode hint 2023-06-30 11:42:40 -07:00
Koushik Dutta
6a4126191b videoanalysis: settings tweaks 2023-06-30 11:07:06 -07:00
Koushik Dutta
e9f999b911 docker: simplify nvr storage instructions 2023-06-30 10:29:56 -07:00
Koushik Dutta
1fef31a081 docker: fix reversed logic 2023-06-29 21:14:52 -07:00
Koushik Dutta
659f99c33d docker: fix install on linux when /dev/dri is missing 2023-06-29 21:13:47 -07:00
Koushik Dutta
a9deff0046 webrtc: allow mac/ios types 2023-06-29 19:38:23 -07:00
Koushik Dutta
7a56cefe2a reolink: add support for reolink doorbells, deprecating onvif plugin usage 2023-06-29 09:44:40 -07:00
Koushik Dutta
a06c6e9568 webrtc: fix erroneous window laptop transcode. fix spurious NAL delimiter logging. 2023-06-28 20:33:24 -07:00
Koushik Dutta
56f127a203 webrtc: stapa/sei fix. stream start failure fix/logging. 2023-06-28 11:24:26 -07:00
Koushik Dutta
2ffe67b2db videoanalysis: fix cpu calc 2023-06-27 23:23:02 -07:00
Koushik Dutta
44dc648398 videoanalysis: uncap detection duration. disable snapshot fallback. use a max concurrent detection calcuation 2023-06-27 23:09:41 -07:00
Koushik Dutta
7807cc4bc6 Merge branch 'main' of github.com:koush/scrypted 2023-06-27 20:00:38 -07:00
Koushik Dutta
81fb690089 reolink: docs 2023-06-27 20:00:32 -07:00
Brett Jia
8b15617f6e arlo: various enhancements + upstreaming changes (#913)
* reorder models and add VMC4060P

* add VMC4060P

* use new UA for cloudscraper + bump scrypted-arlo-go

* bump 0.7.30 for release

* improve readme

* tcp logger server to collect individual camera output + add arlo baby to hw lists

* send exception guard logs to device logger

* bump scrypted-arlo-go with new logging interface

* log device-specific errors returned from arlo

* bump 0.7.31 for beta

* more error listeners and some comments

* experimental arlo baby fix

* bump 0.7.32 for beta

* arlo baby nightlight

* bump 0.7.33 for beta

* nightlight device name fix

* bump 0.7.34 for beta

* fix nightlight constructor

* bump 0.7.35 for beta

* bump 0.7.36 for release

* functional sip webrtc 2way

* refactored 2way code + various tweaks throughout

* document sip v2 endpoint

* update backup auth host

* bump 0.7.37 for release

* add media user agent

* sip refactoring bugfixes

* bump 0.8.0 for release
2023-06-26 20:01:53 -07:00
Koushik Dutta
fd8aa70352 rebroadcast: improve prebuffer session logging 2023-06-25 18:13:20 -07:00
Koushik Dutta
be888d215d alexa: fix doorbells 2023-06-25 10:28:46 -07:00
Koushik Dutta
ce5f568a5d server: fix non admin cli login. 2023-06-24 10:49:58 -07:00
Koushik Dutta
336220559f videoanalysis: fix potential leak 2023-06-22 23:32:15 -07:00
Koushik Dutta
8014060a54 rebroadcast :publish 2023-06-22 17:59:15 -07:00
Brett Jia
7f4c8997b9 snapshot: tell ffmpeg pipe input format (#902)
* snapshot: tell ffmpeg pipe input format

* use image2pipe
2023-06-21 16:09:42 -07:00
Koushik Dutta
9f73b92dbd Merge branch 'main' of github.com:koush/scrypted 2023-06-20 20:32:31 -07:00
Koushik Dutta
381892fca6 webrtc: fix dtls cookie race condition 2023-06-20 20:32:26 -07:00
Koushik Dutta
a28df23032 snapshot: add request timeout 2023-06-18 11:14:55 -07:00
Koushik Dutta
dc5456d36f tensorflow-lite: fix yolov8 uint8 to int8 color conversion 2023-06-17 23:50:46 -07:00
Koushik Dutta
3a23e8ed26 coreml: fix mobilenet url 2023-06-17 23:17:56 -07:00
Koushik Dutta
e0db86cb41 cameras: timeout snapshots to free socket 2023-06-17 23:09:23 -07:00
Koushik Dutta
37ccefebd1 tensorflow-lite: readme 2023-06-17 12:17:05 -07:00
Koushik Dutta
0076c4827f tensorflow-lite: yolov8 is not compatible with usb edgetpu 2023-06-17 12:14:49 -07:00
Koushik Dutta
c5c07d8169 tensorflow-lite: fall back to mobilenet if edgepu startup fails 2023-06-16 17:18:46 -07:00
Koushik Dutta
2372acc796 rebroadcast: cleanup sdp rejection 2023-06-16 15:38:48 -07:00
Brett Jia
6b9c3e4aa0 rebroadcast: recover after ffmpeg exits before printing sdp (#890)
* rebroadcast: recover after ffmpeg exits before printing sdp

* Revert "rebroadcast: recover after ffmpeg exits before printing sdp"

This reverts commit aee2124937.

* reject sdp promise on ffmpeg exit
2023-06-16 15:33:47 -07:00
Koushik Dutta
d5b652da8c ring: save push credentials, polling now disabled by default. publish beta 2023-06-16 12:08:22 -07:00
Koushik Dutta
2b9a0f082d predict: refactor, add support for yolov8 on tflite 2023-06-16 12:08:04 -07:00
Koushik Dutta
b10b4d047e openvino: fix labels 2023-06-15 14:09:41 -07:00
Koushik Dutta
74cd23bd88 openvino: functional yolov8 2023-06-15 11:46:37 -07:00
Koushik Dutta
ef742bdb23 coreml/openvino: yolov8 support 2023-06-15 00:54:53 -07:00
Koushik Dutta
6f7fa54f24 coreml: yolov8 default on apple silicon 2023-06-14 20:56:45 -07:00
Koushik Dutta
d9a575cb5a coreml: add yolov8 2023-06-14 18:43:29 -07:00
Koushik Dutta
29094afa4d server: fix typo 2023-06-14 12:35:17 -07:00
Koushik Dutta
62a92fe083 coreml/openvino: improve yolov4, add yolov3 to openvino 2023-06-12 21:39:33 -07:00
Koushik Dutta
9b8bde556c coreml: add yolov4-tiny model 2023-06-12 17:59:05 -07:00
Koushik Dutta
326ef11760 openvino: cleanup 2023-06-12 17:45:38 -07:00
Koushik Dutta
92a0b4a863 client: update sdk 2023-06-12 13:01:50 -07:00
Koushik Dutta
9fd3641455 openvino: support more models 2023-06-12 13:01:42 -07:00
Koushik Dutta
2918cf9ae1 core: ui fixes 2023-06-12 09:40:26 -07:00
Koushik Dutta
6f004db859 openvino: test other models 2023-06-11 23:29:37 -07:00
Koushik Dutta
367d741c5f openvino: fix models to accept rgb instead of bgr 2023-06-11 23:06:00 -07:00
Koushik Dutta
8f83894e49 python-codecs: implement hang protection 2023-06-11 14:24:53 -07:00
Koushik Dutta
ea6e33d159 videoanalysis: prevent snapshot throttling when its only a single camera 2023-06-11 09:13:05 -07:00
Koushik Dutta
1b5565b5b2 openvino: choose better defaults for precision 2023-06-11 08:48:55 -07:00
Koushik Dutta
19692d02c6 docker: update s6 2023-06-10 23:50:44 -07:00
Koushik Dutta
4179698c12 gh: switch back to gh builders 2023-06-10 20:38:50 -07:00
Koushik Dutta
1eea3a87d0 gh: switch back to gh builders 2023-06-10 20:38:07 -07:00
Koushik Dutta
ec89a77955 gh: switch back to gh builders 2023-06-10 20:33:30 -07:00
Koushik Dutta
443158286e gh: remove pi builder 2023-06-10 20:30:54 -07:00
Koushik Dutta
b168ca52c6 gh: remove pi builder 2023-06-10 20:27:46 -07:00
Koushik Dutta
fe01d3a1ba gh: remove pi builder 2023-06-10 20:26:12 -07:00
Koushik Dutta
18cad22627 ha: publish 2023-06-10 19:31:10 -07:00
Koushik Dutta
c67c9a028c ha: publish 2023-06-10 19:30:23 -07:00
Koushik Dutta
0cff8ad5ed docker: fix ffmpeg path 2023-06-10 18:59:11 -07:00
Koushik Dutta
0269959cf3 docker: use apt ffmpeg 2023-06-10 17:22:16 -07:00
Koushik Dutta
1b6de42eca ha: update 2023-06-10 16:49:25 -07:00
Koushik Dutta
39342d5d46 docker: Fix arch detection on pi builders 2023-06-10 16:28:58 -07:00
Koushik Dutta
c4b5af46d0 docker: Fix arch detection on pi builders 2023-06-10 16:27:30 -07:00
Koushik Dutta
a46235d095 python-codecs: publish 2023-06-10 15:25:56 -07:00
Koushik Dutta
848d490a66 Merge branch 'main' of github.com:koush/scrypted 2023-06-10 15:11:12 -07:00
Koushik Dutta
87fbb95157 postrelease 2023-06-10 15:10:25 -07:00
Koushik Dutta
c036da9ae0 Update test.yml 2023-06-09 16:27:09 -07:00
Koushik Dutta
1688fcc126 Merge branch 'main' of github.com:koush/scrypted 2023-06-09 16:17:43 -07:00
Koushik Dutta
99cae0ba31 docker: use nonfree intel media drivers 2023-06-09 16:17:39 -07:00
Koushik Dutta
a7b00b9e91 Update docker-common.yml 2023-06-08 21:18:44 -07:00
Koushik Dutta
3f2a62c6f2 docker: fix dist upgrade 2023-06-08 21:08:35 -07:00
Koushik Dutta
3fc318a370 Update docker.yml 2023-06-08 18:16:46 -07:00
Koushik Dutta
aed8575aa0 github: pi only allows 1 key on default acct 2023-06-08 17:54:08 -07:00
Koushik Dutta
2e28b50588 github: add rpi 4 builder 2023-06-08 17:41:32 -07:00
Koushik Dutta
2e87cc380f github: add rpi 4 builder 2023-06-08 17:34:00 -07:00
Koushik Dutta
1fdd2d4b01 github: rename secret priv key 2023-06-08 17:23:18 -07:00
Koushik Dutta
53b23b2ca8 Merge branch 'main' of github.com:koush/scrypted 2023-06-08 17:18:02 -07:00
Koushik Dutta
54016a9c78 github: update build push action 2023-06-08 17:17:58 -07:00
Koushik Dutta
d207a3b824 docker: switch from wget to curl 2023-06-08 17:16:54 -07:00
Koushik Dutta
e72a74d008 docker: clean up lite builds 2023-06-08 15:29:08 -07:00
Koushik Dutta
d1b907e45b Merge branch 'main' of github.com:koush/scrypted 2023-06-08 15:17:22 -07:00
Koushik Dutta
4a4c47ffe2 docker: clean up lite builds 2023-06-08 15:16:53 -07:00
Koushik Dutta
f6baf99935 Update docker.yml 2023-06-08 14:36:47 -07:00
Koushik Dutta
b5cc138e2b Update docker-common.yml 2023-06-08 14:33:28 -07:00
Koushik Dutta
40738a74cf Update docker-common.yml 2023-06-08 14:23:39 -07:00
Koushik Dutta
d2b1f104ca Update docker-common.yml 2023-06-08 14:17:10 -07:00
Koushik Dutta
6cb4f589c0 Update docker-common.yml 2023-06-08 14:10:01 -07:00
Koushik Dutta
5cf2b26630 Update docker-common.yml 2023-06-08 14:07:37 -07:00
Koushik Dutta
e7f16af04c Update docker-common.yml 2023-06-08 14:06:58 -07:00
Koushik Dutta
6287b9deaa Update docker-common.yml 2023-06-08 13:47:01 -07:00
Koushik Dutta
b9b5fdb712 docker: remove for loop 2023-06-08 13:40:39 -07:00
Koushik Dutta
c85af9c8a5 Merge branch 'main' of github.com:koush/scrypted 2023-06-08 13:36:25 -07:00
Koushik Dutta
069f765507 linux: fix multi python install 2023-06-08 13:36:23 -07:00
Koushik Dutta
0e587abc79 Update docker-common.yml 2023-06-08 13:27:11 -07:00
Koushik Dutta
47770c0a8d Update docker-common.yml 2023-06-08 13:18:23 -07:00
Koushik Dutta
82d1c3afe5 docker: revert sh expression 2023-06-08 12:54:54 -07:00
Koushik Dutta
1c9b52ce4f docker: move intel stuff into footer 2023-06-08 11:51:47 -07:00
Koushik Dutta
adcd9fa537 linux: move intel stuff out since it requires jammy 2023-06-08 11:47:06 -07:00
Koushik Dutta
91e2c2870b linux: quote commands for execution 2023-06-08 10:51:57 -07:00
Koushik Dutta
1fc892815d docker: fix piping 2023-06-08 10:32:07 -07:00
Koushik Dutta
38ed1acc15 docker: fix typo 2023-06-08 10:20:51 -07:00
Koushik Dutta
3bdc9ab930 docker: use intel repos for jammy 2023-06-08 10:11:02 -07:00
Koushik Dutta
bfa6346333 linux: fix dockerfile translation/exec 2023-06-08 10:04:19 -07:00
Koushik Dutta
fcbb308cb8 install: fix linux local syntax 2023-06-08 09:54:36 -07:00
Koushik Dutta
f137edcc8c install: fix linux local syntax 2023-06-08 09:53:17 -07:00
Koushik Dutta
53e6f083b9 docker: working jammy + tflite 2023-06-08 09:46:38 -07:00
Koushik Dutta
0f96fdb4bc tensorflow-lite: publish 2023-06-08 09:28:08 -07:00
Koushik Dutta
96ea3f3b27 postbeta 2023-06-08 09:22:54 -07:00
Koushik Dutta
a31d6482af postbeta 2023-06-08 09:12:21 -07:00
Koushik Dutta
be16bf7858 postbeta 2023-06-08 08:50:40 -07:00
Koushik Dutta
1dad0126bc postbeta 2023-06-08 08:08:24 -07:00
Koushik Dutta
9292ebbe48 tensorflow-lite: fix missing settings, add python version hints 2023-06-08 07:54:41 -07:00
Koushik Dutta
0b3a1a1998 docker: update before install 2023-06-07 16:25:22 -07:00
Koushik Dutta
b5d58b6899 Merge branch 'main' of github.com:koush/scrypted 2023-06-07 16:11:30 -07:00
Koushik Dutta
215a56f70e docker: jammy default 2023-06-07 16:11:08 -07:00
Koushik Dutta
c593701e72 gh: Update docker.yml 2023-06-07 15:59:53 -07:00
Koushik Dutta
46351f2fd7 docs: update 2023-06-07 15:22:35 -07:00
Koushik Dutta
9bce4acd14 postbeta 2023-06-07 15:20:38 -07:00
Koushik Dutta
cba20ec887 postbeta 2023-06-07 15:18:48 -07:00
Koushik Dutta
7c41516cce python-codecs: fix stride handling 2023-06-07 15:10:40 -07:00
Koushik Dutta
1f209072ba opencv: relax threshold defaults 2023-06-07 15:09:04 -07:00
Koushik Dutta
8978bff8a9 postbeta 2023-06-07 10:32:52 -07:00
Koushik Dutta
04c500b855 sdk: update 2023-06-07 10:32:18 -07:00
Koushik Dutta
8b4859579c rebroadcast: strip out all legacy audio handling 2023-06-07 08:34:45 -07:00
Koushik Dutta
90deaf1161 postbeta 2023-06-07 08:22:23 -07:00
Koushik Dutta
de56a8c653 server: remove dead code 2023-06-07 08:22:15 -07:00
Koushik Dutta
a5215ae92b Merge branch 'main' of github.com:koush/scrypted 2023-06-07 08:17:22 -07:00
Koushik Dutta
73cd40b540 server: strip and update dependencies 2023-06-07 08:17:13 -07:00
Koushik Dutta
93556dd404 postbeta 2023-06-07 07:40:15 -07:00
Brett Jia
125b436cb6 arlo: upstreaming changes (#844)
* remove webrtc emulation

* turn on two way audio by default

* add arloq pings and tweak log messages

* bump for release

* bump scrypted-arlo-go to remove unused code

* add arloqs pings

* better 2fa selection error msg + get sipinfo

* wip sip

* re-enable basestation push to talk

* bump for 0.7.24 release

* bump to working wheels

* disable MQTT backend and use SSE as default

* some login error handling

* remove dependency on cryptography and switch back to scrypted tool

* bump for 0.7.27 release

* implement DASH container

* expand documentation

* expand documentation

* bump for 0.7.28 beta

* discourage DASH further

* cleaner container selection

* tweak documentation

* tweak documentation

* bump for 0.7.29 release
2023-06-04 07:29:45 -04:00
Koushik Dutta
0a4ea032f5 client: include hostname property in login challenge 2023-06-02 15:36:05 -07:00
slyoldfox
c658cee5c9 sip: v0.0.9
* * Fix an issues in SIP.js where the ACK and BYE replies didn't go to the correct uri

* * Implemented outgoing SIP MESSAGE sending
* Adding voice mail check
* Adding a lock for a bticino doorbell

* Cleanup dependencies, code in sip, bticino plugins

* Cleanup dependencies, code in sip, bticino plugins

* Clear stale devices from our map and clear the voicemail check

* Do not require register() for a SIP call

* Narrow down the event matching to deletes of devices

* Use releaseDevice to clean up stale entries

* Fix uuid version

* Attempt to make two way audio work

* Attempt to make two way audio work - fine tuning

* Enable incoming doorbell events

* SipCall was never a "sip call" but more like a manager
SipSession was more the "sip call"

* * Rename sip registered session to persistent sip manager
* Allow handling of call pickup in homekit (hopefully!)

* * use the consoles from the camera object

* * use the consoles from the camera object

* * Fix the retry timer

* * Added webhook url

* * parse record route correctly

* * Add gruu and use a custom fork of sip.js which supports keepAlive SIP clients (and dropped Websocket)
* use cross-env in package.json

* Added webhook urls for faster handling of events

* Added videoclips

* plugins/sip 0.0.6

* plugins/bticino 0.0.7

* Implemented Reboot interface

* v0.0.9 which works with c300-controller

* better validation during creation of device
* automatically sets the correct settings depending on the data sent back from the controller

---------

Co-authored-by: Marc Vanbrabant <marc@foreach.be>
2023-06-02 13:37:52 -04:00
Koushik Dutta
6589176c8b Merge branch 'main' of github.com:koush/scrypted 2023-06-01 20:33:33 -07:00
Koushik Dutta
6c4c83f655 rebroadcast: hack fix for ffmpeg sdp race condition 2023-06-01 20:33:28 -07:00
Billy Zoellers
8d4124adda add types to support Air Purifier (#833)
* add types to support Air Purifier

* fix homekit type for airpurifier
2023-06-01 15:07:25 -04:00
Brett Jia
b7cda86df7 fix typo reported by community member (#831) 2023-05-29 17:23:24 -07:00
Koushik Dutta
6622e13e51 openvino: fix setting typo 2023-05-29 15:11:41 -07:00
Koushik Dutta
cbc45da679 openvino: add setting for compute target 2023-05-29 15:07:19 -07:00
Koushik Dutta
e7d06c66af gha: only do s6 builds 2023-05-29 10:21:57 -07:00
Koushik Dutta
ea02bc3b6f github: switch to jammy 2023-05-29 10:21:28 -07:00
Koushik Dutta
2b43cb7d15 postbeta 2023-05-29 10:20:00 -07:00
Koushik Dutta
f3c0362e18 server: prep for python3.10 2023-05-29 10:19:51 -07:00
Koushik Dutta
817ae42250 docker: fix install prompts 2023-05-28 19:58:12 -07:00
Koushik Dutta
8043f83f20 github: self hosted runner 2023-05-28 15:55:12 -07:00
Koushik Dutta
d33ab5dbcf gihub: self hosted runner 2023-05-28 15:54:50 -07:00
Koushik Dutta
2b1674bea8 docker/github: switch to jammy 2023-05-28 15:38:40 -07:00
Koushik Dutta
f045e59258 docker: normalize Dockerfile across arch 2023-05-28 12:57:55 -07:00
Koushik Dutta
9125aafc07 openvino: rollback 2023-05-28 12:55:13 -07:00
Koushik Dutta
6f5244ec9f videoanalysis: correctly pass motion zones to object detector 2023-05-28 09:01:21 -07:00
Koushik Dutta
f1eb2f988a openvino: unlock version for jammy 2023-05-27 23:10:35 -07:00
Koushik Dutta
1f659d9a72 python-codecs: move dimensions into caps 2023-05-27 23:09:42 -07:00
Koushik Dutta
dd98f12f2a python-codecs: fix pil rgba to jpg. fix image close race condition. 2023-05-27 22:46:55 -07:00
Koushik Dutta
2063e3822a docker: focal builds 2023-05-27 20:25:10 -07:00
Koushik Dutta
f7495a7a76 docker: update base image fingerprint 2023-05-27 18:16:45 -07:00
Koushik Dutta
fddb9c655f docker: use lunar 2023-05-27 18:05:32 -07:00
Koushik Dutta
297e7a7b4f docker: use jammy and lunar 2023-05-27 17:51:05 -07:00
Koushik Dutta
29e080f6b6 docker: switch back to ubuntu for better driver supports and deadsnakes ppa 2023-05-27 17:49:12 -07:00
Koushik Dutta
c72ea24794 python-codecs: fix vaapi post procesisng 2023-05-27 10:22:31 -07:00
Koushik Dutta
ada80796de homekit: fix basic fans 2023-05-27 09:37:30 -07:00
Koushik Dutta
1ebcf32998 python-codecs: fix vaapi gray output 2023-05-26 14:16:50 -07:00
Koushik Dutta
79765ba58e python-codecs: fix assert spam, code cleanups 2023-05-26 08:56:27 -07:00
Koushik Dutta
ff4665520c python-codecs: bug fixes 2023-05-25 23:34:49 -07:00
Koushik Dutta
be5b810335 python-codecs: cleanup code, add some fast paths 2023-05-25 23:08:15 -07:00
Koushik Dutta
fdc99b7fa6 python-codecs: major refactor to support hw acceleration and on demand color space conversion 2023-05-25 10:48:25 -07:00
Koushik Dutta
f730d13cbd ring: fix busted ass ring polling/push 2023-05-24 17:58:51 -07:00
Koushik Dutta
af02753cef server/core: support built in server updates 2023-05-23 12:04:02 -07:00
Koushik Dutta
9334d1c2a4 server: fix potential plugin startup hang 2023-05-23 08:48:26 -07:00
Koushik Dutta
71ecc07e2b webrtc: respect device pixel ratio 2023-05-23 01:44:29 -07:00
Koushik Dutta
5310dd5ff6 ui: social, account creation cleanups 2023-05-22 19:01:15 -07:00
Koushik Dutta
adf1a10659 sdk: image resize filters 2023-05-22 09:45:21 -07:00
Koushik Dutta
2ecc26c914 docker: use new install env var 2023-05-22 08:52:56 -07:00
Koushik Dutta
9a49416831 ha: use diff env var 2023-05-22 08:51:45 -07:00
Koushik Dutta
f0eff01898 ha: bump version, add env variable to prevent future notifications 2023-05-22 08:50:05 -07:00
Koushik Dutta
edd071739f python-codecs: dont feed preroll into queue 2023-05-21 22:48:06 -07:00
Koushik Dutta
ab81c568bc sdk: update 2023-05-21 22:44:14 -07:00
Koushik Dutta
62470df0af server: fix env anon login 2023-05-21 21:54:12 -07:00
Koushik Dutta
19b83eb056 postrelease 2023-05-21 21:53:43 -07:00
Koushik Dutta
b75d4cbfd4 postbeta 2023-05-21 21:52:41 -07:00
Koushik Dutta
8c0bb7b205 postrelease 2023-05-21 14:51:13 -07:00
204 changed files with 6458 additions and 3826 deletions

View File

@@ -1,50 +0,0 @@
name: Publish Scrypted (git HEAD)
on:
workflow_dispatch:
release:
types: [published]
jobs:
build:
name: Push Docker image to Docker Hub
runs-on: ubuntu-latest
strategy:
matrix:
node: ["16-bullseye"]
steps:
- name: Check out the repo
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to Github Container Registry
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image (scrypted)
uses: docker/build-push-action@v2
with:
build-args: BASE=${{ matrix.node }}
context: .
file: docker/Dockerfile.HEAD
platforms: linux/amd64,linux/arm64,linux/armhf
push: true
tags: |
koush/scrypted:HEAD
ghcr.io/koush/scrypted:HEAD
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -2,56 +2,71 @@ name: Publish Scrypted Common
on:
workflow_dispatch:
release:
types: [published]
schedule:
# publish the common base once a month.
- cron: '30 8 2 * *'
jobs:
build:
name: Push Docker image to Docker Hub
# runs-on: self-hosted
runs-on: ubuntu-latest
strategy:
matrix:
NODE_VERSION: ["18"]
BASE: ["bullseye", "bookworm"]
BASE: ["jammy"]
FLAVOR: ["full", "lite", "thin"]
steps:
- name: Check out the repo
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v2
# - name: Set up SSH
# uses: MrSquaare/ssh-setup-action@v2
# with:
# host: 192.168.2.124
# private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
# - name: Set up SSH
# uses: MrSquaare/ssh-setup-action@v2
# with:
# host: 192.168.2.119
# private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v2
# with:
# platforms: linux/arm64,linux/armhf
# append: |
# - endpoint: ssh://koush@192.168.2.124
# # platforms: linux/arm64
# platforms: linux/arm64
# # - endpoint: ssh://koush@192.168.2.119
# # platforms: linux/armhf
- name: Login to Docker Hub
uses: docker/login-action@v1
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to Github Container Registry
uses: docker/login-action@v1
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image (scrypted-common)
uses: docker/build-push-action@v2
uses: docker/build-push-action@v4
with:
build-args: |
NODE_VERSION=${{ matrix.NODE_VERSION }}
BASE=${{ matrix.BASE }}
context: install/docker/
file: install/docker/Dockerfile.${{ matrix.FLAVOR }}
platforms: linux/amd64,linux/arm64,linux/armhf
platforms: linux/amd64,linux/armhf,linux/arm64
push: true
tags: |
koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}
# ${{ matrix.NODE_VERSION == '16-bullseye' && 'koush/scrypted-common:latest' || '' }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -15,10 +15,11 @@ on:
jobs:
build:
name: Push Docker image to Docker Hub
# runs-on: self-hosted
runs-on: ubuntu-latest
strategy:
matrix:
BASE: ["18-bullseye-full", "18-bullseye-lite", "18-bullseye-thin"]
BASE: ["18-jammy-full", "18-jammy-lite", "18-jammy-thin"]
SUPERVISOR: ["", ".s6"]
steps:
- name: Check out the repo
@@ -38,9 +39,29 @@ jobs:
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
# - name: Set up SSH
# uses: MrSquaare/ssh-setup-action@v2
# with:
# host: 192.168.2.124
# private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
# - name: Set up SSH
# uses: MrSquaare/ssh-setup-action@v2
# with:
# host: 192.168.2.119
# private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
# with:
# platforms: linux/arm64,linux/armhf
# append: |
# - endpoint: ssh://koush@192.168.2.124
# # platforms: linux/arm64
# platforms: linux/arm64
# # - endpoint: ssh://koush@192.168.2.119
# # platforms: linux/armhf
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
@@ -55,7 +76,7 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image
uses: docker/build-push-action@v3
uses: docker/build-push-action@v4
with:
build-args: |
BASE=${{ matrix.BASE }}
@@ -66,19 +87,19 @@ jobs:
push: true
tags: |
${{ format('koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '.s6' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '' && 'koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '' && 'koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '' && 'koush/scrypted:thin' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:lite-s6' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:thin-s6' || '' }}
${{ matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '.s6' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '' && 'koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '' && 'koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '' && 'koush/scrypted:thin' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:lite-s6' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:thin-s6' || '' }}
${{ format('ghcr.io/koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '.s6' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:thin' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:lite-s6' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:thin-s6' || '' }}
${{ matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '.s6' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:thin' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:lite-s6' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:thin-s6' || '' }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -3,9 +3,9 @@ name: Test
on:
push:
branches: ["main"]
paths: ["docker/**", ".github/workflows/test.yml"]
paths: ["install/**", ".github/workflows/test.yml"]
pull_request:
paths: ["docker/**", ".github/workflows/test.yml"]
paths: ["install/**", ".github/workflows/test.yml"]
workflow_dispatch:
jobs:

2 .gitignore vendored
View File

@@ -1,2 +1,4 @@
.DS_Store
__pycache__
venv
.venv

View File

@@ -1,59 +1,20 @@
# Scrypted
Scrypted is a high performance home video integration and automation platform.
* Video load instantly, everywhere: [Demo](https://www.reddit.com/r/homebridge/comments/r34k6b/if_youre_using_homebridge_for_cameras_ditch_it/)
* [HomeKit Secure Video Support](https://github.com/koush/scrypted/wiki/HomeKit-Secure-Video-Setup)
* Google Home support: "Ok Google, Stream Backyard"
* Alexa Support: Streaming to Alexa app on iOS/Android and Echo Show.
Scrypted is a high performance home video integration platform and NVR with smart detections. [Instant, low latency, streaming](https://streamable.com/xbxn7z) to HomeKit, Google Home, and Alexa. Supports most cameras. [Learn more](https://docs.scrypted.app).
<img width="400" alt="Scrypted_Management_Console" src="https://user-images.githubusercontent.com/73924/185666320-ae972867-6c2c-488a-8413-fd8a215e9fee.png">
<img src="https://github.com/koush/scrypted/assets/73924/57e1d556-cd3d-4448-81f9-a6c51b6513de">
# Installation
## Installation and Documentation
Select the appropriate guide. After installation is finished, remember to visit [HomeKit Secure Video Setup](https://github.com/koush/scrypted/wiki/HomeKit-Secure-Video-Setup).
Installation and camera onboarding instructions can be found in the [docs](https://docs.scrypted.app).
* [Raspberry Pi](https://github.com/koush/scrypted/wiki/Installation:-Raspberry-Pi)
* Linux
* [Docker Compose](https://github.com/koush/scrypted/wiki/Installation:-Docker-Compose-Linux) - This is the recommended method. Local installation may interfere with other server software.
* [Docker](https://github.com/koush/scrypted/wiki/Installation:-Docker-Linux) - Use Docker Compose. This is a reference documentation.
* [Local Installation](https://github.com/koush/scrypted/wiki/Installation:-Linux) - Use this if Docker scares you or whatever.
* Mac
* [Local Installation](https://github.com/koush/scrypted/wiki/Installation:-Mac)
<!-- * Docker Desktop is [not supported](https://github.com/koush/scrypted/wiki/Installation:-Docker-Desktop). -->
* Windows
* [Local Installation](https://github.com/koush/scrypted/wiki/Installation:-Windows)
* [WSL2 Installation](https://github.com/koush/scrypted/wiki/Installation:-WSL2-Windows)
* [Home Assistant OS](https://github.com/koush/scrypted/wiki/Installation:-Home-Assistant-OS)
<!-- * Docker Desktop is [not supported](https://github.com/koush/scrypted/wiki/Installation:-Docker-Desktop). -->
* [ReadyNAS: Docker](https://github.com/koush/scrypted/wiki/Installation:-Docker-ReadyNAS)
* [Synology: Docker](https://github.com/koush/scrypted/wiki/Installation:-Docker-Synology-NAS)
* [QNAP: Docker](https://github.com/koush/scrypted/wiki/Installation:-Docker-QNAP-NAS)
* [Unraid: Docker](https://github.com/koush/scrypted/wiki/Installation:-Docker-Unraid)
## Discord
Chat on Discord for support, tips, announcements, and bug reporting. There is an active and helpful community.
[Join Scrypted Discord](https://discord.gg/DcFzmBHYGq)
## Wiki
There are many topics covered in the [Scrypted Wiki](https://github.com/koush/scrypted/wiki) sidebar. Review them for documented support, tips, and guides before asking for assistance on GitHub or Discord.
## Supported Platforms
* Google Home
* Apple HomeKit
* Amazon Alexa
Supported accessories:
* Camera and Core Plugins: https://github.com/koush/scrypted/tree/main/plugins
* Community Plugins: https://github.com/orgs/scryptedapp/repositories
## Community
Scrypted has active communities on [Discord](https://discord.gg/DcFzmBHYGq), [Reddit](https://reddit.com/r/scrypted), and [Github](https://github.com/koush/scrypted). Check them out if you have questions!
## Development
## Debug Scrypted Plugins in VSCode
## Debug Scrypted Plugins in VS Code
```sh
# this is an example for homekit.
@@ -66,7 +27,7 @@ cd scrypted
code plugins/homekit
```
You can now launch (using the Start Debugging play button) the HomeKit Plugin in VSCode. Please be aware that you do *not* need to restart the Scrypted Server if you make changes to a plugin. Edit the plugin, launch, and the updated plugin will deploy on the running server.
You can now launch (using the Start Debugging play button) the HomeKit Plugin in VS Code. Please be aware that you do *not* need to restart the Scrypted Server if you make changes to a plugin. Edit the plugin, launch, and the updated plugin will deploy on the running server.
If you do not want to set up VS Code, you can also run build and install the plugin directly from the command line:
@@ -80,7 +41,7 @@ npm run build && npm run scrypted-deploy 127.0.0.1
Want to write your own plugin? Full documentation is available here: https://developer.scrypted.app
## Debug the Scrypted Server in VSCode
## Debug the Scrypted Server in VS Code
Debugging the server should not be necessary, as the server only provides the hosting and RPC mechanism for plugins. The following is for reference purpose. Most development can be done by debugging the relevant plugin.
@@ -94,4 +55,4 @@ cd scrypted
code server
```
You can now launch the Scrypted Server in VSCode.
You can now launch the Scrypted Server in VS Code.

View File

@@ -4,6 +4,7 @@ import { EventEmitter } from 'events';
import { Server } from 'net';
import { Duplex } from 'stream';
import { cloneDeep } from './clone-deep';
import { Deferred } from "./deferred";
import { listenZeroSingleClient } from './listen-cluster';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from './media-helpers';
import { createRtspParser } from "./rtsp-server";
@@ -228,6 +229,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
ffmpegLogInitialOutput(console, cp, undefined, options?.storage);
cp.on('exit', () => kill(new Error('ffmpeg exited')));
const deferredStart = new Deferred<void>();
// now parse the created pipes
const start = () => {
for (const p of startParsers) {
@@ -246,6 +248,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
const { resetActivityTimer } = setupActivityTimer(container, kill, events, options?.timeout);
for await (const chunk of parser.parse(pipe as any, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
await deferredStart.promise;
events.emit(container, chunk);
resetActivityTimer();
}
@@ -257,21 +260,26 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
});
};
await parseVideoCodec(cp);
const rtsp = (options.parsers as any).rtsp as ReturnType<typeof createRtspParser>;
rtsp.sdp.then(sdp => {
const parsed = parseSdp(sdp);
const audio = parsed.msections.find(msection=>msection.type === 'audio');
const video = parsed.msections.find(msection=>msection.type === 'video');
const audio = parsed.msections.find(msection => msection.type === 'audio');
const video = parsed.msections.find(msection => msection.type === 'video');
inputVideoCodec = video?.codec;
inputAudioCodec = audio?.codec;
});
const sdp = rtsp.sdp.then(sdpString => [Buffer.from(sdpString)]);
const sdp = new Deferred<Buffer[]>();
rtsp.sdp.then(r => sdp.resolve([Buffer.from(r)]));
killed.then(() => sdp.reject(new Error("ffmpeg killed before sdp could be parsed")));
start();
return {
start,
sdp,
start() {
deferredStart.resolve();
},
sdp: sdp.promise,
get inputAudioCodec() {
return inputAudioCodec;
},

View File

@@ -1,6 +1,6 @@
import net from 'net';
import { once } from 'events';
import dgram, { SocketType } from 'dgram';
import { once } from 'events';
import net from 'net';
export async function closeQuiet(socket: dgram.Socket | net.Server) {
if (!socket)
@@ -37,6 +37,23 @@ export async function createBindZero(socketType?: SocketType) {
return createBindUdp(0, socketType);
}
export async function createSquentialBindZero(socketType?: SocketType) {
let attempts = 0;
while (true) {
const rtpServer = await createBindZero(socketType);
try {
const rtcpServer = await createBindUdp(rtpServer.port + 1, socketType);
return [rtpServer, rtcpServer];
}
catch (e) {
attempts++;
closeQuiet(rtpServer.server);
}
if (attempts === 10)
throw new Error('unable to reserve sequential udp ports')
}
}
export async function reserveUdpPort() {
const udp = await createBindZero();
await new Promise(resolve => udp.server.close(() => resolve(undefined)));
@@ -62,4 +79,4 @@ export async function bind(server: dgram.Socket, port: number) {
}
}
export { listenZero, listenZeroSingleClient, ListenZeroSingleClientTimeoutError } from "@scrypted/server/src/listen-zero";
export { ListenZeroSingleClientTimeoutError, listenZero, listenZeroSingleClient } from "@scrypted/server/src/listen-zero";

View File

@@ -6,14 +6,14 @@ import { parseHTTPHeadersQuotedKeyValueSet } from 'http-auth-utils/dist/utils';
import net from 'net';
import { Duplex, Readable, Writable } from 'stream';
import tls from 'tls';
import { URL } from 'url';
import { Deferred } from './deferred';
import { closeQuiet, createBindUdp, createBindZero, listenZeroSingleClient } from './listen-cluster';
import { closeQuiet, createBindZero, createSquentialBindZero, listenZeroSingleClient } from './listen-cluster';
import { timeoutPromise } from './promise-utils';
import { readLength, readLine } from './read-stream';
import { MSection, parseSdp } from './sdp-utils';
import { sleep } from './sleep';
import { StreamChunk, StreamParser, StreamParserOptions } from './stream-parser';
import { URL } from 'url';
const REQUIRED_WWW_AUTHENTICATE_KEYS = ['realm', 'nonce'];
@@ -195,48 +195,17 @@ export function createRtspParser(options?: StreamParserOptions): RtspStreamParse
'-f', 'rtsp',
],
findSyncFrame(streamChunks: StreamChunk[]) {
let foundIndex: number;
let nonVideo: {
[codec: string]: StreamChunk,
} = {};
const createSyncFrame = () => {
const ret = streamChunks.slice(foundIndex);
// for (const nv of Object.values(nonVideo)) {
// ret.unshift(nv);
// }
return ret;
}
for (let prebufferIndex = 0; prebufferIndex < streamChunks.length; prebufferIndex++) {
const streamChunk = streamChunks[prebufferIndex];
if (streamChunk.type !== 'h264') {
nonVideo[streamChunk.type] = streamChunk;
continue;
}
if (findH264NaluType(streamChunk, H264_NAL_TYPE_SPS))
foundIndex = prebufferIndex;
}
if (foundIndex !== undefined)
return createSyncFrame();
nonVideo = {};
// some streams don't contain codec info, so find an idr frame instead.
for (let prebufferIndex = 0; prebufferIndex < streamChunks.length; prebufferIndex++) {
const streamChunk = streamChunks[prebufferIndex];
if (streamChunk.type !== 'h264') {
nonVideo[streamChunk.type] = streamChunk;
continue;
if (findH264NaluType(streamChunk, H264_NAL_TYPE_SPS) || findH264NaluType(streamChunk, H264_NAL_TYPE_IDR)) {
return streamChunks.slice(prebufferIndex);
}
if (findH264NaluType(streamChunk, H264_NAL_TYPE_IDR))
foundIndex = prebufferIndex;
}
if (foundIndex !== undefined)
return createSyncFrame();
// oh well!
},
sdp: new Promise<string>(r => resolve = r),
@@ -964,8 +933,7 @@ export class RtspServer {
const match = transport.match(/.*?client_port=([0-9]+)-([0-9]+)/);
const [_, rtp, rtcp] = match;
const rtpServer = await createBindZero();
const rtcpServer = await createBindUdp(rtpServer.port + 1);
const [rtpServer, rtcpServer] = await createSquentialBindZero();
this.client.on('close', () => closeQuiet(rtpServer.server));
this.client.on('close', () => closeQuiet(rtcpServer.server));
this.setupTracks[msection.control] = {

View File

@@ -1,6 +1,6 @@
# Home Assistant Addon Configuration
name: Scrypted
version: "18-bullseye-full.s6-v0.13.2"
version: "18-jammy-full.s6-v0.39.4"
slug: scrypted
description: Scrypted is a high performance home video integration and automation platform
url: "https://github.com/koush/scrypted"
@@ -27,6 +27,7 @@ environment:
SCRYPTED_NVR_VOLUME: "/data/scrypted_nvr"
SCRYPTED_ADMIN_ADDRESS: "172.30.32.2"
SCRYPTED_ADMIN_USERNAME: "homeassistant"
SCRYPTED_INSTALL_ENVIRONMENT: "ha"
backup_exclude:
- '/server/**'
- '/data/scrypted_nvr/**'

View File

@@ -1,4 +1,4 @@
ARG BASE="18-bullseye-full"
ARG BASE="18-jammy-full"
FROM koush/scrypted-common:${BASE}
WORKDIR /

View File

@@ -1,4 +1,4 @@
ARG BASE="16-bullseye"
ARG BASE="16-jammy"
FROM koush/scrypted-common:${BASE}
WORKDIR /

View File

@@ -6,63 +6,59 @@
# This common file will be used by both Docker and the linux
# install script.
################################################################
ARG BASE="bullseye"
FROM debian:${BASE} as header
ARG BASE="jammy"
FROM ubuntu:${BASE} as header
RUN apt-get update && apt-get -y install curl wget
ENV DEBIAN_FRONTEND=noninteractive
# base tools and development stuff
RUN apt-get update && apt-get -y install \
curl software-properties-common apt-utils \
build-essential \
cmake \
ffmpeg \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
pkg-config && \
apt-get -y update && \
apt-get -y upgrade
# switch to nvm?
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update
RUN apt-get install -y nodejs
RUN apt-get update && apt-get install -y nodejs
# python native
RUN apt-get -y install \
python3 \
python3-dev \
python3-pip \
python3-setuptools \
python3-wheel
# Coral Edge TPU
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
RUN apt-get -y update
RUN apt-get -y install libedgetpu1-std
# intel opencl gpu for openvino
RUN if [ "$(uname -m)" = "x86_64" ]; \
then \
apt-get -y install \
intel-opencl-icd; \
fi
RUN apt-get -y install software-properties-common apt-utils
RUN apt-get -y update
RUN apt-get -y upgrade
# base development stuff
RUN apt-get -y install \
build-essential \
cmake \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
libvips \
pkg-config
RUN apt-get -y update && apt-get -y install libedgetpu1-std
# these are necessary for pillow-simd, additional on disk size is small
# but could consider removing this.
RUN apt-get -y install \
libjpeg-dev zlib1g-dev
# plugins support fallback to pillow, but vips is faster.
RUN apt-get -y install \
libvips
# gstreamer native https://gstreamer.freedesktop.org/documentation/installing/on-linux.html?gi-language=c#install-gstreamer-on-ubuntu-or-debian
RUN apt-get -y install \
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav gstreamer1.0-alsa \
gstreamer1.0-vaapi
# python native
# python3 gstreamer bindings
RUN apt-get -y install \
python3 \
python3-dev \
python3-gst-1.0 \
python3-pip \
python3-setuptools \
python3-wheel
python3-gst-1.0
# armv7l does not have wheels for any of these
# and compile times would forever, if it works at all.
@@ -70,21 +66,22 @@ RUN apt-get -y install \
# which causes weird behavior in python which looks at the arch version
# which still reports 64bit, even if running in 32bit docker.
# this scenario is not supported and will be reported at runtime.
RUN if [ "$(uname -m)" != "x86_64" ]; \
then \
apt-get -y install \
python3-matplotlib \
python3-numpy \
python3-opencv \
python3-pil \
python3-skimage; \
fi
# this bit is not necessary on amd64, but leaving it for consistency.
RUN apt-get -y install \
python3-matplotlib \
python3-numpy \
python3-opencv \
python3-pil \
python3-skimage
# python pip
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
RUN python3 -m pip install --upgrade pip
# pyvips is broken on x86 due to mismatch ffi
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install debugpy typing_extensions psutil
@@ -96,15 +93,42 @@ RUN python3 -m pip install debugpy typing_extensions psutil
################################################################
FROM header as base
ENV SCRYPTED_DOCKER_SERVE="true"
# intel opencl gpu for openvino
RUN bash -c "if [ \"$(uname -m)\" == \"x86_64\" ]; \
then \
apt-get update && apt-get install -y gpg-agent && \
rm -f /usr/share/keyrings/intel-graphics.gpg && \
curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --output /usr/share/keyrings/intel-graphics.gpg && \
echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \
apt-get -y update && \
apt-get -y install intel-opencl-icd intel-media-va-driver-non-free && \
apt-get -y dist-upgrade; \
fi"
# python 3.9 from ppa.
# 3.9 is the version with prebuilt support for tensorflow lite
RUN add-apt-repository ppa:deadsnakes/ppa && \
apt-get -y install \
python3.9 \
python3.9-dev \
python3.9-distutils
RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3.9 -m pip install debugpy typing_extensions psutil
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
RUN test -f "/usr/bin/ffmpeg"
ENV SCRYPTED_FFMPEG_PATH="/usr/bin/ffmpeg"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=full
ENV SCRYPTED_BASE_VERSION="20230608"
ENV SCRYPTED_DOCKER_FLAVOR="full"
################################################################
# End section generated from template/Dockerfile.full.footer

View File

@@ -1,27 +1,24 @@
ARG BASE="bullseye"
FROM debian:${BASE} as header
ARG BASE="jammy"
FROM ubuntu:${BASE} as header
RUN apt-get update && apt-get -y install curl wget
ENV DEBIAN_FRONTEND=noninteractive
# switch to nvm?
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update
RUN apt-get install -y nodejs
RUN apt-get -y update
RUN apt-get -y upgrade
RUN apt-get -y install software-properties-common apt-utils
RUN apt-get -y update
# base development stuff
RUN apt-get -y install \
# base tools and development stuff
RUN apt-get update && apt-get -y install \
curl software-properties-common apt-utils \
build-essential \
cmake \
ffmpeg \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
pkg-config
pkg-config && \
apt-get -y update && \
apt-get -y upgrade
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update && apt-get install -y nodejs
# python native
RUN apt-get -y install \
@@ -36,12 +33,15 @@ RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install debugpy typing_extensions psutil
ENV SCRYPTED_DOCKER_SERVE="true"
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
RUN test -f "/usr/bin/ffmpeg"
ENV SCRYPTED_FFMPEG_PATH="/usr/bin/ffmpeg"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=lite
ENV SCRYPTED_BASE_VERSION="20230608"
ENV SCRYPTED_DOCKER_FLAVOR="lite"

View File

@@ -1,4 +1,4 @@
FROM koush/18-bullseye-full.s6
FROM koush/18-jammy-full.s6
WORKDIR /

View File

@@ -1,8 +1,8 @@
ARG BASE="18-bullseye-full"
ARG BASE="18-jammy-full"
FROM koush/scrypted-common:${BASE}
# avahi advertiser support
RUN apt-get -y install \
RUN apt-get update && apt-get -y install \
libnss-mdns \
avahi-discover \
libavahi-compat-libdnssd-dev \
@@ -12,13 +12,14 @@ RUN apt-get -y install \
COPY fs /
# s6 process supervisor
ARG S6_OVERLAY_VERSION=3.1.1.2
ARG S6_OVERLAY_VERSION=3.1.5.0
ENV S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0
ENV S6_KEEP_ENV=1
RUN case "$(uname -m)" in \
x86_64) S6_ARCH='x86_64';; \
armv7l) S6_ARCH='armhf';; \
aarch64) S6_ARCH='aarch64';; \
ARG TARGETARCH
RUN case "${TARGETARCH}" in \
amd64) S6_ARCH='x86_64';; \
arm) S6_ARCH='armhf';; \
arm64) S6_ARCH='aarch64';; \
*) echo "Your system architecture isn't supported."; exit 1 ;; \
esac \
&& cd /tmp \

View File

@@ -1,25 +1,25 @@
ARG BASE="bullseye"
FROM debian:${BASE} as header
ARG BASE="jammy"
FROM ubuntu:${BASE} as header
RUN apt-get update && apt-get -y install curl wget
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get -y update && \
apt-get -y upgrade && \
apt-get -y install curl software-properties-common apt-utils ffmpeg
# switch to nvm?
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update
RUN apt-get install -y nodejs
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash - && apt-get update && apt-get install -y nodejs
RUN apt-get -y update
RUN apt-get -y upgrade
RUN apt-get -y install software-properties-common apt-utils
RUN apt-get -y update
ENV SCRYPTED_DOCKER_SERVE="true"
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
RUN test -f "/usr/bin/ffmpeg"
ENV SCRYPTED_FFMPEG_PATH="/usr/bin/ffmpeg"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=thin
ENV SCRYPTED_BASE_VERSION="20230608"
ENV SCRYPTED_DOCKER_FLAVOR="thin"

View File

@@ -1,3 +1,3 @@
./docker-build.sh
docker build -t koush/scrypted:18-bullseye-full.nvidia -f Dockerfile.nvidia
docker build -t koush/scrypted:18-jammy-full.nvidia -f Dockerfile.nvidia

View File

@@ -3,7 +3,8 @@
set -x
NODE_VERSION=18
IMAGE_BASE=bookworm
SCRYPTED_INSTALL_VERSION=beta
IMAGE_BASE=jammy
FLAVOR=full
BASE=$NODE_VERSION-$IMAGE_BASE-$FLAVOR
echo $BASE
@@ -14,4 +15,4 @@ docker build -t koush/scrypted-common:$BASE -f Dockerfile.$FLAVOR \
--build-arg NODE_VERSION=$NODE_VERSION --build-arg BASE=$IMAGE_BASE . && \
\
docker build -t koush/scrypted:$SUPERVISOR_BASE -f Dockerfile$SUPERVISOR \
--build-arg BASE=$BASE .
--build-arg BASE=$BASE --build-arg SCRYPTED_INSTALL_VERSION=$SCRYPTED_INSTALL_VERSION .

View File

@@ -3,9 +3,10 @@ version: "3.5"
# The Scrypted docker-compose.yml file typically resides at:
# ~/.scrypted/docker-compose.yml
# Scrypted NVR Storage (Optional Network Volume: Part 1 of 3)
# Example volumes SMB (CIFS) and NFS.
# Uncomment only one.
# volumes:
# nvr:
# driver_opts:
@@ -20,38 +21,38 @@ version: "3.5"
services:
scrypted:
image: koush/scrypted
environment:
# Scrypted NVR Storage (Part 2 of 3)
# Uncomment the next line to configure the NVR plugin to store recordings
# use the /nvr directory within the container. This can also be configured
# within the plugin manually.
# The drive or network share will ALSO need to be configured in the volumes
# section below.
# - SCRYPTED_NVR_VOLUME=/nvr
- SCRYPTED_WEBHOOK_UPDATE_AUTHORIZATION=Bearer SET_THIS_TO_SOME_RANDOM_TEXT
- SCRYPTED_WEBHOOK_UPDATE=http://localhost:10444/v1/update
# nvidia support
# Uncomment next 3 lines for Nvidia GPU support.
# - NVIDIA_VISIBLE_DEVICES=all
# - NVIDIA_DRIVER_CAPABILITIES=all
# runtime: nvidia
container_name: scrypted
restart: unless-stopped
network_mode: host
devices:
# hardware accelerated video decoding, opencl, etc.
- /dev/dri:/dev/dri
# uncomment below as necessary.
# zwave usb serial device
# - /dev/ttyACM0:/dev/ttyACM0
# all usb devices, such as coral tpu
# - /dev/bus/usb:/dev/bus/usb
# coral PCI devices
# - /dev/apex_0:/dev/apex_0
# - /dev/apex_1:/dev/apex_1
# Uncomment next line to run avahi-daemon inside the container
# Don't use if dbus and avahi run on the host and are bind-mounted
# (see below under "volumes")
# - SCRYPTED_DOCKER_AVAHI=true
# runtime: nvidia
volumes:
- ~/.scrypted/volume:/server/volume
# modify and add the additional volume for Scrypted NVR
# the following example would mount the /mnt/sda/video path on the host
# Scrypted NVR Storage (Part 3 of 3)
# Modify to add the additional volume for Scrypted NVR.
# The following example would mount the /mnt/sda/video path on the host
# to the /nvr path inside the docker container.
# - /mnt/sda/video:/nvr
# or use a network mount from one of the examples above
# Or use a network mount from one of the CIFS/NFS examples at the top of this file.
# - type: volume
# source: nvr
# target: /nvr
@@ -60,8 +61,29 @@ services:
# uncomment the following lines to expose Avahi, an mDNS advertiser.
# make sure Avahi is running on the host machine, otherwise this will not work.
# not compatible with Avahi enabled via SCRYPTED_DOCKER_AVAHI=true
# - /var/run/dbus:/var/run/dbus
# - /var/run/avahi-daemon/socket:/var/run/avahi-daemon/socket
# Default volume for the Scrypted database. Typically should not be changed.
- ~/.scrypted/volume:/server/volume
devices:
# all usb devices, such as coral tpu
- /dev/bus/usb:/dev/bus/usb
# hardware accelerated video decoding, opencl, etc.
# - /dev/dri:/dev/dri
# uncomment below as necessary.
# zwave usb serial device
# - /dev/ttyACM0:/dev/ttyACM0
# coral PCI devices
# - /dev/apex_0:/dev/apex_0
# - /dev/apex_1:/dev/apex_1
container_name: scrypted
restart: unless-stopped
network_mode: host
image: koush/scrypted
# logging is noisy and will cause unnecessary wear on flash storage.
# scrypted has per-device, in-memory logging that is preferred.
logging:

View File

@@ -1,7 +1,7 @@
#!/bin/bash
if [ -z "$SCRYPTED_DOCKER_AVAHI" ]
then
if [[ "${SCRYPTED_DOCKER_AVAHI}" != "true" ]]; then
echo "SCRYPTED_DOCKER_AVAHI != true, not starting avahi-daemon" >/dev/stderr
while true
do
sleep 1000
@@ -13,4 +13,4 @@ until [ -e /var/run/dbus/system_bus_socket ]; do
sleep 1s
done
echo "Starting Avahi daemon..."
exec avahi-daemon --no-chroot -f /etc/avahi/avahi-daemon.conf
exec avahi-daemon --no-chroot -f /etc/avahi/avahi-daemon.conf

View File

@@ -1,4 +1,12 @@
#!/bin/bash
if [[ "${SCRYPTED_DOCKER_AVAHI}" != "true" ]]; then
echo "SCRYPTED_DOCKER_AVAHI != true, not starting dbus-daemon" >/dev/stderr
while true
do
sleep 1000
done
fi
echo "Starting dbus..."
exec dbus-daemon --system --nofork
exec dbus-daemon --system --nofork

View File

@@ -1,5 +1,15 @@
#!/bin/bash
if [[ "${SCRYPTED_DOCKER_AVAHI}" != "true" ]]; then
echo "SCRYPTED_DOCKER_AVAHI != true, won't manage dbus nor avahi-daemon" >/dev/stderr
exit 0
fi
if grep -qE " ((/var)?/run/dbus|(/var)?/run/avahi-daemon(/socket)?) " /proc/mounts; then
echo "dbus and/or avahi-daemon volumes are bind mounted, won't touch them" >/dev/stderr
exit 0
fi
# make run folders
mkdir -p /var/run/dbus
mkdir -p /var/run/avahi-daemon
@@ -22,4 +32,4 @@ if [ ! -z "$DSM_HOSTNAME" ]; then
sed -i "s/.*host-name.*/host-name=${DSM_HOSTNAME}/" /etc/avahi/avahi-daemon.conf
else
sed -i "s/.*host-name.*/#host-name=/" /etc/avahi/avahi-daemon.conf
fi
fi

View File

@@ -43,6 +43,10 @@ WATCHTOWER_HTTP_API_TOKEN=$(echo $RANDOM | md5sum)
DOCKER_COMPOSE_YML=$SCRYPTED_HOME/docker-compose.yml
echo "Created $DOCKER_COMPOSE_YML"
curl -s https://raw.githubusercontent.com/koush/scrypted/main/install/docker/docker-compose.yml | sed s/SET_THIS_TO_SOME_RANDOM_TEXT/"$(echo $RANDOM | md5sum | head -c 32)"/g > $DOCKER_COMPOSE_YML
if [ -d /dev/dri ]
then
sed -i 's/'#' - \/dev\/dri/- \/dev\/dri/g' $DOCKER_COMPOSE_YML
fi
echo "Setting permissions on $SCRYPTED_HOME"
chown -R $SERVICE_USER $SCRYPTED_HOME

View File

@@ -3,15 +3,42 @@
################################################################
FROM header as base
ENV SCRYPTED_DOCKER_SERVE="true"
# intel opencl gpu for openvino
RUN bash -c "if [ \"$(uname -m)\" == \"x86_64\" ]; \
then \
apt-get update && apt-get install -y gpg-agent && \
rm -f /usr/share/keyrings/intel-graphics.gpg && \
curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --output /usr/share/keyrings/intel-graphics.gpg && \
echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \
apt-get -y update && \
apt-get -y install intel-opencl-icd intel-media-va-driver-non-free && \
apt-get -y dist-upgrade; \
fi"
# python 3.9 from ppa.
# 3.9 is the version with prebuilt support for tensorflow lite
RUN add-apt-repository ppa:deadsnakes/ppa && \
apt-get -y install \
python3.9 \
python3.9-dev \
python3.9-distutils
RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3.9 -m pip install debugpy typing_extensions psutil
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
RUN test -f "/usr/bin/ffmpeg"
ENV SCRYPTED_FFMPEG_PATH="/usr/bin/ffmpeg"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=full
ENV SCRYPTED_BASE_VERSION="20230608"
ENV SCRYPTED_DOCKER_FLAVOR="full"
################################################################
# End section generated from template/Dockerfile.full.footer

View File

@@ -3,63 +3,59 @@
# This common file will be used by both Docker and the linux
# install script.
################################################################
ARG BASE="bullseye"
FROM debian:${BASE} as header
ARG BASE="jammy"
FROM ubuntu:${BASE} as header
RUN apt-get update && apt-get -y install curl wget
ENV DEBIAN_FRONTEND=noninteractive
# base tools and development stuff
RUN apt-get update && apt-get -y install \
curl software-properties-common apt-utils \
build-essential \
cmake \
ffmpeg \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
pkg-config && \
apt-get -y update && \
apt-get -y upgrade
# switch to nvm?
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update
RUN apt-get install -y nodejs
RUN apt-get update && apt-get install -y nodejs
# python native
RUN apt-get -y install \
python3 \
python3-dev \
python3-pip \
python3-setuptools \
python3-wheel
# Coral Edge TPU
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
RUN apt-get -y update
RUN apt-get -y install libedgetpu1-std
# intel opencl gpu for openvino
RUN if [ "$(uname -m)" = "x86_64" ]; \
then \
apt-get -y install \
intel-opencl-icd; \
fi
RUN apt-get -y install software-properties-common apt-utils
RUN apt-get -y update
RUN apt-get -y upgrade
# base development stuff
RUN apt-get -y install \
build-essential \
cmake \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
libvips \
pkg-config
RUN apt-get -y update && apt-get -y install libedgetpu1-std
# these are necessary for pillow-simd; the additional on-disk size is small
# but could consider removing this.
RUN apt-get -y install \
libjpeg-dev zlib1g-dev
# plugins support fallback to pillow, but vips is faster.
RUN apt-get -y install \
libvips
# gstreamer native https://gstreamer.freedesktop.org/documentation/installing/on-linux.html?gi-language=c#install-gstreamer-on-ubuntu-or-debian
RUN apt-get -y install \
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav gstreamer1.0-alsa \
gstreamer1.0-vaapi
# python native
# python3 gstreamer bindings
RUN apt-get -y install \
python3 \
python3-dev \
python3-gst-1.0 \
python3-pip \
python3-setuptools \
python3-wheel
python3-gst-1.0
# armv7l does not have wheels for any of these
# and compile times would take forever, if it works at all.
@@ -67,21 +63,22 @@ RUN apt-get -y install \
# which causes weird behavior in python which looks at the arch version
# which still reports 64bit, even if running in 32bit docker.
# this scenario is not supported and will be reported at runtime.
RUN if [ "$(uname -m)" != "x86_64" ]; \
then \
apt-get -y install \
python3-matplotlib \
python3-numpy \
python3-opencv \
python3-pil \
python3-skimage; \
fi
# this bit is not necessary on amd64, but leaving it for consistency.
RUN apt-get -y install \
python3-matplotlib \
python3-numpy \
python3-opencv \
python3-pil \
python3-skimage
# python pip
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
RUN python3 -m pip install --upgrade pip
# pyvips is broken on x86 due to a mismatched ffi
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install debugpy typing_extensions psutil

View File

@@ -45,7 +45,7 @@ ARG() {
}
ENV() {
echo "ignoring ENV $1"
export $@
}
source <(curl -s https://raw.githubusercontent.com/koush/scrypted/main/install/docker/template/Dockerfile.full.header)

View File

@@ -9,7 +9,7 @@
"version": "1.1.54",
"license": "ISC",
"dependencies": {
"@scrypted/types": "^0.2.91",
"@scrypted/types": "^0.2.94",
"axios": "^0.25.0",
"engine.io-client": "^6.4.0",
"rimraf": "^3.0.2"
@@ -21,9 +21,9 @@
}
},
"node_modules/@scrypted/types": {
"version": "0.2.91",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.91.tgz",
"integrity": "sha512-GfWil8cl2QwlTXk506ZXDALQfuv7zN48PtPlpmBMO/IYTQFtb+RB2zr+FwC9gdvRaZgs9NCCS2Fiig1OY7uxdQ=="
"version": "0.2.94",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.94.tgz",
"integrity": "sha512-615C6lLnJGk0qhp+Y72B3xeD2CS9p/h8JUmFDjKh4H4IjL6zlV10tZVAXWQt3Q5rmy1WAaS3nScR6NgxZ5woOA=="
},
"node_modules/@socket.io/component-emitter": {
"version": "3.1.0",

View File

@@ -17,7 +17,7 @@
"typescript": "^4.9.5"
},
"dependencies": {
"@scrypted/types": "^0.2.91",
"@scrypted/types": "^0.2.94",
"axios": "^0.25.0",
"engine.io-client": "^6.4.0",
"rimraf": "^3.0.2"

View File

@@ -161,6 +161,7 @@ export async function checkScryptedClientLogin(options?: ScryptedConnectionOptio
const directAddress = response.headers['x-scrypted-direct-address'];
return {
hostname: response.data.hostname as string,
redirect: response.data.redirect as string,
username: response.data.username as string,
expiration: response.data.expiration as number,

packages/python-client/.gitignore (vendored, new file)
View File

@@ -0,0 +1 @@
.venv

View File

@@ -0,0 +1,16 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Current File",
"type": "python",
"request": "launch",
"program": "${workspaceFolder}/test.py",
"console": "integratedTerminal",
"justMyCode": true
}
]
}

View File

@@ -0,0 +1 @@
../../server/python/plugin_remote.py

View File

@@ -0,0 +1,3 @@
python-engineio[asyncio_client]
aiohttp
aiodns

View File

@@ -0,0 +1 @@
../../server/python/rpc.py

View File

@@ -0,0 +1 @@
../../server/python/rpc_reader.py

View File

@@ -0,0 +1 @@
../../sdk/types/scrypted_python

View File

@@ -0,0 +1,151 @@
from __future__ import annotations
import asyncio
import os
from contextlib import nullcontext
import aiohttp
import engineio
import plugin_remote
import rpc_reader
from plugin_remote import DeviceManager, MediaManager, SystemManager
from scrypted_python.scrypted_sdk import ScryptedInterface, ScryptedStatic
class EioRpcTransport(rpc_reader.RpcTransport):
def __init__(self, loop: asyncio.AbstractEventLoop):
super().__init__()
self.eio = engineio.AsyncClient(ssl_verify=False)
self.loop = loop
self.write_error: Exception = None
self.read_queue = asyncio.Queue()
self.write_queue = asyncio.Queue()
@self.eio.on("message")
def on_message(data):
self.read_queue.put_nowait(data)
asyncio.run_coroutine_threadsafe(self.send_loop(), self.loop)
async def read(self):
return await self.read_queue.get()
async def send_loop(self):
while True:
data = await self.write_queue.get()
try:
await self.eio.send(data)
except Exception as e:
self.write_error = e
self.write_queue = None
break
def writeBuffer(self, buffer, reject):
async def send():
try:
if self.write_error:
raise self.write_error
self.write_queue.put_nowait(buffer)
except Exception as e:
reject(e)
asyncio.run_coroutine_threadsafe(send(), self.loop)
def writeJSON(self, json, reject):
return self.writeBuffer(json, reject)
async def connect_scrypted_client(
transport: EioRpcTransport,
base_url: str,
username: str,
password: str,
plugin_id: str = "@scrypted/core",
session: aiohttp.ClientSession | None = None,
) -> ScryptedStatic:
login_url = f"{base_url}/login"
login_body = {
"username": username,
"password": password,
}
if session:
cm = nullcontext(session)
else:
cm = aiohttp.ClientSession()
async with cm as _session:
async with _session.post(
login_url, verify_ssl=False, json=login_body
) as response:
login_response = await response.json()
headers = {"Authorization": login_response["authorization"]}
await transport.eio.connect(
base_url,
headers=headers,
engineio_path=f"/endpoint/{plugin_id}/engine.io/api/",
)
ret = asyncio.Future[ScryptedStatic](loop=transport.loop)
peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(
transport.loop, transport
)
peer.params["print"] = print
def callback(api, pluginId, hostInfo):
remote = plugin_remote.PluginRemote(
peer, api, pluginId, hostInfo, transport.loop
)
wrapped = remote.setSystemState
async def remoteSetSystemState(systemState):
await wrapped(systemState)
async def resolve():
sdk = ScryptedStatic()
sdk.api = api
sdk.remote = remote
sdk.systemManager = SystemManager(api, remote.systemState)
sdk.deviceManager = DeviceManager(
remote.nativeIds, sdk.systemManager
)
sdk.mediaManager = MediaManager(await api.getMediaManager())
ret.set_result(sdk)
asyncio.run_coroutine_threadsafe(resolve(), transport.loop)
remote.setSystemState = remoteSetSystemState
return remote
peer.params["getRemote"] = callback
asyncio.run_coroutine_threadsafe(peerReadLoop(), transport.loop)
sdk = await ret
return sdk
async def main():
transport = EioRpcTransport(asyncio.get_event_loop())
sdk = await connect_scrypted_client(
transport,
"https://localhost:10443",
os.environ["SCRYPTED_USERNAME"],
os.environ["SCRYPTED_PASSWORD"],
)
for id in sdk.systemManager.getSystemState():
device = sdk.systemManager.getDeviceById(id)
print(device.name)
if ScryptedInterface.OnOff.value in device.interfaces:
print(f"OnOff: device is {device.on}")
await transport.eio.disconnect()
os._exit(0)
loop = asyncio.new_event_loop()
asyncio.run_coroutine_threadsafe(main(), loop)
loop.run_forever()

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/alexa",
"version": "0.2.5",
"version": "0.2.6",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/alexa",
"version": "0.2.5",
"version": "0.2.6",
"dependencies": {
"axios": "^1.3.4",
"uuid": "^9.0.0"

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/alexa",
"version": "0.2.5",
"version": "0.2.6",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -6,11 +6,11 @@ import { supportedTypes } from ".";
supportedTypes.set(ScryptedDeviceType.Doorbell, {
async discover(device: ScryptedDevice): Promise<Partial<DiscoveryEndpoint>> {
let capabilities: any[] = [];
let category: DisplayCategory = 'DOORBELL';
const displayCategories: DisplayCategory[] = ['DOORBELL'];
if (device.interfaces.includes(ScryptedInterface.RTCSignalingChannel)) {
capabilities = await getCameraCapabilities(device);
category = 'CAMERA';
displayCategories.push('CAMERA');
}
if (device.interfaces.includes(ScryptedInterface.BinarySensor)) {
@@ -25,7 +25,7 @@ supportedTypes.set(ScryptedDeviceType.Doorbell, {
}
return {
displayCategories: [category],
displayCategories,
capabilities
};
},

View File

@@ -1,26 +1,25 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.122",
"lockfileVersion": 2,
"version": "0.0.123",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/amcrest",
"version": "0.0.122",
"version": "0.0.123",
"license": "Apache",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/multiparty": "^0.0.33",
"multiparty": "^4.2.2"
"multiparty": "^4.2.3"
},
"devDependencies": {
"@types/node": "^18.15.11"
"@types/node": "^18.16.18"
}
},
"../../common": {
"name": "@scrypted/common",
"version": "1.0.1",
"license": "ISC",
"dependencies": {
@@ -35,8 +34,7 @@
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.87",
"version": "0.2.103",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -71,9 +69,6 @@
"typedoc": "^0.23.21"
}
},
"../sdk": {
"extraneous": true
},
"node_modules/@koush/axios-digest-auth": {
"version": "0.8.5",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.5.tgz",
@@ -100,9 +95,9 @@
}
},
"node_modules/@types/node": {
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
"version": "18.16.18",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.16.18.tgz",
"integrity": "sha512-/aNaQZD0+iSBAGnvvN2Cx92HqE5sZCPZtx2TsK+4nvV23fFe09jVDvpArXr2j9DnYlzuU9WuoykDDc6wqvpNcw=="
},
"node_modules/auth-header": {
"version": "1.0.0",
@@ -120,15 +115,15 @@
"node_modules/depd": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
"integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=",
"integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/follow-redirects": {
"version": "1.14.9",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz",
"integrity": "sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w==",
"version": "1.15.2",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
"funding": [
{
"type": "individual",
@@ -145,15 +140,15 @@
}
},
"node_modules/http-errors": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.0.tgz",
"integrity": "sha512-4I8r0C5JDhT5VkvI47QktDW75rNlGVsUf/8hzjCC/wkWI/jdTRmBb9aI7erSG82r1bjKY3F6k28WnsVxB1C73A==",
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz",
"integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==",
"dependencies": {
"depd": "~1.1.2",
"inherits": "2.0.4",
"setprototypeof": "1.2.0",
"statuses": ">= 1.5.0 < 2",
"toidentifier": "1.0.0"
"toidentifier": "1.0.1"
},
"engines": {
"node": ">= 0.6"
@@ -165,11 +160,11 @@
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"node_modules/multiparty": {
"version": "4.2.2",
"resolved": "https://registry.npmjs.org/multiparty/-/multiparty-4.2.2.tgz",
"integrity": "sha512-NtZLjlvsjcoGrzojtwQwn/Tm90aWJ6XXtPppYF4WmOk/6ncdwMMKggFY2NlRRN9yiCEIVxpOfPWahVEG2HAG8Q==",
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/multiparty/-/multiparty-4.2.3.tgz",
"integrity": "sha512-Ak6EUJZuhGS8hJ3c2fY6UW5MbkGUPMBEGd13djUzoY/BHqV/gTuFWtC6IuVA7A2+v3yjBS6c4or50xhzTQZImQ==",
"dependencies": {
"http-errors": "~1.8.0",
"http-errors": "~1.8.1",
"safe-buffer": "5.2.1",
"uid-safe": "2.1.5"
},
@@ -180,7 +175,7 @@
"node_modules/random-bytes": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz",
"integrity": "sha1-T2ih3Arli9P7lYSMMDJNt11kNgs=",
"integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==",
"engines": {
"node": ">= 0.8"
}
@@ -212,15 +207,15 @@
"node_modules/statuses": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz",
"integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=",
"integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/toidentifier": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz",
"integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==",
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
"integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==",
"engines": {
"node": ">=0.6"
}
@@ -236,147 +231,5 @@
"node": ">= 0.8"
}
}
},
"dependencies": {
"@koush/axios-digest-auth": {
"version": "0.8.5",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.5.tgz",
"integrity": "sha512-EZMM0gMJ3hMUD4EuUqSwP6UGt5Vmw2TZtY7Ypec55AnxkExSXM0ySgPtqkAcnL43g1R27yAg/dQL7dRTLMqO3Q==",
"requires": {
"auth-header": "^1.0.0",
"axios": "^0.21.4"
}
},
"@scrypted/common": {
"version": "file:../../common",
"requires": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"@types/node": "^16.9.0",
"http-auth-utils": "^3.0.2",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^4.4.3"
}
},
"@scrypted/sdk": {
"version": "file:../../sdk",
"requires": {
"@babel/preset-typescript": "^7.18.6",
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
"ncp": "^2.0.0",
"raw-loader": "^4.0.2",
"rimraf": "^3.0.2",
"stringify-object": "^3.3.0",
"tmp": "^0.2.1",
"ts-loader": "^9.4.2",
"ts-node": "^10.4.0",
"typedoc": "^0.23.21",
"typescript": "^4.9.4",
"webpack": "^5.75.0",
"webpack-bundle-analyzer": "^4.5.0"
}
},
"@types/multiparty": {
"version": "0.0.33",
"resolved": "https://registry.npmjs.org/@types/multiparty/-/multiparty-0.0.33.tgz",
"integrity": "sha512-Il6cJUpSqgojT7NxbVJUvXkCblm50/yEJYtblISDsNIeNYf4yMAhdizzidUk6h8pJ8yhwK/3Fkb+3Dwcgtwl8w==",
"requires": {
"@types/node": "*"
}
},
"@types/node": {
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"auth-header": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/auth-header/-/auth-header-1.0.0.tgz",
"integrity": "sha512-CPPazq09YVDUNNVWo4oSPTQmtwIzHusZhQmahCKvIsk0/xH6U3QsMAv3sM+7+Q0B1K2KJ/Q38OND317uXs4NHA=="
},
"axios": {
"version": "0.21.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz",
"integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==",
"requires": {
"follow-redirects": "^1.14.0"
}
},
"depd": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
"integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak="
},
"follow-redirects": {
"version": "1.14.9",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz",
"integrity": "sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w=="
},
"http-errors": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.0.tgz",
"integrity": "sha512-4I8r0C5JDhT5VkvI47QktDW75rNlGVsUf/8hzjCC/wkWI/jdTRmBb9aI7erSG82r1bjKY3F6k28WnsVxB1C73A==",
"requires": {
"depd": "~1.1.2",
"inherits": "2.0.4",
"setprototypeof": "1.2.0",
"statuses": ">= 1.5.0 < 2",
"toidentifier": "1.0.0"
}
},
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"multiparty": {
"version": "4.2.2",
"resolved": "https://registry.npmjs.org/multiparty/-/multiparty-4.2.2.tgz",
"integrity": "sha512-NtZLjlvsjcoGrzojtwQwn/Tm90aWJ6XXtPppYF4WmOk/6ncdwMMKggFY2NlRRN9yiCEIVxpOfPWahVEG2HAG8Q==",
"requires": {
"http-errors": "~1.8.0",
"safe-buffer": "5.2.1",
"uid-safe": "2.1.5"
}
},
"random-bytes": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz",
"integrity": "sha1-T2ih3Arli9P7lYSMMDJNt11kNgs="
},
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
},
"setprototypeof": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz",
"integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="
},
"statuses": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz",
"integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow="
},
"toidentifier": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz",
"integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw=="
},
"uid-safe": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz",
"integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==",
"requires": {
"random-bytes": "~1.0.0"
}
}
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.122",
"version": "0.0.123",
"description": "Amcrest Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -39,9 +39,9 @@
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/multiparty": "^0.0.33",
"multiparty": "^4.2.2"
"multiparty": "^4.2.3"
},
"devDependencies": {
"@types/node": "^18.15.11"
"@types/node": "^18.16.18"
}
}

View File

@@ -71,6 +71,7 @@ export class AmcrestCameraClient {
method: "GET",
responseType: 'arraybuffer',
url: `http://${this.ip}/cgi-bin/snapshot.cgi`,
timeout: 60000,
});
return Buffer.from(response.data);

View File

@@ -1,15 +1,41 @@
# Arlo Plugin for Scrypted
The Arlo Plugin connects Scrypted to Arlo cloud, allowing you to access all of your Arlo cameras in Scrypted.
The Arlo Plugin connects Scrypted to Arlo Cloud, allowing you to access all of your Arlo cameras in Scrypted.
It is highly recommended to create a dedicated Arlo account for use with this plugin and share your cameras from your main account, as Arlo only permits one connection to their servers per account. Using a separate account allows you to use the Arlo app or website simultaneously with this plugin.
It is highly recommended to create a dedicated Arlo account for use with this plugin and share your cameras from your main account, as Arlo only permits one active login to their servers per account. Using a separate account allows you to use the Arlo app or website simultaneously with this plugin; otherwise, logging in from one place will log you out of all other devices.
The account you use for this plugin must have either SMS or email set as the default 2FA option. Once you enter your username and password on the plugin settings page, you should receive a 2FA code through your default 2FA option. Enter that code into the provided box, and your cameras will appear in Scrypted. Alternatively, see below for configuring IMAP to handle 2FA logins automatically.
If you experience any trouble logging in, clear the username and password boxes, reload the plugin, and try again.
If you are unable to see shared cameras in your separate Arlo account, ensure that both your primary and secondary accounts are upgraded according to this [forum post](https://web.archive.org/web/20230710141914/https://community.arlo.com/t5/Arlo-Secure/Invited-friend-cannot-see-devices-on-their-dashboard-Arlo-Pro-2/m-p/1889396#M1813). Verify the sharing worked by logging in via the Arlo web dashboard.
## General Setup Notes
* Ensure that your Arlo account's default 2FA option is set to either SMS or email.
* Motion event notifications should be turned on in the Arlo app. If you are receiving motion push notifications, Scrypted will also receive motion events.
* Disable smart detection and any cloud/local recording in the Arlo app. Arlo Cloud only permits one active stream per camera, so any smart detection or recording features may prevent downstream plugins (e.g. HomeKit) from successfully pulling the video feed after a motion event.
* It is highly recommended to enable the Rebroadcast plugin to allow multiple downstream plugins to pull the video feed within Scrypted.
* If there is no audio on your camera, switch to the `FFmpeg (TCP)` parser under the `Cloud RTSP` settings.
* Prebuffering should only be enabled if the camera is wired to a persistent power source, such as a wall outlet. Prebuffering will only work if your camera does not have a battery or `Plugged In to External Power` is selected.
* The plugin supports pulling RTSP or DASH streams from Arlo Cloud. It is recommended to use RTSP for the lowest latency streams. DASH reliability is inconsistent, and it may return finicky codecs that require additional FFmpeg output arguments, e.g. `-vcodec h264` (see the sketch after these notes). *Note that both RTSP and DASH will ultimately pull the same video stream feed from your camera, and they cannot both be used at the same time due to the single stream limitation.*
Note that streaming cameras uses extra Internet bandwidth, since video and audio packets will need to travel from the camera through your network, out to Arlo Cloud, and then back to your network and into Scrypted.
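For illustration, here is a minimal sketch of how the plugin's underlying `StartStream` call (shown later in this diff) might be driven to choose between the RTSP and DASH modes. The `arlo`, `basestation`, and `camera` names are assumptions: an already-authenticated client and the device dicts it returns. This is not a supported API surface, just a sketch.

```python
import asyncio

async def fetch_stream_url(arlo, basestation: dict, camera: dict, mode: str = "rtsp") -> str:
    # "rtsp" is the recommended low-latency mode; "dash" returns an MPD URL
    # that additionally requires the headers from GetMPDHeaders().
    url = await arlo.StartStream(basestation, camera, mode=mode)
    # The returned URL expires quickly, so hand it to FFmpeg right away, e.g.:
    #   ffmpeg -re -i "<url>" -acodec copy -vcodec copy out.mp4
    return url

# url = asyncio.run(fetch_stream_url(arlo, basestation, camera))
```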
## IMAP 2FA
The Arlo Plugin supports using the IMAP protocol to check an email mailbox for Arlo 2FA codes. This requires you to specify an email 2FA option as the default in your Arlo account settings.
The plugin should work with any mailbox that supports IMAP, but so far has been tested with Gmail. To configure a Gmail mailbox, see [here](https://support.google.com/mail/answer/7126229?hl=en) to see the Gmail IMAP settings, and [here](https://support.google.com/accounts/answer/185833?hl=en) to create an App Password. Enter the App Password in place of your normal Gmail password.
The plugin should work with any mailbox that supports IMAP, but so far has been tested with Gmail. To configure a Gmail mailbox, see [here](https://support.google.com/mail/answer/7126229?hl=en) to see the Gmail IMAP settings, and [here](https://support.google.com/accounts/answer/185833?hl=en) to create an App Password. Enter the App Password in place of your normal Gmail password.
The plugin searches for emails sent by Arlo's `do_not_reply@arlo.com` address when looking for 2FA codes. If you are using a service to forward emails to the mailbox registered with this plugin (e.g. a service like iCloud's Hide My Email), it is possible that Arlo's email sender address has been overwritten by the mail forwarder. Check the email registered with this plugin to see what address the mail forwarder uses to replace Arlo's sender address, and update that in the IMAP 2FA settings.
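As a rough illustration only (not the plugin's actual implementation), a standalone check of a mailbox for an Arlo code could look like the following, using Python's standard imaplib. The host, account, and app password are placeholders, and the six-digit-code regex is an assumption about the email format.

```python
import email
import imaplib
import re

HOST, USER, APP_PASSWORD = "imap.gmail.com", "me@example.com", "app-password"  # placeholders

with imaplib.IMAP4_SSL(HOST) as imap:
    imap.login(USER, APP_PASSWORD)
    imap.select("INBOX")
    # Search for mail from Arlo's sender (or your forwarder's rewritten address).
    _, data = imap.search(None, 'FROM', '"do_not_reply@arlo.com"')
    for num in reversed(data[0].split()):
        _, msg_data = imap.fetch(num, "(RFC822)")
        msg = email.message_from_bytes(msg_data[0][1])
        # Collect the text parts of the (possibly multipart) message.
        body = b"".join(
            part.get_payload(decode=True) or b""
            for part in msg.walk()
            if part.get_content_type() in ("text/plain", "text/html")
        )
        match = re.search(rb"\b(\d{6})\b", body)
        if match:
            print("2FA code:", match.group(1).decode())
            break
```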
## Virtual Security System for Arlo Sirens
In external integrations like HomeKit, sirens are exposed as simple on-off switches. This makes it easy to accidentally hit the switch when using the Home app. The Arlo Plugin creates a "virtual" security system device per siren so that Scrypted can arm or disarm the siren switch, protecting against accidental triggers. This virtual security system device will be synced into HomeKit as a separate accessory from the camera, with the siren itself merged into the security system accessory.
Note that the virtual security system is NOT tied to your Arlo account at all, and will not make any changes such as switching your device's motion alert armed/disarmed modes. For more information, please see the README on the virtual security system device in Scrypted.
## Video Clips
The Arlo Plugin will show video clips available in Arlo Cloud for cameras with cloud recording enabled. These clips are not downloaded onto your Scrypted server, but rather streamed on-demand. Deleting clips is not available in Scrypted and should be done through the Arlo app or the Arlo web dashboard.
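For reference, clip listing goes through the client's `GetLibrary(device, from_date, to_date)` call that appears later in this diff. A minimal sketch of pulling the last day of recordings, assuming an already-authenticated client and a camera device dict (both hypothetical here):

```python
from datetime import datetime, timedelta

def last_day_of_clips(arlo, camera: dict):
    # GetLibrary takes the device dict plus a from/to datetime range;
    # the shape of the returned records is described in the client's docstring.
    to_date = datetime.now()
    from_date = to_date - timedelta(days=1)
    return arlo.GetLibrary(camera, from_date, to_date)

# clips = last_day_of_clips(arlo, camera)
```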

View File

@@ -1,19 +1,20 @@
{
"name": "@scrypted/arlo",
"version": "0.7.21",
"version": "0.8.11",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/arlo",
"version": "0.7.21",
"version": "0.8.11",
"license": "Apache",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.101",
"version": "0.2.103",
"dev": true,
"license": "ISC",
"dependencies": {

View File

@@ -1,7 +1,8 @@
{
"name": "@scrypted/arlo",
"version": "0.7.21",
"version": "0.8.11",
"description": "Arlo Plugin for Scrypted",
"license": "Apache",
"keywords": [
"scrypted",
"plugin",

View File

@@ -41,6 +41,7 @@ import math
import random
import time
import uuid
from urllib.parse import urlparse, parse_qs
stream_class = MQTTStream
@@ -74,20 +75,33 @@ USER_AGENTS = {
"Gecko/20100101 Firefox/85.0",
"linux":
"Mozilla/5.0 (X11; Linux x86_64) "
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36"
"AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.96 Safari/537.36",
# extracted from cloudscraper as a working UA for cloudflare
"android":
"Mozilla/5.0 (Linux; U; Android 8.1.0; zh-cn; PACM00 Build/O11019) "
"AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/57.0.2987.132 MQQBrowser/8.8 Mobile Safari/537.36"
}
# user agents for media players, e.g. the android app
MEDIA_USER_AGENTS = {
"android": "ijkplayer-android-4.5_28538"
}
class Arlo(object):
BASE_URL = 'my.arlo.com'
AUTH_URL = 'ocapi-app.arlo.com'
BACKUP_AUTH_HOSTS = ['NTIuMjEwLjMuMTIx', 'MzQuMjU1LjkyLjIxMg==', 'MzQuMjUxLjE3Ny45MA==', 'NTQuMjQ2LjE3MS4x']
TRANSID_PREFIX = 'web'
random.shuffle(BACKUP_AUTH_HOSTS)
def __init__(self, username, password):
self.username = username
self.password = password
self.event_stream = None
self.request = Request()
self.request = None
def to_timestamp(self, dt):
if sys.version[0] == '2':
@@ -136,6 +150,7 @@ class Arlo(object):
self.user_id = user_id
headers['Content-Type'] = 'application/json; charset=UTF-8'
headers['User-Agent'] = USER_AGENTS['arlo']
self.request = Request(mode="cloudscraper")
self.request.session.headers.update(headers)
self.BASE_URL = 'myapi.arlo.com'
@@ -146,7 +161,6 @@ class Arlo(object):
'schemaVersion': '1',
'Auth-Version': '2',
'Content-Type': 'application/json; charset=UTF-8',
'User-Agent': USER_AGENTS['arlo'],
'Origin': f'https://{self.BASE_URL}',
'Referer': f'https://{self.BASE_URL}/',
'Source': 'arloCamWeb',
@@ -166,13 +180,11 @@ class Arlo(object):
# in case cloudflare rejects our auth request...
logger.warning(f"Using fallback authentication host due to: {e}")
backup_hosts = list(scrypted_arlo_go.BACKUP_AUTH_HOSTS())
random.shuffle(backup_hosts)
auth_host = pick_host([
base64.b64decode(h.encode("utf-8")).decode("utf-8")
for h in backup_hosts
for h in self.BACKUP_AUTH_HOSTS
], self.AUTH_URL, "/api/auth")
logger.debug(f"Selected backup authentication host {auth_host}")
self.request = Request(mode="ip")
@@ -200,10 +212,15 @@ class Arlo(object):
raw=True
)
factor_id = next(
i for i in factors_body['data']['items']
if (i['factorType'] == 'EMAIL' or i['factorType'] == 'SMS')
and i['factorRole'] == "PRIMARY"
)['factorId']
iter([
i for i in factors_body['data']['items']
if (i['factorType'] == 'EMAIL' or i['factorType'] == 'SMS')
and i['factorRole'] == "PRIMARY"
]),
{}
).get('factorId')
if not factor_id:
raise Exception("Could not find valid 2FA method - is the primary 2FA set to either Email or SMS?")
# Start factor auth
start_auth_body = self.request.post(
@@ -227,7 +244,10 @@ class Arlo(object):
raw=True
)
self.request = Request()
if finish_auth_body.get('data', {}).get('token') is None:
raise Exception("Could not complete 2FA, maybe invalid token? If the error persists, please try reloading the plugin and logging in again.")
self.request = Request(mode="cloudscraper")
# Update Authorization code with new code
headers = {
@@ -282,17 +302,25 @@ class Arlo(object):
cameras[camera['deviceId']] = camera
# filter out cameras without basestation, where they are their own basestations
# for now, keep doorbells and sirens in the list so they get pings
proper_basestations = {}
# this is so battery-powered devices do not drain due to pings
# for wired devices, keep doorbells, sirens, and arloq in the list so they get pings
# we also add arlo baby devices (abc1000, abc1000a) since they are standalone-only
# and seem to want pings
devices_to_ping = {}
for basestation in basestations.values():
if basestation['deviceId'] == basestation.get('parentId') and basestation['deviceType'] not in ['doorbell', 'siren']:
if basestation['deviceId'] == basestation.get('parentId') and \
basestation['deviceType'] not in ['doorbell', 'siren', 'arloq', 'arloqs'] and \
basestation['modelId'].lower() not in ['abc1000', 'abc1000a']:
continue
proper_basestations[basestation['deviceId']] = basestation
# avd2001 is the battery doorbell, and we don't want to drain its battery, so disable pings
if basestation['modelId'].lower().startswith('avd2001'):
continue
devices_to_ping[basestation['deviceId']] = basestation
logger.info(f"Will send heartbeat to the following basestations: {list(proper_basestations.keys())}")
logger.info(f"Will send heartbeat to the following devices: {list(devices_to_ping.keys())}")
# start heartbeat loop with only basestations
asyncio.get_event_loop().create_task(heartbeat(self, list(proper_basestations.values())))
# start heartbeat loop with only pingable devices
asyncio.get_event_loop().create_task(heartbeat(self, list(devices_to_ping.values())))
# subscribe to all camera topics
topics = [
@@ -384,58 +412,135 @@ class Arlo(object):
basestation_id = basestation.get('deviceId')
return self.Notify(basestation, {"action":"set","resource":"subscriptions/"+self.user_id+"_web","publishResponse":False,"properties":{"devices":[basestation_id]}})
def SubscribeToMotionEvents(self, basestation, camera, callback):
def SubscribeToErrorEvents(self, basestation, camera, callback):
"""
Use this method to subscribe to error events. You must provide a callback function which will get called once per error event.
The callback function should have the following signature:
def callback(code, message)
This is an example of handling a specific event, in reality, you'd probably want to write a callback for HandleEvents()
that has a big switch statement in it to handle all the various events Arlo produces.
Returns the Task object that contains the subscription loop.
"""
resource = f"cameras/{camera.get('deviceId')}"
# Note: It looks like sometimes a message is returned as an 'is' action
# where a 'stateChangeReason' property contains the error message. This is
# a bit of a hack but we will listen to both events with an 'error' key as
# well as 'stateChangeReason' events.
def callbackwrapper(self, event):
if 'error' in event:
error = event['error']
elif 'properties' in event:
error = event['properties'].get('stateChangeReason', {})
else:
return None
message = error.get('message')
code = error.get('code')
stop = callback(code, message)
if not stop:
return None
return stop
return asyncio.get_event_loop().create_task(
self.HandleEvents(basestation, resource, ['error', ('is', 'stateChangeReason')], callbackwrapper)
)
def SubscribeToMotionEvents(self, basestation, camera, callback, logger) -> asyncio.Task:
"""
Use this method to subscribe to motion events. You must provide a callback function which will get called once per motion event.
The callback function should have the following signature:
def callback(self, event)
def callback(event)
This is an example of handling a specific event, in reality, you'd probably want to write a callback for HandleEvents()
that has a big switch statement in it to handle all the various events Arlo produces.
Returns the Task object that contains the subscription loop.
"""
resource = f"cameras/{camera.get('deviceId')}"
return self._subscribe_to_motion_or_audio_events(basestation, camera, callback, logger, "motionDetected")
def callbackwrapper(self, event):
properties = event.get('properties', {})
stop = None
if 'motionDetected' in properties:
stop = callback(properties['motionDetected'])
if not stop:
return None
return stop
return asyncio.get_event_loop().create_task(
self.HandleEvents(basestation, resource, [('is', 'motionDetected')], callbackwrapper)
)
def SubscribeToAudioEvents(self, basestation, camera, callback):
def SubscribeToAudioEvents(self, basestation, camera, callback, logger):
"""
Use this method to subscribe to audio events. You must provide a callback function which will get called once per audio event.
The callback function should have the following signature:
def callback(self, event)
def callback(event)
This is an example of handling a specific event, in reality, you'd probably want to write a callback for HandleEvents()
that has a big switch statement in it to handle all the various events Arlo produces.
Returns the Task object that contains the subscription loop.
"""
return self._subscribe_to_motion_or_audio_events(basestation, camera, callback, logger, "audioDetected")
def _subscribe_to_motion_or_audio_events(self, basestation, camera, callback, logger, event_key) -> asyncio.Task:
"""
Helper method to implement force reset of events (when the event end signal is dropped) and delay of end
of event signals (when the sensor turns off and on quickly)
event_key is either motionDetected or audioDetected
"""
resource = f"cameras/{camera.get('deviceId')}"
# if we somehow miss the *Detected = False event, this task
# is used to force the caller to register the end of the event
force_reset_event_task: asyncio.Task = None
# when we receive a normal *Detected = False event, this
# task is used to delay the delivery in case the sensor
# registers an event immediately afterwards
delayed_event_end_task: asyncio.Task = None
async def reset_event(sleep_duration: float) -> None:
nonlocal force_reset_event_task, delayed_event_end_task
await asyncio.sleep(sleep_duration)
logger.debug(f"{event_key}: delivering False")
callback(False)
force_reset_event_task = None
delayed_event_end_task = None
def callbackwrapper(self, event):
nonlocal force_reset_event_task, delayed_event_end_task
properties = event.get('properties', {})
stop = None
if 'audioDetected' in properties:
stop = callback(properties['audioDetected'])
if event_key in properties:
event_detected = properties[event_key]
delivery_delay = 10
logger.debug(f"{event_key}: {event_detected} {'will delay delivery by ' + str(delivery_delay) + 's' if not event_detected else ''}".rstrip())
if force_reset_event_task:
logger.debug(f"{event_key}: cancelling previous force reset task")
force_reset_event_task.cancel()
force_reset_event_task = None
if delayed_event_end_task:
logger.debug(f"{event_key}: cancelling previous delay event task")
delayed_event_end_task.cancel()
delayed_event_end_task = None
if event_detected:
stop = callback(event_detected)
# schedule a callback to reset the sensor
# if we somehow miss the *Detected = False event
force_reset_event_task = asyncio.get_event_loop().create_task(reset_event(60))
else:
delayed_event_end_task = asyncio.get_event_loop().create_task(reset_event(delivery_delay))
if not stop:
return None
return stop
return asyncio.get_event_loop().create_task(
self.HandleEvents(basestation, resource, [('is', 'audioDetected')], callbackwrapper)
self.HandleEvents(basestation, resource, [('is', event_key)], callbackwrapper)
)
def SubscribeToBatteryEvents(self, basestation, camera, callback):
@@ -443,7 +548,7 @@ class Arlo(object):
Use this method to subscribe to battery events. You must provide a callback function which will get called once per battery event.
The callback function should have the following signature:
def callback(self, event)
def callback(event)
This is an example of handling a specific event, in reality, you'd probably want to write a callback for HandleEvents()
that has a big switch statement in it to handle all the various events Arlo produces.
@@ -470,7 +575,7 @@ class Arlo(object):
Use this method to subscribe to doorbell events. You must provide a callback function which will get called once per doorbell event.
The callback function should have the following signature:
def callback(self, event)
def callback(event)
This is an example of handling a specific event, in reality, you'd probably want to write a callback for HandleEvents()
that has a big switch statement in it to handle all the various events Arlo produces.
@@ -506,7 +611,7 @@ class Arlo(object):
Use this method to subscribe to pushToTalk SDP answer events. You must provide a callback function which will get called once per SDP event.
The callback function should have the following signature:
def callback(self, event)
def callback(event)
This is an example of handling a specific event, in reality, you'd probably want to write a callback for HandleEvents()
that has a big switch statement in it to handle all the various events Arlo produces.
@@ -534,7 +639,7 @@ class Arlo(object):
Use this method to subscribe to pushToTalk ICE candidate answer events. You must provide a callback function which will get called once per candidate event.
The callback function should have the following signature:
def callback(self, event)
def callback(event)
This is an example of handling a specific event, in reality, you'd probably want to write a callback for HandleEvents()
that has a big switch statement in it to handle all the various events Arlo produces.
@@ -629,7 +734,7 @@ class Arlo(object):
If you pass in a valid device type, as a string or a list, this method will return an array of just those devices that match that type. An example would be ['basestation', 'camera']
To filter provisioned or unprovisioned devices pass in a True/False value for filter_provisioned. By default both types are returned.
"""
devices = self.request.get(f'https://{self.BASE_URL}/hmsweb/v2/users/devices')
devices = self._getDevicesImpl()
if device_type:
devices = [ device for device in devices if device.get('deviceType') in device_type]
@@ -641,20 +746,42 @@ class Arlo(object):
return devices
async def StartStream(self, basestation, camera):
@cached(cache=TTLCache(maxsize=1, ttl=60))
def _getDevicesImpl(self):
devices = self.request.get(f'https://{self.BASE_URL}/hmsweb/v2/users/devices')
return devices
def GetDeviceCapabilities(self, device: dict) -> dict:
return self._getDeviceCapabilitiesImpl(device['modelId'].lower(), device['interfaceVersion'])
@cached(cache=TTLCache(maxsize=64, ttl=60))
def _getDeviceCapabilitiesImpl(self, model_id: str, interface_version: str) -> dict:
return self.request.get(
f'https://{self.BASE_URL}/resources/capabilities/{model_id}/{model_id}_{interface_version}.json',
raw=True
)
async def StartStream(self, basestation, camera, mode="rtsp"):
"""
This function returns the url of the rtsp video stream.
This stream URL must be used within 30 seconds or else it becomes invalid.
It can be streamed with: ffmpeg -re -i 'rtsps://<url>' -acodec copy -vcodec copy test.mp4
The request to /users/devices/startStream returns: { url:rtsp://<url>:443/vzmodulelive?egressToken=b<xx>&userAgent=iOS&cameraId=<camid>}
If mode is set to "dash", returns the url to the mpd file for DASH streaming. Note that DASH
has very specific header requirements - see GetMPDHeaders()
"""
resource = f"cameras/{camera.get('deviceId')}"
if mode not in ["rtsp", "dash"]:
raise ValueError("mode must be 'rtsp' or 'dash'")
# nonlocal variable hack for Python 2.x.
class nl:
stream_url_dict = None
def trigger(self):
ua = USER_AGENTS['arlo'] if mode == "rtsp" else USER_AGENTS["firefox"]
nl.stream_url_dict = self.request.post(
f'https://{self.BASE_URL}/hmsweb/users/devices/startStream',
params={
@@ -670,14 +797,19 @@ class Arlo(object):
"cameraId": camera.get('deviceId')
}
},
headers={"xcloudId":camera.get('xCloudId')}
headers={"xcloudId":camera.get('xCloudId'), 'User-Agent': ua}
)
def callback(self, event):
#return nl.stream_url_dict['url'].replace("rtsp://", "rtsps://")
if "error" in event:
return None
properties = event.get("properties", {})
if properties.get("activityState") == "userStreamActive":
return nl.stream_url_dict['url'].replace("rtsp://", "rtsps://")
if mode == "rtsp":
return nl.stream_url_dict['url'].replace("rtsp://", "rtsps://")
else:
return nl.stream_url_dict['url'].replace(":80", "")
return None
return await self.TriggerAndHandleEvent(
@@ -688,6 +820,37 @@ class Arlo(object):
callback,
)
def GetMPDHeaders(self, url: str) -> dict:
parsed = urlparse(url)
query = parse_qs(parsed.query)
headers = {
"Accept": "*/*",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "en-US,en;q=0.9",
"Connection": "keep-alive",
"DNT": "1",
"Egress-Token": query['egressToken'][0], # this is very important
"Origin": "https://my.arlo.com",
"Referer": "https://my.arlo.com/",
"User-Agent": USER_AGENTS["firefox"],
}
return headers
def GetSIPInfo(self):
resp = self.request.get(f'https://{self.BASE_URL}/hmsweb/users/devices/sipInfo')
return resp
def GetSIPInfoV2(self, camera):
resp = self.request.get(
f'https://{self.BASE_URL}/hmsweb/users/devices/sipInfo/v2',
headers={
"xcloudId": camera.get('xCloudId'),
"cameraId": camera.get('deviceId'),
}
)
return resp
def StartPushToTalk(self, basestation, camera):
url = f'https://{self.BASE_URL}/hmsweb/users/devices/{self.user_id}_{camera.get("deviceId")}/pushtotalk'
resp = self.request.get(url)
@@ -745,6 +908,8 @@ class Arlo(object):
)
def callback(self, event):
if "error" in event:
return None
properties = event.get("properties", {})
url = properties.get("presignedFullFrameSnapshotUrl")
if url:
@@ -870,6 +1035,32 @@ class Arlo(object):
},
})
def NightlightOn(self, basestation):
resource = f"cameras/{basestation.get('deviceId')}"
return self.Notify(basestation, {
"action": "set",
"resource": resource,
"publishResponse": True,
"properties": {
"nightLight": {
"enabled": True
}
}
})
def NightlightOff(self, basestation):
resource = f"cameras/{basestation.get('deviceId')}"
return self.Notify(basestation, {
"action": "set",
"resource": resource,
"publishResponse": True,
"properties": {
"nightLight": {
"enabled": False
}
}
})
def GetLibrary(self, device, from_date: datetime, to_date: datetime):
"""
This call returns the following:

View File

@@ -2,28 +2,30 @@ import ssl
from socket import setdefaulttimeout
import requests
from requests_toolbelt.adapters import host_header_ssl
from cryptography import x509
from cryptography.x509.oid import ExtensionOID
import scrypted_arlo_go
from .logging import logger
setdefaulttimeout(5)
setdefaulttimeout(15)
def pick_host(hosts, hostname_to_match, endpoint_to_test):
session = requests.Session()
session.mount('https://', host_header_ssl.HostHeaderSSLAdapter())
setdefaulttimeout(5)
for host in hosts:
try:
c = ssl.get_server_certificate((host, 443))
c = x509.load_pem_x509_certificate(c.encode("utf-8"))
if hostname_to_match in c.subject.rfc4514_string() or \
hostname_to_match in c.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME).value.get_values_for_type(x509.DNSName):
try:
session = requests.Session()
session.mount('https://', host_header_ssl.HostHeaderSSLAdapter())
for host in hosts:
try:
c = ssl.get_server_certificate((host, 443))
scrypted_arlo_go.VerifyCertHostname(c, hostname_to_match)
r = session.post(f"https://{host}{endpoint_to_test}", headers={"Host": hostname_to_match})
r.raise_for_status()
return host
except Exception as e:
logger.warning(f"{host} is invalid: {e}")
raise Exception("no valid hosts found!")
except Exception as e:
logger.warning(f"{host} is invalid: {e}")
raise Exception("no valid hosts found!")
finally:
setdefaulttimeout(15)

View File

@@ -9,7 +9,7 @@ logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
# log formatting
fmt = logging.Formatter("[Arlo] %(message)s")
fmt = logging.Formatter("[Arlo]: %(message)s")
ch.setFormatter(fmt)
# configure handler to logger

View File

@@ -14,6 +14,7 @@
# limitations under the License.
##
from functools import partialmethod
import requests
from requests.exceptions import HTTPError
from requests_toolbelt.adapters import host_header_ssl
@@ -21,6 +22,15 @@ import cloudscraper
import time
import uuid
from .logging import logger
try:
from curl_cffi import requests as curl_cffi_requests
HAS_CURL_CFFI = True
except:
HAS_CURL_CFFI = False
#from requests_toolbelt.utils import dump
#def print_raw_http(response):
# data = dump.dump_all(response, request_prefix=b'', response_prefix=b'')
@@ -29,13 +39,21 @@ import uuid
class Request(object):
"""HTTP helper class"""
def __init__(self, timeout=5, mode="cloudscraper"):
if mode == "cloudscraper":
def __init__(self, timeout=5, mode="curl" if HAS_CURL_CFFI else "cloudscraper"):
if mode == "curl":
logger.debug("HTTP helper using curl_cffi")
self.session = curl_cffi_requests.Session(impersonate="chrome110")
elif mode == "cloudscraper":
logger.debug("HTTP helper using cloudscraper")
from .arlo_async import USER_AGENTS
self.session = cloudscraper.CloudScraper(browser={"custom": USER_AGENTS["arlo"]})
self.session = cloudscraper.CloudScraper(browser={"custom": USER_AGENTS["android"]})
elif mode == "ip":
logger.debug("HTTP helper using requests with HostHeaderSSLAdapter")
self.session = requests.Session()
self.session.mount('https://', host_header_ssl.HostHeaderSSLAdapter())
else:
logger.debug("HTTP helper using requests")
self.session = requests.Session()
self.timeout = timeout
def gen_event_id(self):

View File

@@ -3,7 +3,7 @@ import json
import sseclient
import threading
from .stream_async import Stream
from .stream_async import Stream
from .logging import logger
@@ -28,7 +28,7 @@ class EventStream(Stream):
continue
try:
response = json.loads(event.data)
response = json.loads(event.data.strip())
except json.JSONDecodeError:
continue
@@ -36,6 +36,7 @@ class EventStream(Stream):
if self.event_stream_stop_event.is_set() or \
self.shutting_down_stream is event_stream:
logger.info(f"SSE {id(event_stream)} disconnected")
self.shutting_down_stream = None
return None
elif response.get('status') == 'connected':
if not self.connected:
@@ -59,10 +60,10 @@ class EventStream(Stream):
self.shutting_down_stream = self.event_stream
self.event_stream = None
await self.start()
# give it an extra sleep to ensure any previous connections have disconnected properly
# this is so we can mark reconnecting to False properly
await asyncio.sleep(1)
self.shutting_down_stream = None
while self.shutting_down_stream is not None:
# ensure any previous connections have disconnected properly
# this is so we can mark reconnecting to False properly
await asyncio.sleep(1)
self.reconnecting = False
def subscribe(self, topics):

View File

@@ -177,22 +177,25 @@ class Stream:
now = time.time()
event = StreamEvent(response, now, now + self.expire)
self._queue_impl(key, event)
if key not in self.queues:
q = self.queues[key] = asyncio.Queue()
else:
q = self.queues[key]
q.put_nowait(event)
# specialized setup for error responses
if 'error' in response:
key = f"{resource}/error"
self._queue_impl(key, event)
# for optimized lookups, notify listeners of individual properties
properties = response.get('properties', {})
for property in properties.keys():
key = f"{resource}/{action}/{property}"
if key not in self.queues:
q = self.queues[key] = asyncio.Queue()
else:
q = self.queues[key]
q.put_nowait(event)
self._queue_impl(key, event)
def _queue_impl(self, key, event):
if key not in self.queues:
q = self.queues[key] = asyncio.Queue()
else:
q = self.queues[key]
q.put_nowait(event)
def requeue(self, event, resource, action, property=None):
if not property:
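
The refactor above collapses the repeated queue-creation code into `_queue_impl`, so one event can be fanned out to a per-resource key, an error key, and per-property keys. A minimal sketch of that fan-out pattern, with illustrative names only:

```
# Hedged sketch: each subscription key lazily gets its own asyncio.Queue,
# and a single event may be pushed onto several keys.
import asyncio

class TinyStream:
    def __init__(self):
        self.queues = {}

    def _queue_impl(self, key, event):
        q = self.queues.setdefault(key, asyncio.Queue())
        q.put_nowait(event)

    def publish(self, resource, action, response):
        self._queue_impl(f"{resource}/{action}", response)
        for prop in response.get("properties", {}):
            self._queue_impl(f"{resource}/{action}/{prop}", response)

async def main():
    s = TinyStream()
    s.publish("cameras/ABC", "is", {"properties": {"batteryLevel": 80}})
    print(await s.queues["cameras/ABC/is/batteryLevel"].get())

asyncio.run(main())
```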

View File

@@ -18,6 +18,7 @@ class ArloDeviceBase(ScryptedDeviceBase, ScryptedDeviceLoggerMixin, BackgroundTa
nativeId: str = None
arlo_device: dict = None
arlo_basestation: dict = None
arlo_capabilities: dict = None
provider: ArloProvider = None
stop_subscriptions: bool = False
@@ -32,6 +33,12 @@ class ArloDeviceBase(ScryptedDeviceBase, ScryptedDeviceLoggerMixin, BackgroundTa
self.provider = provider
self.logger.setLevel(self.provider.get_current_log_level())
try:
self.arlo_capabilities = self.provider.arlo.GetDeviceCapabilities(self.arlo_device)
except Exception as e:
self.logger.warning(f"Could not load device capabilities: {e}")
self.arlo_capabilities = {}
def __del__(self) -> None:
self.stop_subscriptions = True
self.cancel_pending_tasks()

View File

@@ -3,7 +3,7 @@ from __future__ import annotations
from typing import List, TYPE_CHECKING
from scrypted_sdk import ScryptedDeviceBase
from scrypted_sdk.types import Device, DeviceProvider, ScryptedInterface, ScryptedDeviceType
from scrypted_sdk.types import Device, DeviceProvider, Setting, SettingValue, Settings, ScryptedInterface, ScryptedDeviceType
from .base import ArloDeviceBase
from .vss import ArloSirenVirtualSecuritySystem
@@ -13,7 +13,7 @@ if TYPE_CHECKING:
from .provider import ArloProvider
class ArloBasestation(ArloDeviceBase, DeviceProvider):
class ArloBasestation(ArloDeviceBase, DeviceProvider, Settings):
MODELS_WITH_SIRENS = [
"vmb4000",
"vmb4500"
@@ -29,7 +29,10 @@ class ArloBasestation(ArloDeviceBase, DeviceProvider):
return any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloBasestation.MODELS_WITH_SIRENS])
def get_applicable_interfaces(self) -> List[str]:
return [ScryptedInterface.DeviceProvider.value]
return [
ScryptedInterface.DeviceProvider.value,
ScryptedInterface.Settings.value,
]
def get_device_type(self) -> str:
return ScryptedDeviceType.DeviceProvider.value
@@ -68,4 +71,20 @@ class ArloBasestation(ArloDeviceBase, DeviceProvider):
vss_id = f'{self.arlo_device["deviceId"]}.vss'
if not self.vss:
self.vss = ArloSirenVirtualSecuritySystem(vss_id, self.arlo_device, self.arlo_basestation, self.provider, self)
return self.vss
return self.vss
async def getSettings(self) -> List[Setting]:
return [
{
"group": "General",
"key": "print_debug",
"title": "Debug Info",
"description": "Prints information about this device to console.",
"type": "button",
}
]
async def putSetting(self, key: str, value: SettingValue) -> None:
if key == "print_debug":
self.logger.info(f"Device Capabilities: {self.arlo_capabilities}")
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)

View File

@@ -5,18 +5,19 @@ import aiohttp
from async_timeout import timeout as async_timeout
from datetime import datetime, timedelta
import json
import threading
import socket
import time
import threading
from typing import List, TYPE_CHECKING
import scrypted_arlo_go
import scrypted_sdk
from scrypted_sdk.types import Setting, Settings, SettingValue, Device, Camera, VideoCamera, VideoClips, VideoClip, VideoClipOptions, MotionSensor, AudioSensor, Battery, Charger, ChargeState, DeviceProvider, MediaObject, ResponsePictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
from scrypted_sdk.types import Setting, Settings, SettingValue, Device, Camera, VideoCamera, RequestMediaStreamOptions, VideoClips, VideoClip, VideoClipOptions, MotionSensor, AudioSensor, Battery, Charger, ChargeState, DeviceProvider, MediaObject, ResponsePictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
from .debug import EXPERIMENTAL
from .arlo.arlo_async import USER_AGENTS
from .base import ArloDeviceBase
from .spotlight import ArloSpotlight, ArloFloodlight
from .spotlight import ArloSpotlight, ArloFloodlight, ArloNightlight
from .vss import ArloSirenVirtualSecuritySystem
from .child_process import HeartbeatChildProcess
from .util import BackgroundTaskMixin, async_print_exception_guard
@@ -26,13 +27,30 @@ if TYPE_CHECKING:
from .provider import ArloProvider
class ArloCameraIntercomSession(BackgroundTaskMixin):
def __init__(self, camera: ArloCamera) -> None:
super().__init__()
self.camera = camera
self.logger = camera.logger
self.provider = camera.provider
self.arlo_device = camera.arlo_device
self.arlo_basestation = camera.arlo_basestation
async def initialize_push_to_talk(self, media: MediaObject) -> None:
raise NotImplementedError("not implemented")
async def shutdown(self) -> None:
raise NotImplementedError("not implemented")
class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider, VideoClips, MotionSensor, AudioSensor, Battery, Charger):
MODELS_WITH_SPOTLIGHTS = [
"vmc4040p",
"vmc2030",
"vmc2032",
"vmc4040p",
"vmc4041p",
"vmc4050p",
"vmc4060p",
"vmc5040",
"vml2030",
"vml4030",
@@ -40,60 +58,85 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
MODELS_WITH_FLOODLIGHTS = ["fb1001"]
MODELS_WITH_NIGHTLIGHTS = [
"abc1000",
"abc1000a",
]
MODELS_WITH_SIRENS = [
"vmc4040p",
"fb1001",
"vmc2030",
"vmc2020",
"vmc2030",
"vmc2032",
"vmc4030",
"vmc4030p",
"vmc4040p",
"vmc4041p",
"vmc4050p",
"vmc4060p",
"vmc5040",
"vml2030",
"vmc4030",
"vml4030",
"vmc4030p",
]
MODELS_WITH_AUDIO_SENSORS = [
"vmc4040p",
"abc1000",
"abc1000a",
"fb1001",
"vmc4041p",
"vmc4050p",
"vmc5040",
"vmc3040",
"vmc3040s",
"vmc4030",
"vml4030",
"vmc4030p",
"vmc4040p",
"vmc4041p",
"vmc4050p",
"vmc5040",
"vml4030",
]
MODELS_WITHOUT_BATTERY = [
"avd1001",
"vmc2040",
"vmc3040",
"vmc3040s",
]
timeout: int = 30
intercom_session = None
intercom_session: ArloCameraIntercomSession = None
light: ArloSpotlight = None
vss: ArloSirenVirtualSecuritySystem = None
picture_lock: asyncio.Lock = None
# eco mode bookkeeping
picture_lock: asyncio.Lock = None
last_picture: bytes = None
last_picture_time: datetime = datetime(1970, 1, 1)
# socket logger
logger_loop: asyncio.AbstractEventLoop = None
logger_server: asyncio.AbstractServer = None
logger_server_port: int = 0
def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider) -> None:
super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_basestation, provider=provider)
self.picture_lock = asyncio.Lock()
self.start_error_subscription()
self.start_motion_subscription()
self.start_audio_subscription()
self.start_battery_subscription()
self.create_task(self.delayed_init())
def __del__(self) -> None:
super().__del__()
def logger_exit_callback():
self.logger_server.close()
self.logger_loop.stop()
self.logger_loop.close()
self.logger_loop.call_soon_threadsafe(logger_exit_callback)
async def delayed_init(self) -> None:
await self.create_tcp_logger_server()
if not self.has_battery:
return
@@ -111,13 +154,59 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
await asyncio.sleep(0.1)
iterations += 1
@async_print_exception_guard
async def create_tcp_logger_server(self) -> None:
self.logger_loop = asyncio.new_event_loop()
def thread_main():
asyncio.set_event_loop(self.logger_loop)
self.logger_loop.run_forever()
threading.Thread(target=thread_main).start()
# this is a bit convoluted since we need the async functions to run in the
# logger loop thread instead of in the current thread
def setup_callback():
async def callback(reader, writer):
try:
while not reader.at_eof():
line = await reader.readline()
if not line:
break
line = str(line, 'utf-8')
line = line.rstrip()
self.logger.info(line)
writer.close()
await writer.wait_closed()
except Exception:
self.logger.exception("Logger server callback raised an exception")
async def setup():
self.logger_server = await asyncio.start_server(callback, host='localhost', port=0, family=socket.AF_INET, flags=socket.SOCK_STREAM)
self.logger_server_port = self.logger_server.sockets[0].getsockname()[1]
self.logger.info(f"Started logging server at localhost:{self.logger_server_port}")
self.logger_loop.create_task(setup())
self.logger_loop.call_soon_threadsafe(setup_callback)
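The logger server above runs on a dedicated event loop in a background thread, so each camera gets a local TCP sink that other processes can write log lines to. A simplified sketch of the same idea, using `run_coroutine_threadsafe` instead of the `call_soon_threadsafe` setup shown in the diff:

```
# Hedged sketch: an asyncio TCP log sink on its own loop/thread, so clients
# can stream log lines without touching the caller's event loop.
import asyncio
import threading

loop = asyncio.new_event_loop()

def run_loop():
    asyncio.set_event_loop(loop)
    loop.run_forever()

threading.Thread(target=run_loop, daemon=True).start()

async def handle(reader, writer):
    while not reader.at_eof():
        line = await reader.readline()
        if not line:
            break
        print("[log]", line.decode().rstrip())
    writer.close()
    await writer.wait_closed()

async def serve():
    server = await asyncio.start_server(handle, host="localhost", port=0)
    print("listening on", server.sockets[0].getsockname()[1])

# schedule the server coroutine on the logger loop from this thread
asyncio.run_coroutine_threadsafe(serve(), loop)
```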
def start_error_subscription(self) -> None:
def callback(code, message):
self.logger.error(f"Arlo returned error code {code} with message: {message}")
return self.stop_subscriptions
self.register_task(
self.provider.arlo.SubscribeToErrorEvents(self.arlo_basestation, self.arlo_device, callback)
)
def start_motion_subscription(self) -> None:
def callback(motionDetected):
self.motionDetected = motionDetected
return self.stop_subscriptions
self.register_task(
self.provider.arlo.SubscribeToMotionEvents(self.arlo_basestation, self.arlo_device, callback)
self.provider.arlo.SubscribeToMotionEvents(self.arlo_basestation, self.arlo_device, callback, self.logger)
)
def start_audio_subscription(self) -> None:
@@ -129,7 +218,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
return self.stop_subscriptions
self.register_task(
self.provider.arlo.SubscribeToAudioEvents(self.arlo_basestation, self.arlo_device, callback)
self.provider.arlo.SubscribeToAudioEvents(self.arlo_basestation, self.arlo_device, callback, self.logger)
)
def start_battery_subscription(self) -> None:
@@ -152,14 +241,8 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
ScryptedInterface.Settings.value,
])
if EXPERIMENTAL:
if self.two_way_audio:
results.discard(ScryptedInterface.RTCSignalingChannel.value)
results.add(ScryptedInterface.Intercom.value)
if self.webrtc_emulation:
results.add(ScryptedInterface.RTCSignalingChannel.value)
results.discard(ScryptedInterface.Intercom.value)
if self.has_push_to_talk:
results.add(ScryptedInterface.Intercom.value)
if self.has_battery:
results.add(ScryptedInterface.Battery.value)
@@ -174,11 +257,6 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
if self.has_cloud_recording:
results.add(ScryptedInterface.VideoClips.value)
if EXPERIMENTAL:
if not self._can_push_to_talk():
results.discard(ScryptedInterface.RTCSignalingChannel.value)
results.discard(ScryptedInterface.Intercom.value)
return list(results)
def get_device_type(self) -> str:
@@ -186,8 +264,8 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
def get_builtin_child_device_manifests(self) -> List[Device]:
results = []
if self.has_spotlight or self.has_floodlight:
light = self.get_or_create_spotlight_or_floodlight()
if self.has_spotlight or self.has_floodlight or self.has_nightlight:
light = self.get_or_create_light()
results.append({
"info": {
"model": f"{self.arlo_device['modelId']} {self.arlo_device['properties'].get('hwVersion', '')}".strip(),
@@ -196,7 +274,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
"serialNumber": self.arlo_device["deviceId"],
},
"nativeId": light.nativeId,
"name": f'{self.arlo_device["deviceName"]} {"Spotlight" if self.has_spotlight else "Floodlight"}',
"name": f'{self.arlo_device["deviceName"]} {"Spotlight" if self.has_spotlight else "Floodlight" if self.has_floodlight else "Nightlight"}',
"interfaces": light.get_applicable_interfaces(),
"type": light.get_device_type(),
"providerNativeId": self.nativeId,
@@ -220,23 +298,6 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
] + vss.get_builtin_child_device_manifests())
return results
@property
def webrtc_emulation(self) -> bool:
if self.storage:
return True if self.storage.getItem("webrtc_emulation") else False
else:
return False
@property
def two_way_audio(self) -> bool:
if self.storage:
val = self.storage.getItem("two_way_audio")
if val is None:
val = True
return val
else:
return True
@property
def wired_to_power(self) -> bool:
if self.storage:
@@ -252,7 +313,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
return False
@property
def snapshot_throttle_interval(self) -> bool:
def snapshot_throttle_interval(self) -> int:
interval = self.storage.getItem("snapshot_throttle_interval")
if interval is None:
interval = 60
@@ -271,6 +332,10 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
def has_floodlight(self) -> bool:
return any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITH_FLOODLIGHTS])
@property
def has_nightlight(self) -> bool:
return any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITH_NIGHTLIGHTS])
@property
def has_siren(self) -> bool:
return any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITH_SIRENS])
@@ -283,6 +348,14 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
def has_battery(self) -> bool:
return not any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITHOUT_BATTERY])
@property
def has_push_to_talk(self) -> bool:
return bool(self.arlo_capabilities.get("Capabilities", {}).get("PushToTalk", {}).get("fullDuplex"))
@property
def uses_sip_push_to_talk(self) -> bool:
return "sip" in self.arlo_capabilities.get("Capabilities", {}).get("PushToTalk", {}).get("signal", [])
async def getSettings(self) -> List[Setting]:
result = []
if self.has_battery:
@@ -323,26 +396,15 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
"type": "number",
}
)
if self._can_push_to_talk() and EXPERIMENTAL:
result.extend([
{
"group": "General",
"key": "two_way_audio",
"title": "(Experimental) Enable native two-way audio",
"value": self.two_way_audio,
"description": "Enables two-way audio for this device. Not yet completely functional on all audio senders.",
"type": "boolean",
},
{
"group": "General",
"key": "webrtc_emulation",
"title": "(Highly Experimental) Emulate WebRTC Camera",
"value": self.webrtc_emulation,
"description": "Configures the plugin to offer this device as a WebRTC camera, merging video/audio stream with two-way audio. "
"If enabled, takes precedence over native two-way audio. May use increased system resources.",
"type": "boolean",
},
])
result.append(
{
"group": "General",
"key": "print_debug",
"title": "Debug Info",
"description": "Prints information about this device to console.",
"type": "button",
}
)
return result
@async_print_exception_guard
@@ -351,11 +413,13 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)
return
if key in ["webrtc_emulation", "two_way_audio", "wired_to_power"]:
if key in ["wired_to_power"]:
self.storage.setItem(key, value == "true" or value == True)
await self.provider.discover_devices()
elif key in ["eco_mode"]:
self.storage.setItem(key, value == "true" or value == True)
elif key == "print_debug":
self.logger.info(f"Device Capabilities: {self.arlo_capabilities}")
else:
self.storage.setItem(key, value)
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)
@@ -396,7 +460,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
self.logger.debug(f"Got snapshot URL for at {pic_url}")
if pic_url is None:
raise Exception("Error taking snapshot")
raise Exception("Error taking snapshot: no url returned")
async with async_timeout(self.timeout):
async with aiohttp.ClientSession() as session:
@@ -408,8 +472,8 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
return await scrypted_sdk.mediaManager.createMediaObject(self.last_picture, "image/jpeg")
async def getVideoStreamOptions(self) -> List[ResponseMediaStreamOptions]:
return [
async def getVideoStreamOptions(self, id: str = None) -> List[ResponseMediaStreamOptions]:
options = [
{
"id": 'default',
"name": 'Cloud RTSP',
@@ -423,73 +487,86 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
"source": 'cloud',
"tool": 'scrypted',
"userConfigurable": False,
},
{
"id": 'dash',
"name": 'Cloud DASH',
"container": 'dash',
"video": {
"codec": 'unknown',
},
"audio": None if self.arlo_device.get("modelId") == "VMC3030" else {
"codec": 'unknown',
},
"source": 'cloud',
"tool": 'ffmpeg',
"userConfigurable": False,
}
]
async def _getVideoStreamURL(self) -> str:
self.logger.info("Requesting stream")
rtsp_url = await asyncio.wait_for(self.provider.arlo.StartStream(self.arlo_basestation, self.arlo_device), timeout=self.timeout)
self.logger.debug(f"Got stream URL at {rtsp_url}")
return rtsp_url
if id is None:
return options
async def getVideoStream(self, options: dict = None) -> MediaObject:
return next(iter([o for o in options if o['id'] == id]))
async def _getVideoStreamURL(self, container: str) -> str:
self.logger.info(f"Requesting {container} stream")
url = await asyncio.wait_for(self.provider.arlo.StartStream(self.arlo_basestation, self.arlo_device, mode=container), timeout=self.timeout)
self.logger.debug(f"Got {container} stream URL at {url}")
return url
@async_print_exception_guard
async def getVideoStream(self, options: RequestMediaStreamOptions = None) -> MediaObject:
self.logger.debug("Entered getVideoStream")
rtsp_url = await self._getVideoStreamURL()
mso = (await self.getVideoStreamOptions())[0]
mso = await self.getVideoStreamOptions(id=options["id"])
mso['refreshAt'] = round(time.time() * 1000) + 30 * 60 * 1000
container = mso["container"]
url = await self._getVideoStreamURL(container)
additional_ffmpeg_args = []
if container == "dash":
headers = self.provider.arlo.GetMPDHeaders(url)
ffmpeg_headers = '\r\n'.join([
f'{k}: {v}'
for k, v in headers.items()
])
additional_ffmpeg_args = ['-headers', ffmpeg_headers+'\r\n']
ffmpeg_input = {
'url': rtsp_url,
'container': 'rtsp',
'url': url,
'container': container,
'mediaStreamOptions': mso,
'inputArguments': [
'-f', 'rtsp',
'-i', rtsp_url,
'-f', container,
*additional_ffmpeg_args,
'-i', url,
]
}
return await scrypted_sdk.mediaManager.createFFmpegMediaObject(ffmpeg_input)
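For the DASH case above, the MPD headers have to reach ffmpeg through its `-headers` option, which expects CRLF-separated `Key: Value` pairs ending in a CRLF. A tiny sketch with placeholder values:

```
# Hedged sketch: building ffmpeg's -headers argument from a dict.
headers = {"Authorization": "Bearer <token>", "User-Agent": "scrypted-arlo"}
ffmpeg_headers = "\r\n".join(f"{k}: {v}" for k, v in headers.items()) + "\r\n"
args = ["-headers", ffmpeg_headers, "-i", "https://example.com/stream.mpd"]
print(args)
```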
@async_print_exception_guard
async def startRTCSignalingSession(self, scrypted_session):
plugin_session = ArloCameraRTCSignalingSession(self)
await plugin_session.initialize()
scrypted_setup = {
"type": "offer",
"audio": {
"direction": "sendrecv" if self._can_push_to_talk() else "recvonly",
},
"video": {
"direction": "recvonly",
}
}
plugin_setup = {}
scrypted_offer = await scrypted_session.createLocalDescription("offer", scrypted_setup, sendIceCandidate=plugin_session.addIceCandidate)
self.logger.info(f"Scrypted offer sdp:\n{scrypted_offer['sdp']}")
await plugin_session.setRemoteDescription(scrypted_offer, plugin_setup)
plugin_answer = await plugin_session.createLocalDescription("answer", plugin_setup, scrypted_session.sendIceCandidate)
self.logger.info(f"Scrypted answer sdp:\n{plugin_answer['sdp']}")
await scrypted_session.setRemoteDescription(plugin_answer, scrypted_setup)
return ArloCameraRTCSessionControl(plugin_session)
async def startIntercom(self, media) -> None:
async def startIntercom(self, media: MediaObject) -> None:
self.logger.info("Starting intercom")
self.intercom_session = ArloCameraRTCSignalingSession(self)
if self.uses_sip_push_to_talk:
# signaling happens over sip
self.intercom_session = ArloCameraSIPIntercomSession(self)
else:
# we need to do signaling through arlo cloud apis
self.intercom_session = ArloCameraWebRTCIntercomSession(self)
await self.intercom_session.initialize_push_to_talk(media)
self.logger.info("Intercom initialized")
@async_print_exception_guard
async def stopIntercom(self) -> None:
self.logger.info("Stopping intercom")
if self.intercom_session is not None:
await self.intercom_session.shutdown()
self.intercom_session = None
def _can_push_to_talk(self) -> bool:
# Right now, only implement push to talk for basestation cameras
return self.arlo_device["deviceId"] != self.arlo_device["parentId"]
async def getVideoClip(self, videoId: str) -> MediaObject:
self.logger.info(f"Getting video clip {videoId}")
@@ -551,17 +628,17 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
@async_print_exception_guard
async def removeVideoClips(self, videoClipIds: List[str]) -> None:
# Arlo does support deleting, but let's be safe and disable that
raise Exception("deleting Arlo video clips is not implemented by this plugin")
# Arlo Cloud does support deleting, but let's be safe and not expose that here
raise Exception("deleting Arlo video clips is not implemented by this plugin - please delete clips through the Arlo app")
async def getDevice(self, nativeId: str) -> ArloDeviceBase:
if (nativeId.endswith("spotlight") and self.has_spotlight) or (nativeId.endswith("floodlight") and self.has_floodlight):
return self.get_or_create_spotlight_or_floodlight()
if (nativeId.endswith("spotlight") and self.has_spotlight) or (nativeId.endswith("floodlight") and self.has_floodlight) or (nativeId.endswith("nightlight") and self.has_nightlight):
return self.get_or_create_light()
if nativeId.endswith("vss") and self.has_siren:
return self.get_or_create_vss()
return None
def get_or_create_spotlight_or_floodlight(self) -> ArloSpotlight:
def get_or_create_light(self) -> ArloSpotlight:
if self.has_spotlight:
light_id = f'{self.arlo_device["deviceId"]}.spotlight'
if not self.light:
@@ -570,6 +647,10 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
light_id = f'{self.arlo_device["deviceId"]}.floodlight'
if not self.light:
self.light = ArloFloodlight(light_id, self.arlo_device, self.arlo_basestation, self.provider, self)
elif self.has_nightlight:
light_id = f'{self.arlo_device["deviceId"]}.nightlight'
if not self.light:
self.light = ArloNightlight(light_id, self.arlo_device, self.provider, self)
return self.light
def get_or_create_vss(self) -> ArloSirenVirtualSecuritySystem:
@@ -580,31 +661,24 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
return self.vss
class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
def __init__(self, camera):
super().__init__()
self.camera = camera
self.logger = camera.logger
self.provider = camera.provider
self.arlo_device = camera.arlo_device
self.arlo_basestation = camera.arlo_basestation
class ArloCameraWebRTCIntercomSession(ArloCameraIntercomSession):
def __init__(self, camera: ArloCamera) -> None:
super().__init__(camera)
self.ffmpeg_subprocess = None
self.intercom_ffmpeg_subprocess = None
self.scrypted_pc = None
self.arlo_pc = None
self.arlo_sdp_answered = False
self.intercom_ffmpeg_subprocess = None
self.stop_subscriptions = False
self.start_sdp_answer_subscription()
self.start_candidate_answer_subscription()
def __del__(self):
def __del__(self) -> None:
self.stop_subscriptions = True
self.cancel_pending_tasks()
def start_sdp_answer_subscription(self):
def start_sdp_answer_subscription(self) -> None:
def callback(sdp):
if self.arlo_pc and not self.arlo_sdp_answered:
if "a=mid:" not in sdp:
@@ -622,7 +696,7 @@ class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
self.provider.arlo.SubscribeToSDPAnswers(self.arlo_basestation, self.arlo_device, callback)
)
def start_candidate_answer_subscription(self):
def start_candidate_answer_subscription(self) -> None:
def callback(candidate):
if self.arlo_pc:
prefix = "a=candidate:"
@@ -639,24 +713,27 @@ class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
self.provider.arlo.SubscribeToCandidateAnswers(self.arlo_basestation, self.arlo_device, callback)
)
async def initialize(self):
self.logger.info("Initializing video stream for RTC")
rtsp_url = await self.camera._getVideoStreamURL()
@async_print_exception_guard
async def initialize_push_to_talk(self, media: MediaObject) -> None:
self.logger.info("Initializing push to talk")
cfg = scrypted_arlo_go.WebRTCConfiguration(
ICEServers=scrypted_arlo_go.Slice_webrtc_ICEServer([
scrypted_arlo_go.NewWebRTCICEServer(
scrypted_arlo_go.go.Slice_string(["turn:turn0.clockworkmod.com", "turn:n0.clockworkmod.com", "turn:n1.clockworkmod.com"]),
"foo",
"bar"
)
])
)
cfg = scrypted_arlo_go.WebRTCConfiguration()
self.scrypted_pc = scrypted_arlo_go.NewWebRTCManager("Arlo "+self.camera.logger_name, cfg)
session_id, ice_servers = self.provider.arlo.StartPushToTalk(self.arlo_basestation, self.arlo_device)
self.logger.debug(f"Received ice servers: {[ice['url'] for ice in ice_servers]}")
audio_port = self.scrypted_pc.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
video_port = self.scrypted_pc.InitializeVideoRTPListener(scrypted_arlo_go.WebRTCMimeTypeH264)
ice_servers = scrypted_arlo_go.Slice_webrtc_ICEServer([
scrypted_arlo_go.NewWebRTCICEServer(
scrypted_arlo_go.go.Slice_string([ice['url']]),
ice.get('username', ''),
ice.get('credential', '')
)
for ice in ice_servers
])
self.arlo_pc = scrypted_arlo_go.NewWebRTCManager(self.camera.logger_server_port, ice_servers)
ffmpeg_params = json.loads(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput.value))
self.logger.debug(f"Received ffmpeg params: {ffmpeg_params}")
audio_port = self.arlo_pc.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
ffmpeg_path = await scrypted_sdk.mediaManager.getFFmpegPath()
ffmpeg_args = [
@@ -665,200 +742,163 @@ class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
"-loglevel", "error",
"-analyzeduration", "0",
"-fflags", "-nobuffer",
"-max_probe_packets", "2",
"-vcodec", "h264",
"-acodec", "aac",
"-i", rtsp_url,
"-an",
"-vcodec", "copy",
"-f", "rtp",
"-flush_packets", "1",
f"rtp://localhost:{video_port}",
"-vn",
"-probesize", "500000",
*ffmpeg_params["inputArguments"],
"-acodec", "libopus",
"-flags", "+global_header",
"-vbr", "off",
"-ar", "48k",
"-b:a", "32k",
"-bufsize", "96k",
"-ac", "2",
"-application", "lowdelay",
"-dn", "-sn", "-vn",
"-frame_duration", "20",
"-f", "rtp",
"-flush_packets", "1",
f"rtp://localhost:{audio_port}?pkt_size={scrypted_arlo_go.UDP_PACKET_SIZE()}",
]
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with {ffmpeg_args}")
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with '{' '.join(ffmpeg_args)}'")
self.ffmpeg_subprocess = HeartbeatChildProcess("Arlo "+self.camera.logger_name, ffmpeg_path, *ffmpeg_args)
self.ffmpeg_subprocess.start()
self.intercom_ffmpeg_subprocess = HeartbeatChildProcess("FFmpeg", self.camera.logger_server_port, ffmpeg_path, *ffmpeg_args)
self.intercom_ffmpeg_subprocess.start()
if self.camera._can_push_to_talk():
self.create_task(self.initialize_push_to_talk())
self.sdp_answered = False
async def initialize_push_to_talk(self, media=None):
try:
self.logger.info("Initializing push to talk")
offer = self.arlo_pc.CreateOffer()
offer_sdp = scrypted_arlo_go.WebRTCSessionDescriptionSDP(offer)
self.logger.info(f"Arlo offer sdp:\n{offer_sdp}")
session_id, ice_servers = self.provider.arlo.StartPushToTalk(self.arlo_basestation, self.arlo_device)
self.logger.debug(f"Received ice servers: {[ice['url'] for ice in ice_servers]}")
self.arlo_pc.SetLocalDescription(offer)
cfg = scrypted_arlo_go.WebRTCConfiguration(
ICEServers=scrypted_arlo_go.Slice_webrtc_ICEServer([
scrypted_arlo_go.NewWebRTCICEServer(
scrypted_arlo_go.go.Slice_string([ice['url']]),
ice.get('username', ''),
ice.get('credential', '')
self.provider.arlo.NotifyPushToTalkSDP(
self.arlo_basestation, self.arlo_device,
session_id, offer_sdp
)
def trickle_candidates():
count = 0
try:
while True:
candidate = self.arlo_pc.GetNextICECandidate()
candidate = scrypted_arlo_go.WebRTCICECandidateInit(
scrypted_arlo_go.WebRTCICECandidate(handle=candidate.handle).ToJSON()
).Candidate
self.logger.debug(f"Sending candidate to Arlo: {candidate}")
self.provider.arlo.NotifyPushToTalkCandidate(
self.arlo_basestation, self.arlo_device,
session_id, candidate,
)
for ice in ice_servers
])
)
self.arlo_pc = scrypted_arlo_go.NewWebRTCManager("Arlo "+self.camera.logger_name, cfg)
count += 1
except RuntimeError as e:
if str(e) == "no more candidates":
self.logger.debug(f"End of candidates, found {count} candidate(s)")
else:
self.logger.exception("Exception while processing trickle candidates")
except Exception:
self.logger.exception("Exception while processing trickle candidates")
if media is not None:
ffmpeg_params = json.loads(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput.value))
self.logger.debug(f"Received ffmpeg params: {ffmpeg_params}")
audio_port = self.arlo_pc.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
# we can trickle candidates asynchronously so the caller to startIntercom
# knows we are ready to receive packets
threading.Thread(target=trickle_candidates).start()
ffmpeg_path = await scrypted_sdk.mediaManager.getFFmpegPath()
ffmpeg_args = [
"-y",
"-hide_banner",
"-loglevel", "error",
"-analyzeduration", "0",
"-fflags", "-nobuffer",
"-probesize", "500000",
*ffmpeg_params["inputArguments"],
"-vn",
"-acodec", "libopus",
"-f", "rtp",
"-flush_packets", "1",
f"rtp://localhost:{audio_port}?pkt_size={scrypted_arlo_go.UDP_PACKET_SIZE()}",
]
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with {ffmpeg_args}")
self.intercom_ffmpeg_subprocess = HeartbeatChildProcess("Arlo "+self.camera.logger_name, ffmpeg_path, *ffmpeg_args)
self.intercom_ffmpeg_subprocess.start()
else:
self.logger.debug("Starting audio track forwarder")
self.scrypted_pc.ForwardAudioTo(self.arlo_pc)
self.logger.debug("Started audio track forwarder")
self.sdp_answered = False
offer = self.arlo_pc.CreateOffer()
offer_sdp = scrypted_arlo_go.WebRTCSessionDescriptionSDP(offer)
self.logger.info(f"Arlo offer sdp:\n{offer_sdp}")
self.arlo_pc.SetLocalDescription(offer)
self.provider.arlo.NotifyPushToTalkSDP(
self.arlo_basestation, self.arlo_device,
session_id, offer_sdp
)
def forward_candidates():
try:
candidates = self.arlo_pc.WaitAndGetICECandidates()
self.logger.debug(f"Gathered {len(candidates)} candidates")
for candidate in candidates:
candidate = scrypted_arlo_go.WebRTCICECandidateInit(
scrypted_arlo_go.WebRTCICECandidate(handle=candidate).ToJSON()
).Candidate
self.logger.debug(f"Sending candidate to Arlo: {candidate}")
self.provider.arlo.NotifyPushToTalkCandidate(
self.arlo_basestation, self.arlo_device,
session_id, candidate,
)
except Exception as e:
self.logger.error(e)
t = threading.Thread(target=forward_candidates)
t.start()
except Exception as e:
self.logger.error(e)
async def createLocalDescription(self, type, setup, sendIceCandidate=None):
if type == "offer":
raise Exception("can only create answers in ArloCameraRTCSignalingSession.createLocalDescription")
answer = self.scrypted_pc.CreateAnswer()
answer_sdp = scrypted_arlo_go.WebRTCSessionDescriptionSDP(answer)
self.scrypted_pc.SetLocalDescription(answer)
if sendIceCandidate is not None:
loop = asyncio.get_event_loop()
def forward_candidates():
try:
candidates = self.scrypted_pc.WaitAndGetICECandidates()
self.logger.debug(f"Gathered {len(candidates)} candidates")
for candidate in candidates:
candidate = scrypted_arlo_go.WebRTCICECandidateInit(
scrypted_arlo_go.WebRTCICECandidate(handle=candidate).ToJSON()
).Candidate
self.logger.debug(f"Sending candidate to scrypted: {candidate}")
loop.call_soon_threadsafe(
self.create_task,
sendIceCandidate({
"candidate": candidate,
"sdpMid": "0",
"sdpMLineIndex": 0,
})
)
except Exception as e:
self.logger.error(e)
t = threading.Thread(target=forward_candidates)
t.start()
return {
"sdp": answer_sdp,
"type": "answer"
}
async def setRemoteDescription(self, description, setup):
if description["type"] != "offer":
raise Exception("can only accept offers in ArloCameraRTCSignalingSession.createLocalDescription")
sdp = scrypted_arlo_go.WebRTCSessionDescription(scrypted_arlo_go.NewWebRTCSDPType("offer"), description["sdp"])
self.scrypted_pc.SetRemoteDescription(sdp)
async def addIceCandidate(self, candidate):
candidate = scrypted_arlo_go.WebRTCICECandidateInit(candidate["candidate"], "0", 0)
self.scrypted_pc.AddICECandidate(candidate)
async def getOptions(self):
pass
async def unmute_relay(self):
return
await self.arlo_pc.unmute_relay(self.arlo_relay_track)
async def mute_relay(self):
return
await self.arlo_pc.mute_relay(self.arlo_relay_track)
async def shutdown(self):
if self.ffmpeg_subprocess is not None:
self.ffmpeg_subprocess.stop()
self.ffmpeg_subprocess = None
@async_print_exception_guard
async def shutdown(self) -> None:
if self.intercom_ffmpeg_subprocess is not None:
self.intercom_ffmpeg_subprocess.stop()
self.intercom_ffmpeg_subprocess = None
if self.scrypted_pc is not None:
self.scrypted_pc.Close()
self.scrypted_pc = None
if self.arlo_pc is not None:
self.arlo_pc.Close()
self.arlo_pc = None
class ArloCameraRTCSessionControl:
def __init__(self, arlo_session):
self.arlo_session = arlo_session
self.logger = arlo_session.logger
class ArloCameraSIPIntercomSession(ArloCameraIntercomSession):
def __init__(self, camera: ArloCamera) -> None:
super().__init__(camera)
async def setPlayback(self, options):
self.logger.debug(f"setPlayback options {options}")
audio = options.get("audio")
if audio is None:
return
if audio:
await self.arlo_session.unmute_relay()
else:
await self.arlo_session.mute_relay()
self.arlo_sip = None
self.intercom_ffmpeg_subprocess = None
async def endSession(self):
self.logger.info("Ending RTC session")
await self.arlo_session.shutdown()
@async_print_exception_guard
async def initialize_push_to_talk(self, media: MediaObject) -> None:
self.logger.info("Initializing push to talk")
sip_info = self.provider.arlo.GetSIPInfo()
sip_call_info = sip_info["sipCallInfo"]
# though GetSIPInfo returns ice servers, there doesn't seem to be any indication
# that they are used on the arlo web dashboard, so just use what Chrome inserts
ice_servers = [{"url": "stun:stun.l.google.com:19302"}]
self.logger.debug(f"Will use ice servers: {[ice['url'] for ice in ice_servers]}")
ice_servers = scrypted_arlo_go.Slice_webrtc_ICEServer([
scrypted_arlo_go.NewWebRTCICEServer(
scrypted_arlo_go.go.Slice_string([ice['url']]),
ice.get('username', ''),
ice.get('credential', '')
)
for ice in ice_servers
])
sip_cfg = scrypted_arlo_go.SIPInfo(
DeviceID=self.camera.nativeId,
CallerURI=f"sip:{sip_call_info['id']}@{sip_call_info['domain']}:{sip_call_info['port']}",
CalleeURI=sip_call_info['calleeUri'],
Password=sip_call_info['password'],
UserAgent="SIP.js/0.20.1",
WebsocketURI="wss://livestream-z2-prod.arlo.com:7443",
WebsocketOrigin="https://my.arlo.com",
WebsocketHeaders=scrypted_arlo_go.HeadersMap({"User-Agent": USER_AGENTS["arlo"]}),
)
self.arlo_sip = scrypted_arlo_go.NewSIPWebRTCManager(self.camera.logger_server_port, ice_servers, sip_cfg)
ffmpeg_params = json.loads(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput.value))
self.logger.debug(f"Received ffmpeg params: {ffmpeg_params}")
audio_port = self.arlo_sip.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
ffmpeg_path = await scrypted_sdk.mediaManager.getFFmpegPath()
ffmpeg_args = [
"-y",
"-hide_banner",
"-loglevel", "error",
"-analyzeduration", "0",
"-fflags", "-nobuffer",
"-probesize", "500000",
*ffmpeg_params["inputArguments"],
"-acodec", "libopus",
"-flags", "+global_header",
"-vbr", "off",
"-ar", "48k",
"-b:a", "32k",
"-bufsize", "96k",
"-ac", "2",
"-application", "lowdelay",
"-dn", "-sn", "-vn",
"-frame_duration", "20",
"-f", "rtp",
"-flush_packets", "1",
f"rtp://localhost:{audio_port}?pkt_size={scrypted_arlo_go.UDP_PACKET_SIZE()}",
]
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with '{' '.join(ffmpeg_args)}'")
self.intercom_ffmpeg_subprocess = HeartbeatChildProcess("FFmpeg", self.camera.logger_server_port, ffmpeg_path, *ffmpeg_args)
self.intercom_ffmpeg_subprocess.start()
def sip_start():
try:
self.arlo_sip.Start()
except Exception:
self.logger.exception("Exception starting sip call")
# do remaining setup asynchronously so the caller to startIntercom
# can start sending packets
threading.Thread(target=sip_start).start()
@async_print_exception_guard
async def shutdown(self) -> None:
if self.intercom_ffmpeg_subprocess is not None:
self.intercom_ffmpeg_subprocess.stop()
self.intercom_ffmpeg_subprocess = None
if self.arlo_sip is not None:
self.arlo_sip.Close()
self.arlo_sip = None

View File

@@ -3,42 +3,74 @@ import subprocess
import time
import threading
import scrypted_arlo_go
HEARTBEAT_INTERVAL = 5
def multiprocess_main(name, child_conn, exe, args):
print(f"[{name}] Child process starting")
sp = subprocess.Popen([exe, *args])
def multiprocess_main(name, logger_port, child_conn, exe, args):
logger = scrypted_arlo_go.NewTCPLogger(logger_port, "HeartbeatChildProcess")
logger.Send(f"{name} starting\n")
sp = subprocess.Popen([exe, *args], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# pull stdout and stderr from the subprocess and forward it over to
# our tcp logger
def logging_thread(stdstream):
while True:
line = stdstream.readline()
if not line:
break
line = str(line, 'utf-8')
logger.Send(line)
stdout_t = threading.Thread(target=logging_thread, args=(sp.stdout,))
stderr_t = threading.Thread(target=logging_thread, args=(sp.stderr,))
stdout_t.start()
stderr_t.start()
while True:
has_data = child_conn.poll(HEARTBEAT_INTERVAL * 3)
if not has_data:
break
# check if the subprocess is still alive, if not then exit
if sp.poll() is not None:
break
keep_alive = child_conn.recv()
if not keep_alive:
break
logger.Send(f"{name} exiting\n")
sp.terminate()
sp.wait()
print(f"[{name}] Child process exiting")
stdout_t.join()
stderr_t.join()
logger.Send(f"{name} exited\n")
logger.Close()
class HeartbeatChildProcess:
"""Class to manage running a child process that gets cleaned up if the parent exits.
When spawning subprocesses in Python, if the parent is forcibly killed (as is the case
when Scrypted restarts plugins), subprocesses get orphaned. This approach uses parent-child
heartbeats for the child to ensure that the parent process is still alive, and to cleanly
exit the child if the parent has terminated.
"""
def __init__(self, name, exe, *args):
def __init__(self, name, logger_port, exe, *args):
self.name = name
self.logger_port = logger_port
self.exe = exe
self.args = args
self.parent_conn, self.child_conn = multiprocessing.Pipe()
self.process = multiprocessing.Process(target=multiprocess_main, args=(name, self.child_conn, exe, args))
self.process = multiprocessing.Process(target=multiprocess_main, args=(name, logger_port, self.child_conn, exe, args))
self.process.daemon = True
self._stop = False
@@ -55,4 +87,7 @@ class HeartbeatChildProcess:
def heartbeat(self):
while not self._stop:
time.sleep(HEARTBEAT_INTERVAL)
if not self.process.is_alive():
self.stop()
break
self.parent_conn.send(True)
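The heartbeat scheme described in the docstring keeps ffmpeg children from being orphaned: the parent pings the child over a `multiprocessing.Pipe`, and the child tears down its subprocess when the pings stop. A self-contained sketch of the pattern (Unix `sleep` used as a stand-in for ffmpeg):

```
# Hedged sketch of the parent/child heartbeat pattern.
import multiprocessing
import subprocess
import time

HEARTBEAT_INTERVAL = 5

def child_main(conn, cmd):
    proc = subprocess.Popen(cmd)
    while True:
        # no heartbeat within 3 intervals -> assume the parent died
        if not conn.poll(HEARTBEAT_INTERVAL * 3):
            break
        if not conn.recv():
            break  # parent asked us to stop
    proc.terminate()
    proc.wait()

if __name__ == "__main__":
    parent_conn, child_conn = multiprocessing.Pipe()
    worker = multiprocessing.Process(target=child_main, args=(child_conn, ["sleep", "60"]))
    worker.daemon = True
    worker.start()
    for _ in range(3):
        time.sleep(HEARTBEAT_INTERVAL)
        parent_conn.send(True)   # keep-alive
    parent_conn.send(False)      # graceful stop
    worker.join()
```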

View File

@@ -1 +0,0 @@
EXPERIMENTAL = False

View File

@@ -0,0 +1,3 @@
import os
EXPERIMENTAL = os.environ.get("SCRYPTED_ARLO_EXPERIMENTAL", "0") not in ["", "0"]

View File

@@ -23,7 +23,7 @@ def createScryptedLogger(scrypted_device, name):
sh = ScryptedDeviceLoggingWrapper(scrypted_device)
# log formatting
fmt = logging.Formatter("[Arlo %(name)s] %(message)s")
fmt = logging.Formatter("[Arlo %(name)s]: %(message)s")
sh.setFormatter(fmt)
# configure handler to logger

View File

@@ -1,11 +1,12 @@
import asyncio
from bs4 import BeautifulSoup
import email
import functools
import imaplib
import json
import logging
import re
import requests
import traceback
from typing import List
import scrypted_sdk
@@ -43,7 +44,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
def __init__(self, nativeId: str = None) -> None:
super().__init__(nativeId=nativeId)
self.logger_name = "provider"
self.logger_name = "Provider"
self.arlo_cameras = {}
self.arlo_basestations = {}
@@ -87,6 +88,9 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
@property
def arlo_transport(self) -> str:
return "SSE"
# This code is here for posterity; however, it appears that as of 06/01/2023
# Arlo has disabled the MQTT backend
transport = self.storage.getItem("arlo_transport")
if transport is None or transport not in ArloProvider.arlo_transport_choices:
transport = "SSE"
@@ -137,6 +141,14 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
def imap_mfa_password(self) -> str:
return self.storage.getItem("imap_mfa_password")
@property
def imap_mfa_sender(self) -> str:
sender = self.storage.getItem("imap_mfa_sender")
if sender is None or sender == "":
sender = "do_not_reply@arlo.com"
self.storage.setItem("imap_mfa_sender", sender)
return sender
@property
def imap_mfa_interval(self) -> int:
interval = self.storage.getItem("imap_mfa_interval")
@@ -149,13 +161,15 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
def arlo(self) -> Arlo:
if self._arlo is not None:
if self._arlo_mfa_complete_auth is not None:
if self._arlo_mfa_code == "":
if not self._arlo_mfa_code:
return None
self.logger.info("Completing Arlo MFA...")
self._arlo_mfa_complete_auth(self._arlo_mfa_code)
self._arlo_mfa_complete_auth = None
self._arlo_mfa_code = None
try:
self._arlo_mfa_complete_auth(self._arlo_mfa_code)
finally:
self._arlo_mfa_complete_auth = None
self._arlo_mfa_code = None
self.logger.info("Arlo MFA done")
self.storage.setItem("arlo_auth_headers", json.dumps(dict(self._arlo.request.session.headers.items())))
@@ -175,18 +189,18 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
if headers:
self._arlo.UseExistingAuth(self.arlo_user_id, json.loads(headers))
self.logger.info(f"Initialized Arlo client, reusing stored auth headers")
self.create_task(self.do_arlo_setup())
return self._arlo
else:
self._arlo_mfa_complete_auth = self._arlo.LoginMFA()
self.logger.info(f"Initialized Arlo client, waiting for MFA code")
return None
except Exception as e:
traceback.print_exc()
except Exception:
self.logger.exception("Error initializing Arlo client")
self._arlo = None
self._arlo_mfa_complete_auth = None
self._arlo_mfa_code = None
return None
raise
async def do_arlo_setup(self) -> None:
try:
@@ -196,15 +210,15 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
])
self.arlo.event_stream.set_refresh_interval(self.refresh_interval)
except requests.exceptions.HTTPError as e:
traceback.print_exc()
self.logger.error(f"Error logging in, will retry with fresh login")
except requests.exceptions.HTTPError:
self.logger.exception("Error logging in")
self.logger.error("Will retry with fresh login")
self._arlo = None
self._arlo_mfa_code = None
self.storage.setItem("arlo_auth_headers", None)
_ = self.arlo
except Exception as e:
traceback.print_exc()
except Exception:
self.logger.exception("Error logging in")
def invalidate_arlo_client(self) -> None:
if self._arlo is not None:
@@ -230,7 +244,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
self.print(f"Setting plugin transport to {self.arlo_transport}")
change_stream_class(self.arlo_transport)
def initialize_imap(self) -> None:
def initialize_imap(self, try_count=1) -> None:
if not self.imap_mfa_host or not self.imap_mfa_port or \
not self.imap_mfa_username or not self.imap_mfa_password or \
not self.imap_mfa_interval:
@@ -238,7 +252,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
self.exit_imap()
try:
self.logger.info("Trying connect to IMAP")
self.logger.info(f"Trying connect to IMAP (attempt {try_count})")
self.imap = imaplib.IMAP4_SSL(self.imap_mfa_host, port=self.imap_mfa_port)
res, _ = self.imap.login(self.imap_mfa_username, self.imap_mfa_password)
@@ -252,9 +266,14 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
res, self.imap_skip_emails = self.imap.search(None, "FROM", "do_not_reply@arlo.com")
if res.lower() != "ok":
raise Exception(f"IMAP failed to fetch old Arlo emails: {res}")
except Exception as e:
traceback.print_exc()
self.exit_imap()
except Exception:
self.logger.exception("IMAP initialization error")
if try_count >= 10:
self.logger.error("Tried to connect to IMAP too many times. Will request a plugin restart.")
self.create_task(scrypted_sdk.deviceManager.requestRestart())
asyncio.get_event_loop().call_later(try_count*try_count, functools.partial(self.initialize_imap, try_count=try_count+1))
else:
self.logger.info("Connected to IMAP")
self.imap_signal = asyncio.Queue()
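The retry path above reschedules `initialize_imap` with a quadratically growing delay and, after ten failed attempts, asks Scrypted to restart the plugin. The same backoff shape in isolation, with a stand-in `connect()`:

```
# Hedged sketch: quadratic retry backoff (1s, 4s, 9s, ...) via call_later.
import asyncio
import functools

MAX_TRIES = 4

def connect(try_count=1):
    try:
        raise ConnectionError("simulated IMAP failure")  # stand-in for imaplib login
    except Exception as e:
        print(f"attempt {try_count} failed: {e}")
        if try_count >= MAX_TRIES:
            print("giving up")
            return
        asyncio.get_event_loop().call_later(
            try_count * try_count, functools.partial(connect, try_count=try_count + 1))

async def main():
    connect()
    await asyncio.sleep(20)  # keep the loop alive long enough for the retries

asyncio.run(main())
```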
@@ -286,22 +305,39 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
self.storage.setItem("arlo_user_id", "")
# initialize login and prompt for MFA
_ = self.arlo
try:
_ = self.arlo
except Exception:
self.logger.exception("Unrecoverable login error")
self.logger.error("Will request a plugin restart")
await scrypted_sdk.deviceManager.requestRestart()
return
# do imap lookup
# adapted from https://github.com/twrecked/pyaarlo/blob/77c202b6f789c7104a024f855a12a3df4fc8df38/pyaarlo/tfa.py
try:
try_count = 0
while True:
self.logger.info("Checking IMAP for MFA codes")
try_count += 1
sleep_duration = 1
if try_count > 20:
sleep_duration = 10
elif try_count > 10:
sleep_duration = 5
elif try_count > 5:
sleep_duration = 2
self.logger.info(f"Checking IMAP for MFA codes (attempt {try_count})")
self.imap.check()
res, emails = self.imap.search(None, "FROM", "do_not_reply@arlo.com")
res, emails = self.imap.search(None, "FROM", self.imap_mfa_sender)
if res.lower() != "ok":
raise Exception("IMAP error: {res}")
if emails == self.imap_skip_emails:
self.logger.info("No new emails found, will sleep and retry")
await asyncio.sleep(1)
await asyncio.sleep(sleep_duration)
continue
skip_emails = self.imap_skip_emails[0].split()
@@ -318,8 +354,9 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
if part.get_content_type() != "text/html":
continue
try:
for line in part.get_payload(decode=True).splitlines():
code = re.match(r"^\W+(\d{6})\W*$", line.decode())
soup = BeautifulSoup(part.get_payload(decode=True), 'html.parser')
for line in soup.get_text().splitlines():
code = re.match(r"^\W*(\d{6})\W*$", line)
if code is not None:
return code.group(1)
except:
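The bs4 change above makes code extraction resilient to HTML-formatted 2FA emails: render the HTML to text, then look for a line that is nothing but a 6-digit code. A standalone sketch with a fabricated email body:

```
# Hedged sketch: pulling a 6-digit code out of an HTML email body.
import re
from bs4 import BeautifulSoup

html = "<html><body><p>Your one-time code is</p><p>123456</p></body></html>"
soup = BeautifulSoup(html, "html.parser")
code = None
for line in soup.get_text("\n").splitlines():
    m = re.match(r"^\W*(\d{6})\W*$", line)
    if m:
        code = m.group(1)
        break
print(code)  # -> 123456
```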
@@ -340,20 +377,30 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
break
self.logger.info("No MFA code found, will sleep and retry")
await asyncio.sleep(1)
except Exception as e:
traceback.print_exc()
self.logger.error("Will retry on next IMAP interval")
await asyncio.sleep(sleep_duration)
except Exception:
self.logger.exception("Error while checking for MFA codes")
self._arlo = old_arlo
self.storage.setItem("arlo_auth_headers", old_headers)
self.storage.setItem("arlo_user_id", old_user_id)
self._arlo_mfa_code = None
self._arlo_mfa_complete_auth = None
self.logger.error("Will reload IMAP connection")
asyncio.get_event_loop().call_soon(self.initialize_imap)
else:
# finish login
if old_arlo:
old_arlo.Unsubscribe()
_ = self.arlo
try:
_ = self.arlo
except Exception:
self.logger.exception("Unrecoverable login error")
self.logger.error("Will request a plugin restart")
await scrypted_sdk.deviceManager.requestRestart()
return
# continue by sleeping/waiting for a signal
interval = self.imap_mfa_interval * 24 * 60 * 60 # convert interval days to seconds
@@ -439,6 +486,13 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
"type": "password",
"value": self.imap_mfa_password,
},
{
"group": "IMAP 2FA",
"key": "imap_mfa_sender",
"title": "IMAP Email Sender",
"value": self.imap_mfa_sender,
"description": "The sender email address to search for when loading 2FA codes. See plugin README for more details.",
},
{
"group": "IMAP 2FA",
"key": "imap_mfa_interval",
@@ -455,9 +509,9 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
"group": "General",
"key": "arlo_transport",
"title": "Underlying Transport Protocol",
"description": "Select the underlying transport protocol used to connect to Arlo Cloud.",
"description": "Arlo Cloud currently only supports the SSE protocol.",
"value": self.arlo_transport,
"choices": self.arlo_transport_choices,
"readonly": True,
},
{
"group": "General",
@@ -627,7 +681,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
device = await self.getDevice_impl(nativeId)
scrypted_interfaces = device.get_applicable_interfaces()
manifest = device.get_device_manifest()
self.logger.debug(f"Interfaces for {nativeId} ({camera['modelId']}): {scrypted_interfaces}")
self.logger.debug(f"Interfaces for {nativeId} ({camera['modelId']} parent {camera['parentId']}): {scrypted_interfaces}")
if camera["deviceId"] == camera["parentId"]:
provider_to_device_map.setdefault(None, []).append(manifest)
@@ -647,28 +701,37 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
if len(cameras) != len(camera_devices):
self.logger.info(f"Discovered {len(cameras)} cameras, but only {len(camera_devices)} are usable")
self.logger.info(f"Are all cameras shared with admin permissions?")
else:
self.logger.info(f"Discovered {len(cameras)} cameras")
for provider_id in provider_to_device_map.keys():
if provider_id is None:
continue
if len(provider_to_device_map[provider_id]) > 0:
self.logger.debug(f"Sending {provider_id} and children to scrypted server")
else:
self.logger.debug(f"Sending {provider_id} to scrypted server")
await scrypted_sdk.deviceManager.onDevicesChanged({
"devices": provider_to_device_map[provider_id],
"providerNativeId": provider_id,
})
# ensure devices at the root match all that was discovered
self.logger.debug("Sending top level devices to scrypted server")
await scrypted_sdk.deviceManager.onDevicesChanged({
"devices": provider_to_device_map[None]
})
self.logger.debug("Done discovering devices")
async def getDevice(self, nativeId: str) -> ArloDeviceBase:
async with self.device_discovery_lock:
return await self.getDevice_impl(nativeId)
async def getDevice_impl(self, nativeId: str) -> ArloDeviceBase:
ret = self.scrypted_devices.get(nativeId, None)
ret = self.scrypted_devices.get(nativeId)
if ret is None:
ret = self.create_device(nativeId)
if ret is not None:

View File

@@ -51,4 +51,22 @@ class ArloFloodlight(ArloSpotlight):
async def turnOff(self) -> None:
self.logger.info("Turning off")
self.provider.arlo.FloodlightOff(self.arlo_basestation, self.arlo_device)
self.on = False
class ArloNightlight(ArloSpotlight):
def __init__(self, nativeId: str, arlo_device: dict, provider: ArloProvider, camera: ArloCamera) -> None:
super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_device, provider=provider, camera=camera)
@async_print_exception_guard
async def turnOn(self) -> None:
self.logger.info("Turning on")
self.provider.arlo.NightlightOn(self.arlo_device)
self.on = True
@async_print_exception_guard
async def turnOff(self) -> None:
self.logger.info("Turning off")
self.provider.arlo.NightlightOff(self.arlo_device)
self.on = False

View File

@@ -34,6 +34,11 @@ def async_print_exception_guard(fn):
try:
return await fn(*args, **kwargs)
except Exception:
traceback.print_exc()
# hack to detect if the applied function is actually a method
# on a scrypted object
if len(args) > 0 and hasattr(args[0], "logger"):
getattr(args[0], "logger").exception(f"{fn.__qualname__} raised an exception")
else:
traceback.print_exc()
raise
return wrapped
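The decorator change above routes exceptions to the owning device's logger when the wrapped callable is a method on an object exposing `.logger`, and falls back to `traceback.print_exc()` otherwise. A minimal usage sketch (the decorator body is restated here for self-containment, not copied verbatim from util.py):

```
# Hedged sketch: exception-guarding decorator that prefers a device logger.
import asyncio
import functools
import logging
import traceback

def async_print_exception_guard(fn):
    @functools.wraps(fn)
    async def wrapped(*args, **kwargs):
        try:
            return await fn(*args, **kwargs)
        except Exception:
            if args and hasattr(args[0], "logger"):
                args[0].logger.exception(f"{fn.__qualname__} raised an exception")
            else:
                traceback.print_exc()
            raise
    return wrapped

class FakeDevice:
    logger = logging.getLogger("FakeDevice")

    @async_print_exception_guard
    async def do_work(self):
        raise RuntimeError("boom")  # logged via self.logger, then re-raised

async def main():
    logging.basicConfig(level=logging.INFO)
    try:
        await FakeDevice().do_work()
    except RuntimeError:
        pass

asyncio.run(main())
```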

View File

@@ -3,10 +3,11 @@ sseclient==0.0.22
aiohttp==3.8.4
requests==2.28.2
cachetools==5.3.0
scrypted-arlo-go==0.0.2
scrypted-arlo-go==0.4.0
cloudscraper==1.2.71
cryptography==38.0.4
curl-cffi==0.5.7; platform_machine != 'armv7l'
async-timeout==4.0.2
beautifulsoup4==4.12.2
--extra-index-url=https://www.piwheels.org/simple/
--extra-index-url=https://bjia56.github.io/scrypted-arlo-go/
--prefer-binary

View File

@@ -2,7 +2,9 @@
The C300X Plugin for Scrypted allows viewing your C300X intercom with incoming video/audio.
WARNING: You will need access to the device, see https://github.com/fquinto/bticinoClasse300x
WARNING: You will need access to the device, see https://github.com/fquinto/bticinoClasse300x.
You also need the **[c300x-controller](https://github.com/slyoldfox/c300x-controller)** and node (v17.9.1) running on your device, which will expose an API for the intercom.
## Development instructions
@@ -17,12 +19,37 @@ $ npm run scrypted-deploy 127.0.0.1
After flashing a custom firmware you must at least:
* Install [node](https://nodejs.org/download/release/latest-v17.x/node-v17.9.1-linux-armv7l.tar.gz) on your device and run the c300x-controller on the device
* Install [/lib/libatomic.so.1](http://ftp.de.debian.org/debian/pool/main/g/gcc-10-cross/libatomic1-armhf-cross_10.2.1-6cross1_all.deb) in **/lib**
* Allow access to the SIP server on port 5060
* Allow your IP to authenticate with the SIP server
* Add a SIP user for scrypted
To do this use the guide below:
## Installing node and c300x-controller
```
$ cd /home/bticino/cfg/extra/
$ mkdir node
$ cd node
$ wget https://nodejs.org/download/release/latest-v17.x/node-v17.9.1-linux-armv7l.tar.gz
$ tar xvfz node-v17.9.1-linux-armv7l.tar.gz
```
Node requires `libatomic.so.1`, which isn't shipped with the device; get the .deb file from http://ftp.de.debian.org/debian/pool/main/g/gcc-10-cross/libatomic1-armhf-cross_10.2.1-6cross1_all.deb
```
$ ar x libatomic1-armhf-cross_10.2.1-6cross1_all.deb
```
scp the `libatomic.so.1` to `/lib` and check that node works:
```
$ root@C3X-00-00-00-00-00--2222222:~# /home/bticino/cfg/extra/node/bin/node -v
v17.9.1
```
## Make flexisip listen on a reachable IP and add users to it
To be able to talk to our own SIP server, we need to make the SIP server on the C300X
@@ -93,7 +120,7 @@ hashed-passwords=true
reject-wrong-client-certificates=true
````
Now we will add a `user agent` (user) that will be used by `baresip` to register itself with `flexisip`
Now we will add a `user agent` (user) that will be used by `scrypted` to register itself with `flexisip`
Edit the `/etc/flexisip/users/users.db.txt` file and create a new line by copy/pasting the c300x user.
@@ -101,7 +128,7 @@ For example:
````
c300x@1234567.bs.iotleg.com md5:ffffffffffffffffffffffffffffffff ;
baresip@1234567.bs.iotleg.com md5:ffffffffffffffffffffffffffffffff ;
scrypted@1234567.bs.iotleg.com md5:ffffffffffffffffffffffffffffffff ;
````
Leave the md5 as the same value - I use `fffff....` just for this example.
@@ -110,7 +137,7 @@ Edit the `/etc/flexisip/users/route.conf` file and add a new line to it, it spec
Change the IP address to the place where you will run `baresip` (same as `trusted-hosts` above)
````
<sip:baresip@1234567.bs.iotleg.com> <sip:192.168.0.XX>
<sip:scrypted@1234567.bs.iotleg.com> <sip:192.168.0.XX>
````
Edit the `/etc/flexisip/users/route_int.conf` file.
@@ -121,7 +148,7 @@ You can look at it as a group of users that is called when you call `alluser@123
Add your username at the end (make sure you stay on the same line, NOT a new line!)
````
<sip:alluser@1234567.bs.iotleg.com> ..., <sip:baresip@1234567.bs.iotleg.com>
<sip:alluser@1234567.bs.iotleg.com> ..., <sip:scrypted@1234567.bs.iotleg.com>
````
Reboot and verify flexisip is listening on the new IP address.

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/bticino",
"version": "0.0.7",
"version": "0.0.9",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/bticino",
"version": "0.0.7",
"version": "0.0.9",
"dependencies": {
"@slyoldfox/sip": "^0.0.6-1",
"sdp": "^3.0.3",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/bticino",
"version": "0.0.7",
"version": "0.0.9",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -2,7 +2,7 @@ import { closeQuiet, createBindZero, listenZeroSingleClient } from '@scrypted/co
import { sleep } from '@scrypted/common/src/sleep';
import { RtspServer } from '@scrypted/common/src/rtsp-server';
import { addTrackControls } from '@scrypted/common/src/sdp-utils';
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, PictureOptions, ResponseMediaStreamOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, PictureOptions, Reboot, ResponseMediaStreamOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
import { SipCallSession } from '../../sip/src/sip-call-session';
import { RtpDescription } from '../../sip/src/rtp-utils';
import { VoicemailHandler } from './bticino-voicemailHandler';
@@ -19,11 +19,12 @@ import { InviteHandler } from './bticino-inviteHandler';
import { SipRequest } from '../../sip/src/sip-manager';
import { get } from 'http'
import { ControllerApi } from './c300x-controller-api';
const STREAM_TIMEOUT = 65000;
const { mediaManager } = sdk;
export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvider, Intercom, Camera, VideoCamera, Settings, BinarySensor, HttpRequestHandler, VideoClips {
export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvider, Intercom, Camera, VideoCamera, Settings, BinarySensor, HttpRequestHandler, VideoClips, Reboot {
private session: SipCallSession
private remoteRtpDescription: RtpDescription
@@ -35,8 +36,9 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
public requestHandlers: CompositeSipMessageHandler = new CompositeSipMessageHandler()
public incomingCallRequest : SipRequest
private settingsStorage: BticinoStorageSettings = new BticinoStorageSettings( this )
public voicemailHandler : VoicemailHandler = new VoicemailHandler(this)
private voicemailHandler : VoicemailHandler = new VoicemailHandler(this)
private inviteHandler : InviteHandler = new InviteHandler(this)
private controllerApi : ControllerApi = new ControllerApi(this)
//TODO: randomize this
private keyAndSalt : string = "/qE7OPGKp9hVGALG2KcvKWyFEZfSSvm7bYVDjT8X"
//private decodedSrtpOptions : SrtpOptions = decodeSrtpOptions( this.keyAndSalt )
@@ -55,14 +57,24 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
})();
}
reboot(): Promise<void> {
return new Promise<void>( (resolve,reject ) => {
let c300x = SipHelper.getIntercomIp(this)
get(`http://${c300x}:8080/reboot?now`, (res) => {
console.log("Reboot API result: " + res.statusCode)
});
})
}
getVideoClips(options?: VideoClipOptions): Promise<VideoClip[]> {
return new Promise<VideoClip[]>( (resolve,reject ) => {
let c300x = SipHelper.getIntercomIp(this)
if( !c300x ) return []
get(`http://${c300x}:8080/videoclips?raw=true&startTime=${options.startTime/1000}&endTime=${options.endTime/1000}`, (res) => {
let rawData = '';
res.on('data', (chunk) => { rawData += chunk; });
res.on('end', () => {
let rawData = '';
res.on('data', (chunk) => { rawData += chunk; });
res.on('end', () => {
try {
const parsedData : [] = JSON.parse(rawData);
let videoClips : VideoClip[] = []
@@ -93,7 +105,7 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
return mediaManager.createMediaObjectFromUrl(url);
}
getVideoClipThumbnail(thumbnailId: string): Promise<MediaObject> {
let c300x = SipHelper.sipOptions(this)
let c300x = SipHelper.getIntercomIp(this)
const url = `http://${c300x}:8080/voicemail?msg=${thumbnailId}/aswm.jpg&raw=true`;
return mediaManager.createMediaObjectFromUrl(url);
}
@@ -224,8 +236,6 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
}
this.stopSession();
const { clientPromise: playbackPromise, port: playbackPort, url: clientUrl } = await listenZeroSingleClient()
const playbackUrl = clientUrl
@@ -234,6 +244,7 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
client.setKeepAlive(true, 10000)
let sip: SipCallSession
try {
await this.controllerApi.updateStreamEndpoint()
let rtsp: RtspServer;
const cleanup = () => {
client.destroy();
@@ -366,6 +377,9 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
}
async releaseDevice(id: string, nativeId: string): Promise<void> {
this.voicemailHandler.cancelTimer()
this.persistentSipManager.cancelTimer()
this.controllerApi.cancelTimer()
}
reset() {

View File

@@ -6,7 +6,7 @@ export class VoicemailHandler extends SipRequestHandler {
constructor( private sipCamera : BticinoSipCamera ) {
super()
setTimeout( () => {
this.timeout = setTimeout( () => {
// Delay a bit and run in a different thread in case this fails
this.checkVoicemail()
}, 10000 )
@@ -25,7 +25,7 @@ export class VoicemailHandler extends SipRequestHandler {
this.timeout = setTimeout( () => this.checkVoicemail() , 5 * 60 * 1000 )
}
cancelVoicemailCheck() {
cancelTimer() {
if( this.timeout ) {
clearTimeout(this.timeout)
}

View File

@@ -0,0 +1,125 @@
import * as nodeIp from "ip";
import { get } from 'http'
import * as net from 'net'
import { BticinoSipCamera } from "./bticino-camera";
import { SipHelper } from './sip-helper';
export class ControllerApi {
private timeout : NodeJS.Timeout
constructor( private sipCamera : BticinoSipCamera ) {
this.timeout = setTimeout( () => {
// Delay a bit and run in a different thread in case this fails
this.registerEndpoints( true )
}, 5000 )
}
/**
* Will validate certain requirements for scrypted to work correctly with the intercom:
*/
public static validate( ipAddress ) {
return this.validateFlexisipSipPort(ipAddress).then( this.validateController )
}
/**
* Will validate if the non secure SIP port was opened after modifying /etc/init.d/flexisipsh
*/
private static validateFlexisipSipPort( ipAddress : string ) : Promise<string> {
let conn = net.createConnection( { host: ipAddress, port: 5060, timeout: 5000 } )
return new Promise( (resolve, reject) => {
conn.setTimeout(5000);
conn.on('connect', () => resolve( ipAddress ));
conn.on('timeout', () => reject( new Error("Timeout connecting to port 5060, is this a Bticino intercom? Did you change /etc/init.d/flexisipsh to make it listen on this port?") ) );
conn.on('error', () => reject( new Error("Error connecting to port 5060, is this a Bticino intercom? Did you change /etc/init.d/flexisipsh to make it listen on this port?") ) );
})
}
/**
* Will validate if the c300x-controller is running on port 8080.
* The c300x-controller will return errors if some configuration errors are present on the intercom.
*/
private static validateController( ipAddress : string ) : Promise<void> {
// Will throw an exception if invalid format
const c300x = nodeIp.toBuffer( ipAddress )
const validatedIp = nodeIp.toString(c300x)
const url = `http://${validatedIp}:8080/validate-setup?raw=true`
return new Promise( (resolve, reject) => get(url, (res) => {
let body = "";
res.on("data", data => { body += data });
res.on("end", () => {
try {
let parsedBody = JSON.parse( body )
if( parsedBody["errors"].length > 0 ) {
reject( new Error( parsedBody["errors"][0] ) )
} else {
parsedBody["ipAddress"] = validatedIp
resolve( parsedBody )
}
} catch( e ) {
reject( e )
}
})
res.on("error", (e) => { reject(e)})
if( res.statusCode != 200 ) {
reject( new Error(`Could not validate required c300x-controller. Check ${url}`) )
}
} ).on("error", (e) => { reject(`Could not connect to the c300x-controller at ${url}`) }) )
}
/**
* This verifies if the intercom is customized correctly. It verifies:
*
* - if a dedicated scrypted sip user is added for this specific camera instance in /etc/flexisip/users/users.db.txt
* - if this dedicated scrypted sip user is configured in /etc/flexisip/users/route.conf and /etc/flexisip/users/route_int.conf
*/
public registerEndpoints( verifyUser : boolean ) {
let ipAddress = SipHelper.getIntercomIp(this.sipCamera)
let sipFrom = SipHelper.getIdentifier(this.sipCamera)
const pressed = Buffer.from(this.sipCamera.doorbellWebhookUrl + 'pressed').toString('base64')
const locked = Buffer.from(this.sipCamera.doorbellLockWebhookUrl + 'locked').toString('base64')
const unlocked = Buffer.from(this.sipCamera.doorbellLockWebhookUrl + 'unlocked').toString('base64')
get(`http://${ipAddress}:8080/register-endpoint?raw=true&identifier=${sipFrom}&pressed=${pressed}&locked=${locked}&unlocked=${unlocked}&verifyUser=${verifyUser}`, (res) => {
if( verifyUser ) {
let body = "";
res.on("data", data => { body += data });
res.on("end", () => {
try {
let parsedBody = JSON.parse( body )
if( parsedBody["errors"].length > 0 ) {
this.sipCamera.log.a("This camera is not setup correctly, it will not be able to receive the incoming doorbell stream. Check the console for the errors.")
parsedBody["errors"].forEach( error => {
this.sipCamera.console.error( "ERROR: " + error )
});
}
} catch( e ) {
this.sipCamera.console.error("Error parsing body to JSON: " + body )
}
})
}
console.log("Endpoint registration status: " + res.statusCode)
});
// The default evict time on the c300x-controller is 5 minutes, so this will certainly be within bounds
this.timeout = setTimeout( () => this.registerEndpoints( false ) , 2 * 60 * 1000 )
}
/**
* Informs the c300x-controller where to send the stream to
*/
public updateStreamEndpoint() : Promise<void> {
let ipAddress = SipHelper.getIntercomIp(this.sipCamera)
let sipFrom = SipHelper.getIdentifier(this.sipCamera)
return new Promise( (resolve, reject) => get(`http://${ipAddress}:8080/register-endpoint?raw=true&updateStreamEndpoint=${sipFrom}`, (res) => {
if( res.statusCode != 200 ) reject( "ERROR: Could not update streaming endpoint, call returned: " + res.statusCode )
else resolve()
} ) );
}
public cancelTimer() {
if( this.timeout ) {
clearTimeout(this.timeout)
}
}
}

View File

@@ -1,6 +1,7 @@
import sdk, { Device, DeviceCreator, DeviceCreatorSettings, DeviceProvider, LockState, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, Setting } from '@scrypted/sdk'
import { randomBytes } from 'crypto'
import { BticinoSipCamera } from './bticino-camera'
import { ControllerApi } from './c300x-controller-api';
const { systemManager, deviceManager } = sdk
@@ -14,41 +15,60 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
key: 'newCamera',
title: 'Add Camera',
placeholder: 'Camera name, e.g.: Back Yard Camera, Baby Camera, etc',
}
},
{
key: 'ip',
title: 'IP Address',
placeholder: 'IP Address of the C300X intercom',
}
]
}
async createDevice(settings: DeviceCreatorSettings): Promise<string> {
const nativeId = randomBytes(4).toString('hex')
const name = settings.newCamera?.toString()
const camera = await this.updateDevice(nativeId, name)
const device: Device = {
providerNativeId: nativeId,
info: {
//model: `${camera.model} (${camera.data.kind})`,
manufacturer: 'BticinoPlugin',
//firmware: camera.data.firmware_version,
//serialNumber: camera.data.device_id
},
nativeId: nativeId + '-lock',
name: name + ' Lock',
type: ScryptedDeviceType.Lock,
interfaces: [ScryptedInterface.Lock, ScryptedInterface.HttpRequestHandler],
if( !settings.ip ) {
throw new Error('IP address is required!')
}
const ret = await deviceManager.onDevicesChanged({
providerNativeId: nativeId,
devices: [device],
let validate = ControllerApi.validate( settings.ip )
return validate.then( async (setupData) => {
const nativeId = randomBytes(4).toString('hex')
const name = settings.newCamera?.toString() === undefined ? "Doorbell" : settings.newCamera?.toString()
await this.updateDevice(nativeId, name)
const device: Device = {
providerNativeId: nativeId,
info: {
//model: `${camera.model} (${camera.data.kind})`,
manufacturer: 'BticinoPlugin',
//firmware: camera.data.firmware_version,
//serialNumber: camera.data.device_id
},
nativeId: nativeId + '-lock',
name: name + ' Lock',
type: ScryptedDeviceType.Lock,
interfaces: [ScryptedInterface.Lock, ScryptedInterface.HttpRequestHandler],
}
await deviceManager.onDevicesChanged({
providerNativeId: nativeId,
devices: [device],
})
let sipCamera : BticinoSipCamera = await this.getDevice(nativeId)
sipCamera.putSetting("sipfrom", "scrypted-" + sipCamera.id + "@127.0.0.1")
sipCamera.putSetting("sipto", "c300x@" + setupData["ipAddress"] )
sipCamera.putSetting("sipdomain", setupData["domain"])
sipCamera.putSetting("sipdebug", true )
systemManager.getDeviceById<BticinoSipCamera>(sipCamera.id)
let lock = await sipCamera.getDevice(undefined)
lock.lockState = LockState.Locked
return nativeId
})
let sipCamera : BticinoSipCamera = await this.getDevice(nativeId)
let foo : BticinoSipCamera = systemManager.getDeviceById<BticinoSipCamera>(sipCamera.id)
let lock = await sipCamera.getDevice(undefined)
lock.lockState = LockState.Locked
return nativeId
}
updateDevice(nativeId: string, name: string) {
@@ -69,7 +89,8 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
ScryptedInterface.BinarySensor,
ScryptedDeviceType.DeviceProvider,
ScryptedInterface.HttpRequestHandler,
ScryptedInterface.VideoClips
ScryptedInterface.VideoClips,
ScryptedInterface.Reboot
],
type: ScryptedDeviceType.Doorbell,
})
@@ -86,7 +107,6 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
async releaseDevice(id: string, nativeId: string): Promise<void> {
let camera = this.devices.get(nativeId)
if( camera ) {
camera.voicemailHandler.cancelVoicemailCheck()
if( this.devices.delete( nativeId ) ) {
this.console.log("Removed device from list: " + id + " / " + nativeId )
}

View File

@@ -14,10 +14,11 @@ export class PersistentSipManager {
private sipManager : SipManager
private lastRegistration : number = 0
private expireInterval : number = 0
private timeout : NodeJS.Timeout
constructor( private camera : BticinoSipCamera ) {
// Give it a second and run in a separate thread to avoid failure on creation for the from/to/domain check
setTimeout( () => this.enable() , CHECK_INTERVAL )
this.timeout = setTimeout( () => this.enable() , CHECK_INTERVAL )
}
async enable() : Promise<SipManager> {
@@ -56,7 +57,7 @@ export class PersistentSipManager {
this.lastRegistration = now + (60 * 1000) - this.expireInterval
throw e
} finally {
setTimeout( () => this.register(), CHECK_INTERVAL )
this.timeout = setTimeout( () => this.register(), CHECK_INTERVAL )
}
}
@@ -65,6 +66,12 @@ export class PersistentSipManager {
return SipCallSession.createCallSession(this.camera.console, "Bticino", sipOptions, sm )
}
cancelTimer() {
if( this.timeout ) {
clearTimeout(this.timeout)
}
}
reloadSipOptions() {
this.sipManager?.setSipOptions( null )
}

View File

@@ -39,6 +39,15 @@ export class SipHelper {
}
}
public static getIdentifier( camera : BticinoSipCamera ) : string {
let to = camera.storage.getItem('sipfrom')?.trim();
const domain = camera.storage.getItem('sipdomain')?.trim()
if( to ) {
return to.split('@')[0] + '%40' + domain;
}
return
}
public static getIntercomIp( camera : BticinoSipCamera ): string {
let to = camera.storage.getItem('sipto')?.trim();
if( to ) {

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/core",
"version": "0.1.128",
"version": "0.1.130",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/core",
"version": "0.1.128",
"version": "0.1.130",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/core",
"version": "0.1.128",
"version": "0.1.130",
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -5,6 +5,8 @@
<v-card-text>
<v-card-title style="justify-content: center;" class="headline text-uppercase">Scrypted
</v-card-title>
<v-card-subtitle v-if="$store.state.hasLogin === false" style="display: flex; justify-content: center;" class="text-uppercase">Create Account
</v-card-subtitle>
<v-container grid-list-md>
<v-layout wrap>
<v-flex xs12>
@@ -34,12 +36,13 @@
<v-card-actions>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn v-on="on" icon href="https://twitter.com/scryptedapp/">
<v-icon small>fab fa-twitter</v-icon>
<v-btn v-on="on" icon href="https://discord.gg/DcFzmBHYGq">
<v-icon small>fab fa-discord</v-icon>
</v-btn>
</template>
<span>Twitter</span>
<span>Discord</span>
</v-tooltip>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn v-on="on" icon href="https://www.reddit.com/r/Scrypted/">
@@ -48,6 +51,7 @@
</template>
<span>Reddit</span>
</v-tooltip>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn v-on="on" icon href="https://github.com/koush/scrypted">
@@ -56,14 +60,7 @@
</template>
<span>Github</span>
</v-tooltip>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn v-on="on" icon href="https://discord.gg/DcFzmBHYGq">
<v-icon small>fab fa-discord</v-icon>
</v-btn>
</template>
<span>Discord</span>
</v-tooltip>
<v-spacer></v-spacer>
<v-btn type="submit" text @click.prevent="doLogin">Log In</v-btn>
</v-card-actions>

View File

@@ -50,6 +50,17 @@
<v-list-item-title>Discord</v-list-item-title>
</v-list-item-content>
</v-list-item>
<v-list-item link href="https://www.reddit.com/r/Scrypted/" active-class="purple white--text tile">
<v-list-item-icon>
<v-icon small>fab fa-reddit</v-icon>
</v-list-item-icon>
<v-list-item-content>
<v-list-item-title>Reddit</v-list-item-title>
</v-list-item-content>
</v-list-item>
<v-list-item link href="https://github.com/koush/scrypted" active-class="purple white--text tile">
<v-list-item-icon>
<v-icon small>fab fa-github</v-icon>
@@ -59,6 +70,7 @@
<v-list-item-title>Github</v-list-item-title>
</v-list-item-content>
</v-list-item>
<v-divider></v-divider>
<v-list-item active-class="deep-purple accent-4 white--text">
<v-list-item-icon>
@@ -137,20 +149,31 @@ export default {
getComponentViewPath,
async checkUpdateAvailable() {
await this.$connectingScrypted;
const info = await this.$scrypted.systemManager.getComponent("info");
const version = await info.getVersion();
this.currentVersion = version;
const { updateAvailable } = await checkUpdate(
"@scrypted/server",
version
const serviceControl = await this.$scrypted.systemManager.getComponent(
"service-control"
);
this.updateAvailable = updateAvailable;
if (updateAvailable) {
try {
this.updateAvailable = await serviceControl.getUpdateAvailable();
}
catch (e) {
// old scrypted servers don't support this call, or it may be unimplemented,
// in which case fall back to checking the npm registry for an update.
const info = await this.$scrypted.systemManager.getComponent("info");
const version = await info.getVersion();
this.currentVersion = version;
const { updateAvailable } = await checkUpdate(
"@scrypted/server",
version
);
this.updateAvailable = updateAvailable;
}
if (this.updateAvailable) {
const logger = this.$scrypted.deviceManager.getDeviceLogger();
const u = new URL(window.location)
u.hash = "#/component/settings";
logger.clearAlerts();
logger.a(`Scrypted Server update available: ${updateAvailable}. ${u}`);
logger.a(`Scrypted Server update available: ${this.updateAvailable}. ${u}`);
}
},
filterComponents: function (category) {

View File

@@ -130,17 +130,28 @@ export default {
const info = await this.$scrypted.systemManager.getComponent("info");
const version = await info.getVersion();
this.currentVersion = version;
const { updateAvailable } = await checkUpdate(
"@scrypted/server",
version
const serviceControl = await this.$scrypted.systemManager.getComponent(
"service-control"
);
this.updateAvailable = updateAvailable;
try {
this.updateAvailable = await serviceControl.getUpdateAvailable();
}
catch (e) {
// old scrypted servers don't support this call, or it may be unimplemented,
// in which case fall back to checking the npm registry for an update.
const { updateAvailable } = await checkUpdate(
"@scrypted/server",
version
);
this.updateAvailable = updateAvailable;
}
},
async loadEnv() {
const info = await this.$scrypted.systemManager.getComponent("info");
const env = await info.getScryptedEnv();
this.showRestart = !!env.SCRYPTED_CAN_RESTART;
this.canUpdate = !!env.SCRYPTED_NPM_SERVE || !!env.SCRYPTED_WEBHOOK_UPDATE;
this.canUpdate = !!env.SCRYPTED_NPM_SERVE || !!env.SCRYPTED_WEBHOOK_UPDATE || !!env.SCRYPTED_CAN_UPDATE;
},
async doRestart() {
this.restartStatus = "Restarting...";

View File

@@ -40,7 +40,7 @@
<v-btn v-on="on" small>
<v-icon x-small>fa fa-calendar-alt</v-icon>
&nbsp;
{{ new Date(date).getFullYear() }}-{{ new Date(date).getMonth() }}-{{ new Date(date).getDate() }}
{{ new Date(date).getFullYear() }}-{{ new Date(date).getMonth() + 1 }}-{{ new Date(date).getDate() }}
</v-btn>
</template>
<v-card>

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/coreml",
"version": "0.1.15",
"version": "0.1.21",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/coreml",
"version": "0.1.15",
"version": "0.1.21",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -40,5 +40,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.15"
"version": "0.1.21"
}

View File

@@ -1,45 +1,125 @@
from __future__ import annotations
import re
import scrypted_sdk
from typing import Any, Tuple
from predict import PredictPlugin, Prediction, Rectangle
import coremltools as ct
import os
from PIL import Image
import asyncio
import concurrent.futures
import os
import platform
import re
from typing import Any, Tuple
import coremltools as ct
import numpy as np
import scrypted_sdk
from PIL import Image
from scrypted_sdk import Setting, SettingValue
import yolo
from predict import Prediction, PredictPlugin, Rectangle
predictExecutor = concurrent.futures.ThreadPoolExecutor(8, "CoreML-Predict")
def parse_label_contents(contents: str):
lines = contents.splitlines()
ret = {}
for row_number, content in enumerate(lines):
pair = re.split(r'[:\s]+', content.strip(), maxsplit=1)
pair = re.split(r"[:\s]+", content.strip(), maxsplit=1)
if len(pair) == 2 and pair[0].strip().isdigit():
ret[int(pair[0])] = pair[1].strip()
else:
ret[row_number] = content.strip()
return ret
class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Settings):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
labelsFile = self.downloadFile('https://raw.githubusercontent.com/koush/coreml-survival-guide/master/MobileNetV2%2BSSDLite/coco_labels.txt', 'coco_labels.txt')
modelFile = self.downloadFile('https://github.com/koush/coreml-survival-guide/raw/master/MobileNetV2%2BSSDLite/ObjectDetection/ObjectDetection/MobileNetV2_SSDLite.mlmodel', 'MobileNetV2_SSDLite.mlmodel')
model = self.storage.getItem("model") or "Default"
if model == "Default":
# model = "ssdlite_mobilenet_v2"
if "arm" in platform.processor():
model = "yolov8n"
else:
model = "ssdlite_mobilenet_v2"
self.yolo = "yolo" in model
self.yolov8 = "yolov8" in model
model_version = "v2"
print(f"model: {model}")
if not self.yolo:
# todo convert these to mlpackage
labelsFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/coco_labels.txt",
"coco_labels.txt",
)
modelFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/{model}.mlmodel",
f"{model}.mlmodel",
)
else:
if self.yolov8:
modelFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/{model}.mlmodel",
f"{model}.mlmodel",
)
else:
files = [
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/FeatureDescriptions.json",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/Metadata.json",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/weights/weight.bin",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{model}.mlmodel",
f"{model}/{model}.mlpackage/Manifest.json",
]
for f in files:
p = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{f}",
f"{model_version}/{f}",
)
modelFile = os.path.dirname(p)
labelsFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/coco_80cl.txt",
f"{model_version}/{model}/coco_80cl.txt",
)
self.model = ct.models.MLModel(modelFile)
self.modelspec = self.model.get_spec()
self.inputdesc = self.modelspec.description.input[0]
self.inputheight = self.inputdesc.type.imageType.height
self.inputwidth = self.inputdesc.type.imageType.width
labels_contents = open(labelsFile, 'r').read()
labels_contents = open(labelsFile, "r").read()
self.labels = parse_label_contents(labels_contents)
# csv in mobilenet model
# self.modelspec.description.metadata.userDefined['classes']
self.loop = asyncio.get_event_loop()
self.minThreshold = .2
self.minThreshold = 0.2
async def getSettings(self) -> list[Setting]:
model = self.storage.getItem("model") or "Default"
return [
{
"key": "model",
"title": "Model",
"description": "The detection model used to find objects.",
"choices": [
"Default",
"ssdlite_mobilenet_v2",
"yolov4-tiny",
"yolov8n",
],
"value": model,
},
]
async def putSetting(self, key: str, value: SettingValue):
self.storage.setItem(key, value)
await self.onDeviceEvent(scrypted_sdk.ScryptedInterface.Settings.value, None)
await scrypted_sdk.deviceManager.requestRestart()
# width, height, channels
def get_input_details(self) -> Tuple[int, int, int]:
@@ -49,17 +129,71 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
return (self.inputwidth, self.inputheight)
async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
# run in executor if this is the plugin loop
if asyncio.get_event_loop() is self.loop:
out_dict = await asyncio.get_event_loop().run_in_executor(predictExecutor, lambda: self.model.predict({'image': input, 'confidenceThreshold': self.minThreshold }))
else:
out_dict = self.model.predict({'image': input, 'confidenceThreshold': self.minThreshold })
coordinatesList = out_dict['coordinates'].astype(float)
objs = []
for index, confidenceList in enumerate(out_dict['confidence'].astype(float)):
# run in executor if this is the plugin loop
if self.yolo:
input_name = "image" if self.yolov8 else "input_1"
if asyncio.get_event_loop() is self.loop:
out_dict = await asyncio.get_event_loop().run_in_executor(
predictExecutor, lambda: self.model.predict({input_name: input})
)
else:
out_dict = self.model.predict({input_name: input})
if self.yolov8:
out_blob = out_dict["var_914"]
var_914 = out_dict["var_914"]
results = var_914[0]
objs = yolo.parse_yolov8(results)
ret = self.create_detection_result(objs, src_size, cvss)
return ret
out_blob = out_dict["Identity"]
objects = yolo.parse_yolo_region(
out_blob,
(input.width, input.height),
(81, 82, 135, 169, 344, 319),
# (23,27, 37,58, 81,82),
False,
)
for r in objects:
obj = Prediction(
r["classId"].astype(float),
r["confidence"].astype(float),
Rectangle(
r["xmin"].astype(float),
r["ymin"].astype(float),
r["xmax"].astype(float),
r["ymax"].astype(float),
),
)
objs.append(obj)
# what about output[1]?
# 26 26
# objects = yolo.parse_yolo_region(out_blob, (input.width, input.height), (23,27, 37,58, 81,82))
ret = self.create_detection_result(objs, src_size, cvss)
return ret
if asyncio.get_event_loop() is self.loop:
out_dict = await asyncio.get_event_loop().run_in_executor(
predictExecutor,
lambda: self.model.predict(
{"image": input, "confidenceThreshold": self.minThreshold}
),
)
else:
out_dict = self.model.predict(
{"image": input, "confidenceThreshold": self.minThreshold}
)
coordinatesList = out_dict["coordinates"].astype(float)
for index, confidenceList in enumerate(out_dict["confidence"].astype(float)):
values = confidenceList
maxConfidenceIndex = max(range(len(values)), key=values.__getitem__)
maxConfidence = confidenceList[maxConfidenceIndex]
@@ -80,12 +214,9 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
l = x - w2
t = y - h2
obj = Prediction(maxConfidenceIndex, maxConfidence, Rectangle(
l,
t,
l + w,
t + h
))
obj = Prediction(
maxConfidenceIndex, maxConfidence, Rectangle(l, t, l + w, t + h)
)
objs.append(obj)
ret = self.create_detection_result(objs, src_size, cvss)

View File

@@ -1,3 +1,4 @@
#
coremltools
# pillow for anything not intel linux, pillow-simd is available on x64 linux

1
plugins/coreml/src/yolo Symbolic link
View File

@@ -0,0 +1 @@
../../openvino/src/yolo

View File

@@ -1,4 +1,4 @@
{
"scrypted.debugHost": "127.0.0.1",
"scrypted.debugHost": "koushik-ubuntu",
}

View File

@@ -1,24 +1,24 @@
{
"name": "@scrypted/google-device-access",
"version": "0.0.96",
"version": "0.0.97",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/google-device-access",
"version": "0.0.96",
"version": "0.0.97",
"dependencies": {
"@googleapis/smartdevicemanagement": "^1.0.0",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"axios": "^1.3.4",
"axios": "^1.4.0",
"client-oauth2": "^4.3.3",
"lodash": "^4.17.21"
},
"devDependencies": {
"@types/debug": "^4.1.7",
"@types/lodash": "^4.14.191",
"@types/node": "^18.14.1"
"@types/debug": "^4.1.8",
"@types/lodash": "^4.14.195",
"@types/node": "^20.4.1"
}
},
"../../common": {
@@ -38,7 +38,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.69",
"version": "0.2.103",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -101,18 +101,18 @@
"integrity": "sha512-sBSO19KzdrJCM3gdx6eIxV8M9Gxfgg6iDQmH5TIAGaUu+X9VDdsINXJOnoiZ1Kx3TrHdH4bt5UVglkjsEGBcvw=="
},
"node_modules/@types/debug": {
"version": "4.1.7",
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.7.tgz",
"integrity": "sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==",
"version": "4.1.8",
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.8.tgz",
"integrity": "sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==",
"dev": true,
"dependencies": {
"@types/ms": "*"
}
},
"node_modules/@types/lodash": {
"version": "4.14.191",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.191.tgz",
"integrity": "sha512-BdZ5BCCvho3EIXw6wUCXHe7rS53AIDPLE+JzwgT+OsJk53oBfbSmZZ7CX4VaRoN78N+TJpFi9QPlfIVNmJYWxQ==",
"version": "4.14.195",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.195.tgz",
"integrity": "sha512-Hwx9EUgdwf2GLarOjQp5ZH8ZmblzcbTBC2wtQWNKARBSxM9ezRIAUpeDTgoQRAFB0+8CNWXVA9+MaSOzOF3nPg==",
"dev": true
},
"node_modules/@types/ms": {
@@ -122,9 +122,9 @@
"dev": true
},
"node_modules/@types/node": {
"version": "18.14.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz",
"integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==",
"version": "20.4.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.1.tgz",
"integrity": "sha512-JIzsAvJeA/5iY6Y/OxZbv1lUcc8dNSE77lb2gnBH+/PJ3lFR1Ccvgwl5JWnHAkNHcRsT0TbpVOsiMKZ1F/yyJg==",
"dev": true
},
"node_modules/@types/tough-cookie": {
@@ -157,9 +157,9 @@
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/axios": {
"version": "1.3.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.3.4.tgz",
"integrity": "sha512-toYm+Bsyl6VC5wSkfkbbNB6ROv7KY93PEBBL6xyDczaIHasAiv4wPqQ/c4RjoQzipxRD2W5g21cOqQulZ7rHwQ==",
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
"dependencies": {
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",
@@ -841,18 +841,18 @@
"integrity": "sha512-sBSO19KzdrJCM3gdx6eIxV8M9Gxfgg6iDQmH5TIAGaUu+X9VDdsINXJOnoiZ1Kx3TrHdH4bt5UVglkjsEGBcvw=="
},
"@types/debug": {
"version": "4.1.7",
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.7.tgz",
"integrity": "sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==",
"version": "4.1.8",
"resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.8.tgz",
"integrity": "sha512-/vPO1EPOs306Cvhwv7KfVfYvOJqA/S/AXjaHQiJboCZzcNDb+TIJFN9/2C9DZ//ijSKWioNyUxD792QmDJ+HKQ==",
"dev": true,
"requires": {
"@types/ms": "*"
}
},
"@types/lodash": {
"version": "4.14.191",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.191.tgz",
"integrity": "sha512-BdZ5BCCvho3EIXw6wUCXHe7rS53AIDPLE+JzwgT+OsJk53oBfbSmZZ7CX4VaRoN78N+TJpFi9QPlfIVNmJYWxQ==",
"version": "4.14.195",
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.195.tgz",
"integrity": "sha512-Hwx9EUgdwf2GLarOjQp5ZH8ZmblzcbTBC2wtQWNKARBSxM9ezRIAUpeDTgoQRAFB0+8CNWXVA9+MaSOzOF3nPg==",
"dev": true
},
"@types/ms": {
@@ -862,9 +862,9 @@
"dev": true
},
"@types/node": {
"version": "18.14.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.1.tgz",
"integrity": "sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==",
"version": "20.4.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.1.tgz",
"integrity": "sha512-JIzsAvJeA/5iY6Y/OxZbv1lUcc8dNSE77lb2gnBH+/PJ3lFR1Ccvgwl5JWnHAkNHcRsT0TbpVOsiMKZ1F/yyJg==",
"dev": true
},
"@types/tough-cookie": {
@@ -891,9 +891,9 @@
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"axios": {
"version": "1.3.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.3.4.tgz",
"integrity": "sha512-toYm+Bsyl6VC5wSkfkbbNB6ROv7KY93PEBBL6xyDczaIHasAiv4wPqQ/c4RjoQzipxRD2W5g21cOqQulZ7rHwQ==",
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
"requires": {
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",

View File

@@ -40,14 +40,14 @@
"@scrypted/sdk": "file:../../sdk",
"@scrypted/common": "file:../../common",
"@googleapis/smartdevicemanagement": "^1.0.0",
"axios": "^1.3.4",
"axios": "^1.4.0",
"client-oauth2": "^4.3.3",
"lodash": "^4.17.21"
},
"devDependencies": {
"@types/debug": "^4.1.7",
"@types/lodash": "^4.14.191",
"@types/node": "^18.14.1"
"@types/debug": "^4.1.8",
"@types/lodash": "^4.14.195",
"@types/node": "^20.4.1"
},
"version": "0.0.96"
"version": "0.0.97"
}

View File

@@ -161,7 +161,9 @@ class NestCamera extends ScryptedDeviceBase implements Readme, Camera, VideoCame
},
setRemoteDescription: async (description: RTCSessionDescriptionInit, setup: RTCAVSignalingSetup) => {
const offerSdp = description.sdp.replace('a=ice-options:trickle\r\n', '');
const offerSdp = description.sdp.replace('a=ice-options:trickle\r\n', '')
// hack, webrtc plugin is not respecting recvonly for some reason
.replaceAll('sendrecv', 'recvonly');
const result = await this.provider.authPost(`/devices/${this.nativeId}:executeCommand`, {
command: "sdm.devices.commands.CameraLiveStream.GenerateWebRtcStream",

View File

@@ -1,21 +1,21 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.127",
"version": "0.0.128",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/hikvision",
"version": "0.0.127",
"version": "0.0.128",
"license": "Apache",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/xml2js": "^0.4.9",
"axios": "^0.23.0",
"@types/xml2js": "^0.4.11",
"axios": "^1.4.0",
"lodash": "^4.17.21",
"xml2js": "^0.4.23"
"xml2js": "^0.6.0"
},
"devDependencies": {
"@types/node": "^18.15.11"
@@ -38,7 +38,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.87",
"version": "0.2.103",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -107,24 +107,50 @@
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"node_modules/@types/xml2js": {
"version": "0.4.9",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.9.tgz",
"integrity": "sha512-CHiCKIihl1pychwR2RNX5mAYmJDACgFVCMT5OArMaO3erzwXVcBqPcusr+Vl8yeeXukxZqtF8mZioqX+mpjjdw==",
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
"integrity": "sha512-JdigeAKmCyoJUiQljjr7tQG3if9NkqGUgwEUqBvV0N7LM4HyQk7UXCnusRa1lnvXAEYJ8mw8GtZWioagNztOwA==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/auth-header": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/auth-header/-/auth-header-1.0.0.tgz",
"integrity": "sha512-CPPazq09YVDUNNVWo4oSPTQmtwIzHusZhQmahCKvIsk0/xH6U3QsMAv3sM+7+Q0B1K2KJ/Q38OND317uXs4NHA=="
},
"node_modules/axios": {
"version": "0.23.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.23.0.tgz",
"integrity": "sha512-NmvAE4i0YAv5cKq8zlDoPd1VLKAqX5oLuZKs8xkJa4qi6RGn0uhCYFjWtHHC9EM/MwOwYWOs53W+V0aqEXq1sg==",
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
"dependencies": {
"follow-redirects": "^1.14.4"
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/follow-redirects": {
@@ -146,20 +172,57 @@
}
}
},
"node_modules/form-data": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
},
"node_modules/sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"node_modules/xml2js": {
"version": "0.4.23",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz",
"integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==",
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.0.tgz",
"integrity": "sha512-eLTh0kA8uHceqesPqSE+VvO1CDDJWMwlQfB6LuN6T8w6MaDJ8Txm8P7s5cHD0miF0V+GGTZrDQfxPZQVsur33w==",
"dependencies": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"
@@ -238,45 +301,93 @@
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"@types/xml2js": {
"version": "0.4.9",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.9.tgz",
"integrity": "sha512-CHiCKIihl1pychwR2RNX5mAYmJDACgFVCMT5OArMaO3erzwXVcBqPcusr+Vl8yeeXukxZqtF8mZioqX+mpjjdw==",
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
"integrity": "sha512-JdigeAKmCyoJUiQljjr7tQG3if9NkqGUgwEUqBvV0N7LM4HyQk7UXCnusRa1lnvXAEYJ8mw8GtZWioagNztOwA==",
"requires": {
"@types/node": "*"
}
},
"asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"auth-header": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/auth-header/-/auth-header-1.0.0.tgz",
"integrity": "sha512-CPPazq09YVDUNNVWo4oSPTQmtwIzHusZhQmahCKvIsk0/xH6U3QsMAv3sM+7+Q0B1K2KJ/Q38OND317uXs4NHA=="
},
"axios": {
"version": "0.23.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.23.0.tgz",
"integrity": "sha512-NmvAE4i0YAv5cKq8zlDoPd1VLKAqX5oLuZKs8xkJa4qi6RGn0uhCYFjWtHHC9EM/MwOwYWOs53W+V0aqEXq1sg==",
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
"requires": {
"follow-redirects": "^1.14.4"
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",
"proxy-from-env": "^1.1.0"
}
},
"combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"requires": {
"delayed-stream": "~1.0.0"
}
},
"delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
},
"follow-redirects": {
"version": "1.15.1",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz",
"integrity": "sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA=="
},
"form-data": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"mime-types": "^2.1.12"
}
},
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="
},
"mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"requires": {
"mime-db": "1.52.0"
}
},
"proxy-from-env": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
},
"sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"xml2js": {
"version": "0.4.23",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz",
"integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==",
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.0.tgz",
"integrity": "sha512-eLTh0kA8uHceqesPqSE+VvO1CDDJWMwlQfB6LuN6T8w6MaDJ8Txm8P7s5cHD0miF0V+GGTZrDQfxPZQVsur33w==",
"requires": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.127",
"version": "0.0.128",
"description": "Hikvision Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -38,10 +38,10 @@
"@koush/axios-digest-auth": "^0.8.5",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/xml2js": "^0.4.9",
"axios": "^0.23.0",
"@types/xml2js": "^0.4.11",
"axios": "^1.4.0",
"lodash": "^4.17.21",
"xml2js": "^0.4.23"
"xml2js": "^0.6.0"
},
"devDependencies": {
"@types/node": "^18.15.11"

View File

@@ -117,6 +117,7 @@ export class HikvisionCameraAPI {
method: "GET",
responseType: 'arraybuffer',
url: url,
timeout: 60000,
});
return Buffer.from(response.data);

View File

@@ -27,7 +27,7 @@ If recordings dont work, it's generally because of a few reasons, **follow the s
3) If HomeKit requested the video, but nothing showed up in the timeline:
* HomeKit may have decided the motion wasn't worth recording. Set your HomeKit recording options to all motion when testing.
* The recordings are in a bad format that can't be used by HomeKit. See below for optimal HomeKit Codec Settings. Enabling Transcode Debug Mode in the HomeKit settings for that camera may fix this for testing purposes, but long term usage is not recommended as it reduces quality and increases CPU load.
* The recordings are in a bad format that can't be used by HomeKit. See below for optimal HomeKit Codec Settings. Enabling `Debug Mode` (select `Transcode Video` and `Transcode Audio`) in the HomeKit settings for that camera may fix this for testing purposes, but long term usage is not recommended as it reduces quality and increases CPU load.
* Try rebooting your Home Hubs (HomePods and AppleTVs). Make sure they are fully up to date.
### HomeKit Discovery and Pairing Issues

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/homekit",
"version": "1.2.25",
"version": "1.2.29",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/homekit",
"version": "1.2.25",
"version": "1.2.29",
"dependencies": {
"@koush/werift-src": "file:../../external/werift",
"check-disk-space": "^3.3.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/homekit",
"version": "1.2.25",
"version": "1.2.29",
"description": "HomeKit Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",

View File

@@ -0,0 +1,119 @@
import { ScryptedDevice, ScryptedDeviceType, ScryptedInterface, AirPurifierStatus, AirPurifierMode, AirPurifier, FilterMaintenance } from '@scrypted/sdk';
import { addSupportedType, bindCharacteristic, DummyDevice, } from '../common';
import { Characteristic, CharacteristicEventTypes, CharacteristicSetCallback, CharacteristicValue, Service } from '../hap';
import { makeAccessory } from './common';
import type { HomeKitPlugin } from "../main";
addSupportedType({
type: ScryptedDeviceType.AirPurifier,
probe(device: DummyDevice): boolean {
return device.interfaces.includes(ScryptedInterface.AirPurifier);
},
getAccessory: async (device: ScryptedDevice & AirPurifier & FilterMaintenance, homekitPlugin: HomeKitPlugin) => {
const accessory = makeAccessory(device, homekitPlugin);
const service = accessory.addService(Service.AirPurifier, device.name);
const nightModeService = accessory.addService(Service.Switch, `${device.name} Night Mode`)
/* On/Off AND mode toggle */
bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.Active,
() => {
switch(device.airPurifierState.status) {
case AirPurifierStatus.Active:
return Characteristic.Active.ACTIVE;
case AirPurifierStatus.ActiveNightMode:
return Characteristic.Active.ACTIVE;
}
return Characteristic.Active.INACTIVE;
});
service.getCharacteristic(Characteristic.Active)
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
callback();
device.setAirPurifierState({
status: (value as boolean) ? AirPurifierStatus.Active : AirPurifierStatus.Inactive,
})
});
/* Current State */
bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.CurrentAirPurifierState,
() => {
switch (device.airPurifierState.status) {
case AirPurifierStatus.Inactive:
return Characteristic.CurrentAirPurifierState.INACTIVE;
case AirPurifierStatus.Idle:
return Characteristic.CurrentAirPurifierState.IDLE;
}
return Characteristic.CurrentAirPurifierState.PURIFYING_AIR;
});
/* Fan Speed */
bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.RotationSpeed,
() => device.airPurifierState.speed);
service.getCharacteristic(Characteristic.RotationSpeed)
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
callback();
device.setAirPurifierState({
speed: value,
})
})
/* i.e. Mode: Manual/Auto slider */
bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.TargetAirPurifierState,
() => {
if (device.airPurifierState.mode == AirPurifierMode.Automatic)
return Characteristic.TargetAirPurifierState.AUTO;
return Characteristic.TargetAirPurifierState.MANUAL;
});
service.getCharacteristic(Characteristic.TargetAirPurifierState)
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
callback();
device.setAirPurifierState({
mode: value === Characteristic.TargetAirPurifierState.AUTO ? AirPurifierMode.Automatic : AirPurifierMode.Manual,
})
});
/* LockPhysicalControls i.e. "Child Lock: Unlocked/Locked" */
bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.LockPhysicalControls,
() => !!device.airPurifierState.lockPhysicalControls);
service.getCharacteristic(Characteristic.LockPhysicalControls)
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
callback();
device.setAirPurifierState({
lockPhysicalControls: (value as boolean),
})
})
/* Night mode switch */
bindCharacteristic(device, ScryptedInterface.AirPurifier, nightModeService, Characteristic.On,
() => !!(device.airPurifierState.status === AirPurifierStatus.ActiveNightMode));
nightModeService.getCharacteristic(Characteristic.On)
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
callback();
device.setAirPurifierState({
status: value ? AirPurifierStatus.ActiveNightMode : AirPurifierStatus.Active,
})
})
/* Optional: Filter Maintenance Service */
if (device.interfaces.includes(ScryptedInterface.FilterMaintenance)) {
const filterMaintenanceService = accessory.addService(Service.FilterMaintenance, device.name);
bindCharacteristic(device, ScryptedInterface.FilterMaintenance, filterMaintenanceService, Characteristic.FilterLifeLevel,
() => device.filterLifeLevel)
bindCharacteristic(device, ScryptedInterface.FilterMaintenance, filterMaintenanceService, Characteristic.FilterChangeIndication,
() => {
if (device.filterChangeIndication)
return Characteristic.FilterChangeIndication.CHANGE_FILTER;
return Characteristic.FilterChangeIndication.FILTER_OK;
})
}
return accessory;
}
});

View File

@@ -64,15 +64,14 @@ export class H264Repacketizer {
extraPackets = 0;
fuaMax: number;
pendingFuA: RtpPacket[];
// log whether a stapa sps/pps has been seen.
// resets on every idr frame, to trigger codec information
// to be resent.
seenStapASps = false;
// the stapa packet that will be sent before an idr frame.
stapa: RtpPacket;
fuaMin: number;
constructor(public console: Console, public maxPacketSize: number, public codecInfo: {
sps: Buffer,
pps: Buffer,
sei?: Buffer,
}, public jitterBuffer = new JitterBuffer(console, 4)) {
// 12 is the rtp/srtp header size.
this.fuaMax = maxPacketSize - FU_A_HEADER_SIZE;
@@ -98,6 +97,11 @@ export class H264Repacketizer {
this.codecInfo.pps = pps;
}
updateSei(sei: Buffer) {
this.ensureCodecInfo();
this.codecInfo.sei = sei;
}
shouldFilter(nalType: number) {
// currently nothing is filtered, but it seems that some SEI packets cause issues
// and should be ignored, while others show up in the stap-a sps/pps packet
@@ -202,6 +206,14 @@ export class H264Repacketizer {
return datas.shift();
}
// a single nalu stapa is unnecessary, return the nalu itself.
// this can happen when trying to packetize multiple nalus into a stapa
// and the last nalu does not fit into the first stapa, and ends up in
// a new stapa.
if (counter === 1) {
return payload[1];
}
payload.unshift(Buffer.from([stapHeader]));
return Buffer.concat(payload);
}
@@ -266,7 +278,7 @@ export class H264Repacketizer {
}
else {
if (splitNaluType === NAL_TYPE_IDR)
this.maybeSendSpsPps(first, ret);
this.maybeSendStapACodecInfo(first, ret);
this.fragment(first, ret, {
payload: split,
@@ -319,11 +331,20 @@ export class H264Repacketizer {
});
}
maybeSendSpsPps(packet: RtpPacket, ret: RtpPacket[]) {
maybeSendStapACodecInfo(packet: RtpPacket, ret: RtpPacket[]) {
if (this.stapa) {
// stapa with codec information was sent recently, no need to send codec info.
this.stapa = undefined;
return;
}
if (!this.codecInfo?.sps || !this.codecInfo?.pps)
return;
const aggregates = this.packetizeStapA([this.codecInfo.sps, this.codecInfo.pps]);
const agg = [this.codecInfo.sps, this.codecInfo.pps];
if (this.codecInfo?.sei)
agg.push(this.codecInfo.sei);
const aggregates = this.packetizeStapA(agg);
if (aggregates.length !== 1) {
this.console.error('expected only 1 packet for sps/pps stapa');
return;
@@ -406,9 +427,7 @@ export class H264Repacketizer {
// the stream may not contain codec information in stapa or may be sending it
// in separate sps/pps packets which is not supported by homekit.
if (originalNalType === NAL_TYPE_IDR) {
if (!this.seenStapASps)
this.maybeSendSpsPps(packet, ret);
this.seenStapASps = false;
this.maybeSendStapACodecInfo(packet, ret);
}
}
@@ -451,26 +470,43 @@ export class H264Repacketizer {
else if (nalType === NAL_TYPE_STAP_A) {
this.flushPendingFuA(ret);
// break the aggregated packet up and send it.
const depacketized = depacketizeStapA(packet.payload)
.filter(payload => {
let hasSps = false;
let hasPps = false;
// break the aggregated packet up to update codec information.
depacketizeStapA(packet.payload)
.forEach(payload => {
const nalType = payload[0] & 0x1F;
this.seenStapASps = this.seenStapASps || (nalType === NAL_TYPE_SPS);
if (this.shouldFilter(nalType)) {
return false;
}
if (nalType === NAL_TYPE_SPS)
if (nalType === NAL_TYPE_SPS) {
hasSps = true;
this.updateSps(payload);
if (nalType === NAL_TYPE_PPS)
}
else if (nalType === NAL_TYPE_PPS) {
hasPps = true;
this.updatePps(payload);
return true;
}
else if (nalType === NAL_TYPE_SEI) {
this.updateSei(payload);
}
else if (nalType === NAL_TYPE_DELIMITER) {
// this is uncommon but has been seen. seems to be a no-op nalu.
}
else if (nalType === NAL_TYPE_NON_IDR) {
// this is uncommon but has been seen. oddly, on reolink this non-idr was sent
// after the codec information. so codec information can be changed between
// idr and non-idr? maybe it is not applied until next idr?
}
else {
this.console.warn('Skipped a stapa type. Please report this to @koush on Discord.', nalType)
}
});
if (depacketized.length === 0) {
this.extraPackets--;
return;
}
const aggregates = this.packetizeStapA(depacketized);
this.createRtpPackets(packet, aggregates, ret);
// log that a stapa with codec info was sent
if (hasSps && hasPps)
this.stapa = packet;
const stapa = this.packetizeStapA(depacketizeStapA(packet.payload));
this.createRtpPackets(packet, stapa, ret);
}
else if (nalType >= 1 && nalType < 24) {
this.flushPendingFuA(ret);
@@ -491,6 +527,11 @@ export class H264Repacketizer {
this.updatePps(packet.payload);
return;
}
else if (nalType === NAL_TYPE_SEI) {
this.extraPackets--;
this.updateSei(packet.payload);
return;
}
if (this.shouldFilter(nalType)) {
this.extraPackets--;
@@ -500,9 +541,7 @@ export class H264Repacketizer {
if (nalType === NAL_TYPE_IDR) {
// if this is an idr frame, but no sps has been sent, dummy one up.
// the stream may not contain sps.
if (!this.seenStapASps)
this.maybeSendSpsPps(packet, ret);
this.seenStapASps = false;
this.maybeSendStapACodecInfo(packet, ret);
}
this.fragment(packet, ret);

View File

@@ -95,7 +95,7 @@ export class JitterBuffer {
// missed/late bunch of packets
if (packetDistance > this.jitterSize) {
this.console.log('jitter buffer skipped packets:', packetDistance);
// this.console.log('jitter buffer skipped packets:', packetDistance);
const { lastSequenceNumber } = this;
this.lastSequenceNumber = sequenceNumber - this.jitterSize;
// use the previous sequence number to flush any packets that are too old compared

Some files were not shown because too many files have changed in this diff.