Compare commits

...

235 Commits

Author SHA1 Message Date
Koushik Dutta
1688fcc126 Merge branch 'main' of github.com:koush/scrypted 2023-06-09 16:17:43 -07:00
Koushik Dutta
99cae0ba31 docker: use nonfree intel media drivers 2023-06-09 16:17:39 -07:00
Koushik Dutta
a7b00b9e91 Update docker-common.yml 2023-06-08 21:18:44 -07:00
Koushik Dutta
3f2a62c6f2 docker: fix dist upgrade 2023-06-08 21:08:35 -07:00
Koushik Dutta
3fc318a370 Update docker.yml 2023-06-08 18:16:46 -07:00
Koushik Dutta
aed8575aa0 github: pi only allows 1 key on default acct 2023-06-08 17:54:08 -07:00
Koushik Dutta
2e28b50588 github: add rpi 4 builder 2023-06-08 17:41:32 -07:00
Koushik Dutta
2e87cc380f github: add rpi 4 builder 2023-06-08 17:34:00 -07:00
Koushik Dutta
1fdd2d4b01 github: rename secret priv key 2023-06-08 17:23:18 -07:00
Koushik Dutta
53b23b2ca8 Merge branch 'main' of github.com:koush/scrypted 2023-06-08 17:18:02 -07:00
Koushik Dutta
54016a9c78 github: update build push action 2023-06-08 17:17:58 -07:00
Koushik Dutta
d207a3b824 docker: switch from wget to curl 2023-06-08 17:16:54 -07:00
Koushik Dutta
e72a74d008 docker: clean up lite builds 2023-06-08 15:29:08 -07:00
Koushik Dutta
d1b907e45b Merge branch 'main' of github.com:koush/scrypted 2023-06-08 15:17:22 -07:00
Koushik Dutta
4a4c47ffe2 docker: clean up lite builds 2023-06-08 15:16:53 -07:00
Koushik Dutta
f6baf99935 Update docker.yml 2023-06-08 14:36:47 -07:00
Koushik Dutta
b5cc138e2b Update docker-common.yml 2023-06-08 14:33:28 -07:00
Koushik Dutta
40738a74cf Update docker-common.yml 2023-06-08 14:23:39 -07:00
Koushik Dutta
d2b1f104ca Update docker-common.yml 2023-06-08 14:17:10 -07:00
Koushik Dutta
6cb4f589c0 Update docker-common.yml 2023-06-08 14:10:01 -07:00
Koushik Dutta
5cf2b26630 Update docker-common.yml 2023-06-08 14:07:37 -07:00
Koushik Dutta
e7f16af04c Update docker-common.yml 2023-06-08 14:06:58 -07:00
Koushik Dutta
6287b9deaa Update docker-common.yml 2023-06-08 13:47:01 -07:00
Koushik Dutta
b9b5fdb712 docker: remove for loop 2023-06-08 13:40:39 -07:00
Koushik Dutta
c85af9c8a5 Merge branch 'main' of github.com:koush/scrypted 2023-06-08 13:36:25 -07:00
Koushik Dutta
069f765507 linux: fix multi python install 2023-06-08 13:36:23 -07:00
Koushik Dutta
0e587abc79 Update docker-common.yml 2023-06-08 13:27:11 -07:00
Koushik Dutta
47770c0a8d Update docker-common.yml 2023-06-08 13:18:23 -07:00
Koushik Dutta
82d1c3afe5 docker: revert sh expression 2023-06-08 12:54:54 -07:00
Koushik Dutta
1c9b52ce4f docker: move intel stuff into footer 2023-06-08 11:51:47 -07:00
Koushik Dutta
adcd9fa537 linux: move intel stuff out since it requires jammy 2023-06-08 11:47:06 -07:00
Koushik Dutta
91e2c2870b linux: quote commands for execution 2023-06-08 10:51:57 -07:00
Koushik Dutta
1fc892815d docker: fix piping 2023-06-08 10:32:07 -07:00
Koushik Dutta
38ed1acc15 docker: fix typo 2023-06-08 10:20:51 -07:00
Koushik Dutta
3bdc9ab930 docker: use intel repos for jammy 2023-06-08 10:11:02 -07:00
Koushik Dutta
bfa6346333 linux: fix dockerfile translation/exec 2023-06-08 10:04:19 -07:00
Koushik Dutta
fcbb308cb8 install: fix linux local syntax 2023-06-08 09:54:36 -07:00
Koushik Dutta
f137edcc8c install: fix linux local syntax 2023-06-08 09:53:17 -07:00
Koushik Dutta
53e6f083b9 docker: working jammy + tflite 2023-06-08 09:46:38 -07:00
Koushik Dutta
0f96fdb4bc tensorflow-lite: publish 2023-06-08 09:28:08 -07:00
Koushik Dutta
96ea3f3b27 postbeta 2023-06-08 09:22:54 -07:00
Koushik Dutta
a31d6482af postbeta 2023-06-08 09:12:21 -07:00
Koushik Dutta
be16bf7858 postbeta 2023-06-08 08:50:40 -07:00
Koushik Dutta
1dad0126bc postbeta 2023-06-08 08:08:24 -07:00
Koushik Dutta
9292ebbe48 tensorflow-lite: fix missing settings, add python version hints 2023-06-08 07:54:41 -07:00
Koushik Dutta
0b3a1a1998 docker: update before install 2023-06-07 16:25:22 -07:00
Koushik Dutta
b5d58b6899 Merge branch 'main' of github.com:koush/scrypted 2023-06-07 16:11:30 -07:00
Koushik Dutta
215a56f70e docker: jammy default 2023-06-07 16:11:08 -07:00
Koushik Dutta
c593701e72 gh: Update docker.yml 2023-06-07 15:59:53 -07:00
Koushik Dutta
46351f2fd7 docs: update 2023-06-07 15:22:35 -07:00
Koushik Dutta
9bce4acd14 postbeta 2023-06-07 15:20:38 -07:00
Koushik Dutta
cba20ec887 postbeta 2023-06-07 15:18:48 -07:00
Koushik Dutta
7c41516cce python-codecs: fix stride handling 2023-06-07 15:10:40 -07:00
Koushik Dutta
1f209072ba opencv: relax threshold defaults 2023-06-07 15:09:04 -07:00
Koushik Dutta
8978bff8a9 postbeta 2023-06-07 10:32:52 -07:00
Koushik Dutta
04c500b855 sdk: update 2023-06-07 10:32:18 -07:00
Koushik Dutta
8b4859579c rebroadcast: strip out all legacy audio handling 2023-06-07 08:34:45 -07:00
Koushik Dutta
90deaf1161 postbeta 2023-06-07 08:22:23 -07:00
Koushik Dutta
de56a8c653 server: remove dead code 2023-06-07 08:22:15 -07:00
Koushik Dutta
a5215ae92b Merge branch 'main' of github.com:koush/scrypted 2023-06-07 08:17:22 -07:00
Koushik Dutta
73cd40b540 server: strip and update dependencies 2023-06-07 08:17:13 -07:00
Koushik Dutta
93556dd404 postbeta 2023-06-07 07:40:15 -07:00
Brett Jia
125b436cb6 arlo: upstreaming changes (#844)
* remove webrtc emulation

* turn on two way audio by default

* add arloq pings and tweak log messages

* bump for release

* bump scrypted-arlo-go to remove unused code

* add arloqs pings

* better 2fa selection error msg + get sipinfo

* wip sip

* re-enable basestation push to talk

* bump for 0.7.24 release

* bump to working wheels

* disable MQTT backend and use SSE as default

* some login error handling

* remove dependency on cryptography and switch back to scrypted tool

* bump for 0.7.27 release

* implement DASH container

* expand documentation

* expand documentation

* bump for 0.7.28 beta

* discourage DASH further

* cleaner container selection

* tweak documentation

* tweak documentation

* bump for 0.7.29 release
2023-06-04 07:29:45 -04:00
Koushik Dutta
0a4ea032f5 client: include hostname property in login challenge 2023-06-02 15:36:05 -07:00
slyoldfox
c658cee5c9 sip: v0.0.9
* * Fix an issues in SIP.js where the ACK and BYE replies didn't go to the correct uri

* * Implemented outgoing SIP MESSAGE sending
* Adding voice mail check
* Adding a lock for a bticino doorbell

* Cleanup dependencies, code in sip, bticino plugins

* Cleanup dependencies, code in sip, bticino plugins

* Clear stale devices from our map and clear the voicemail check

* Do not require register() for a SIP call

* Narrow down the event matching to deletes of devices

* Use releaseDevice to clean up stale entries

* Fix uuid version

* Attempt to make two way audio work

* Attempt to make two way audio work - fine tuning

* Enable incoming doorbell events

* SipCall was never a "sip call" but more like a manager
SipSession was more the "sip call"

* * Rename sip registered session to persistent sip manager
* Allow handling of call pickup in homekit (hopefully!)

* * use the consoles from the camera object

* * use the consoles from the camera object

* * Fix the retry timer

* * Added webhook url

* * parse record route correctly

* * Add gruu and use a custom fork of sip.js which supports keepAlive SIP clients (and dropped Websocket)
* use cross-env in package.json

* Added webhook urls for faster handling of events

* Added videoclips

* plugins/sip 0.0.6

* plugins/bticino 0.0.7

* Implemented Reboot interface

* v0.0.9 which works with c300-controller

* better validation during creation of device
* automatically sets the correct settings depending on the data sent back from the controller

---------

Co-authored-by: Marc Vanbrabant <marc@foreach.be>
2023-06-02 13:37:52 -04:00
Koushik Dutta
6589176c8b Merge branch 'main' of github.com:koush/scrypted 2023-06-01 20:33:33 -07:00
Koushik Dutta
6c4c83f655 rebroadcast: hack fix for ffmpeg sdp race condition 2023-06-01 20:33:28 -07:00
Billy Zoellers
8d4124adda add types to support Air Purifier (#833)
* add types to support Air Purifier

* fix homekit type for airpurifier
2023-06-01 15:07:25 -04:00
Brett Jia
b7cda86df7 fix typo reported by community member (#831) 2023-05-29 17:23:24 -07:00
Koushik Dutta
6622e13e51 openvino: fix setting typo 2023-05-29 15:11:41 -07:00
Koushik Dutta
cbc45da679 openvino: add setting for compute target 2023-05-29 15:07:19 -07:00
Koushik Dutta
e7d06c66af gha: only do s6 builds 2023-05-29 10:21:57 -07:00
Koushik Dutta
ea02bc3b6f github: switch to jammy 2023-05-29 10:21:28 -07:00
Koushik Dutta
2b43cb7d15 postbeta 2023-05-29 10:20:00 -07:00
Koushik Dutta
f3c0362e18 server: prep for python3.10 2023-05-29 10:19:51 -07:00
Koushik Dutta
817ae42250 docker: fix install prompts 2023-05-28 19:58:12 -07:00
Koushik Dutta
8043f83f20 github: self hosted runner 2023-05-28 15:55:12 -07:00
Koushik Dutta
d33ab5dbcf gihub: self hosted runner 2023-05-28 15:54:50 -07:00
Koushik Dutta
2b1674bea8 docker/github: switch to jammy 2023-05-28 15:38:40 -07:00
Koushik Dutta
f045e59258 docker: normalize Dockerfile across arch 2023-05-28 12:57:55 -07:00
Koushik Dutta
9125aafc07 openvino: rollback 2023-05-28 12:55:13 -07:00
Koushik Dutta
6f5244ec9f videoanalysis: correctly pass motion zones to object detector 2023-05-28 09:01:21 -07:00
Koushik Dutta
f1eb2f988a openvino: unlock version for jammy 2023-05-27 23:10:35 -07:00
Koushik Dutta
1f659d9a72 python-codecs: move dimensions into caps 2023-05-27 23:09:42 -07:00
Koushik Dutta
dd98f12f2a python-codecs: fix pil rgba to jpg. fix image close race condition. 2023-05-27 22:46:55 -07:00
Koushik Dutta
2063e3822a docker: focal builds 2023-05-27 20:25:10 -07:00
Koushik Dutta
f7495a7a76 docker: update base image fingerprint 2023-05-27 18:16:45 -07:00
Koushik Dutta
fddb9c655f docker: use lunar 2023-05-27 18:05:32 -07:00
Koushik Dutta
297e7a7b4f docker: use jammy and lunar 2023-05-27 17:51:05 -07:00
Koushik Dutta
29e080f6b6 docker: switch back to ubuntu for better driver supports and deadsnakes ppa 2023-05-27 17:49:12 -07:00
Koushik Dutta
c72ea24794 python-codecs: fix vaapi post procesisng 2023-05-27 10:22:31 -07:00
Koushik Dutta
ada80796de homekit: fix basic fans 2023-05-27 09:37:30 -07:00
Koushik Dutta
1ebcf32998 python-codecs: fix vaapi gray output 2023-05-26 14:16:50 -07:00
Koushik Dutta
79765ba58e python-codecs: fix assert spam, code cleanups 2023-05-26 08:56:27 -07:00
Koushik Dutta
ff4665520c python-codecs: bug fixes 2023-05-25 23:34:49 -07:00
Koushik Dutta
be5b810335 python-codecs: cleanup code, add some fast paths 2023-05-25 23:08:15 -07:00
Koushik Dutta
fdc99b7fa6 python-codecs: major refactor to support hw acceleration and on demand color space conversion 2023-05-25 10:48:25 -07:00
Koushik Dutta
f730d13cbd ring: fix busted ass ring polling/push 2023-05-24 17:58:51 -07:00
Koushik Dutta
af02753cef server/core: support built in server updates 2023-05-23 12:04:02 -07:00
Koushik Dutta
9334d1c2a4 server: fix potential plugin startup hang 2023-05-23 08:48:26 -07:00
Koushik Dutta
71ecc07e2b webrtc: respect device pixel ratio 2023-05-23 01:44:29 -07:00
Koushik Dutta
5310dd5ff6 ui: social, account creation cleanups 2023-05-22 19:01:15 -07:00
Koushik Dutta
adf1a10659 sdk: image resize filters 2023-05-22 09:45:21 -07:00
Koushik Dutta
2ecc26c914 docker: use new install env var 2023-05-22 08:52:56 -07:00
Koushik Dutta
9a49416831 ha: use diff env var 2023-05-22 08:51:45 -07:00
Koushik Dutta
f0eff01898 ha: bump version, add env variable to prevent future notifications 2023-05-22 08:50:05 -07:00
Koushik Dutta
edd071739f python-codecs: dont feed preroll into queue 2023-05-21 22:48:06 -07:00
Koushik Dutta
ab81c568bc sdk: update 2023-05-21 22:44:14 -07:00
Koushik Dutta
62470df0af server: fix env anon login 2023-05-21 21:54:12 -07:00
Koushik Dutta
19b83eb056 postrelease 2023-05-21 21:53:43 -07:00
Koushik Dutta
b75d4cbfd4 postbeta 2023-05-21 21:52:41 -07:00
Koushik Dutta
8c0bb7b205 postrelease 2023-05-21 14:51:13 -07:00
Koushik Dutta
ef64515e56 python sample: readme 2023-05-20 22:08:25 -07:00
Koushik Dutta
302272e437 Merge branch 'main' of github.com:koush/scrypted 2023-05-20 22:07:45 -07:00
Koushik Dutta
80e433f6ef python sample: readme 2023-05-20 22:07:40 -07:00
Raman Gupta
60786aba2b Fix python SDK types (#817) 2023-05-20 21:32:43 -07:00
Brett Jia
256fde46f6 arlo: eco mode for snapshot throttling + disable experimental features (#816)
* eco mode, download images, remove experimental intercom

* rename imported var

* bump for release
2023-05-20 19:22:59 -07:00
Koushik Dutta
e1a7dd367e postbeta 2023-05-19 22:27:49 -07:00
Koushik Dutta
8612ba3462 postbeta 2023-05-19 22:06:16 -07:00
Koushik Dutta
ab638f26be postbeta 2023-05-19 21:49:31 -07:00
Koushik Dutta
02b881a2d2 postbeta 2023-05-19 21:41:15 -07:00
Koushik Dutta
35475b03e2 docker: pci coral example 2023-05-19 19:10:30 -07:00
Koushik Dutta
0b55c777f8 alexa: publish w/ storage fix 2023-05-19 12:22:10 -07:00
Koushik Dutta
68f86d214c Merge branch 'main' of github.com:koush/scrypted 2023-05-18 20:10:44 -07:00
Koushik Dutta
2abea2d25b videoanalysis: logging 2023-05-18 20:10:36 -07:00
Brett Jia
1c2f17b9f9 arlo: backup code path for cloudflare 403 (#809)
* experimental 403 fix

* use ips directly

* fix crash on empty device list

* bump for beta

* test post before returning

* bump for beta

* use cloudflare by default

* bump for beta

* bump for release
2023-05-18 19:23:15 -07:00
Koushik Dutta
e3d4800e4f python-codecs: implement image close 2023-05-17 21:03:51 -07:00
Koushik Dutta
d2f175715b webrtc: fix local transport detection on ipv6t 2023-05-17 21:03:04 -07:00
Koushik Dutta
93c1a699f1 snapshot: publish 2023-05-17 21:02:56 -07:00
Koushik Dutta
41570e9134 h264: publish 2023-05-17 14:24:30 -07:00
Koushik Dutta
3ef75854c2 postbeta 2023-05-17 11:35:51 -07:00
Koushik Dutta
c88a638f4e server: fix level db lock error not being reported 2023-05-17 11:35:42 -07:00
Koushik Dutta
793c4da33a postbeta 2023-05-17 10:59:48 -07:00
Koushik Dutta
68f071660e server: await server port listeners 2023-05-17 10:59:40 -07:00
Koushik Dutta
8ea5b6aca6 postbeta 2023-05-17 10:53:16 -07:00
Koushik Dutta
2f13c77444 server: add default admin login via token 2023-05-17 10:52:50 -07:00
Koushik Dutta
981ad183f5 gh: remove bookworm builds 2023-05-16 22:00:53 -07:00
Koushik Dutta
8748be82ef docker: add xz-utils 2023-05-16 20:30:08 -07:00
Koushik Dutta
a347fc2b73 gh: fix yaml 2023-05-16 19:41:55 -07:00
Koushik Dutta
002bf3b52c gh: fix yaml 2023-05-16 19:37:31 -07:00
Koushik Dutta
72abcd79ec gh: fix base bookworm image args 2023-05-16 19:19:10 -07:00
Koushik Dutta
86e5b824c7 docker: fix bookworm pip issues 2023-05-16 18:09:22 -07:00
Koushik Dutta
43f6f176f0 gh: fix docker common path 2023-05-16 15:21:10 -07:00
Koushik Dutta
bc543aa28e docker: add bookworm target 2023-05-16 15:11:30 -07:00
Koushik Dutta
e90db378e8 install: remove aiofiles dependency 2023-05-16 14:46:07 -07:00
Koushik Dutta
f2907532aa snapshot: cleanup timeout handling 2023-05-16 10:54:46 -07:00
Koushik Dutta
866706505a python-codecs: support vaapi color conversion 2023-05-15 14:37:22 -07:00
Koushik Dutta
59db3b622c ha: release 2023-05-15 13:10:52 -07:00
Koushik Dutta
7451b9903a rebroadcast: add ffmpeg transcode 2023-05-15 13:10:14 -07:00
Koushik Dutta
aded2e43b1 rebroadcast: support output transcoidng 2023-05-15 10:49:48 -07:00
Koushik Dutta
031a7527e1 ha: publish 2023-05-15 08:22:45 -07:00
Koushik Dutta
2aca568707 ha: publish 2023-05-14 14:10:21 -07:00
Koushik Dutta
6b22d34831 Merge branch 'main' of github.com:koush/scrypted 2023-05-14 14:08:39 -07:00
root
429d9ec5a6 ha: add home assistant api 2023-05-14 13:59:49 -07:00
Koushik Dutta
b426668146 postbeta 2023-05-14 13:56:18 -07:00
Koushik Dutta
8bce14f834 server: support auto installation of single plugin via SCRYPTED_INSTALL_PLUGIN 2023-05-14 13:56:08 -07:00
Koushik Dutta
7511abf768 various plugins: publish rpc gc churn fix 2023-05-13 14:11:32 -07:00
Koushik Dutta
180c12e8cc rebroadcast: create dirs before writing rtsp file 2023-05-13 14:03:36 -07:00
Koushik Dutta
1ed7d03a20 client: detection jpeg example 2023-05-13 07:43:49 -07:00
Koushik Dutta
9e7b57f154 python-codecs: fix process exit before aclose finish. publish betas. 2023-05-12 21:57:34 -07:00
Koushik Dutta
205fdb0222 fix per frame rpc gc churn 2023-05-12 20:26:18 -07:00
Koushik Dutta
d8f3edee1e core: fix init order 2023-05-12 20:25:52 -07:00
Koushik Dutta
90c9efc8a6 rebroadcast: use highwatermark for nvr perf improvement 2023-05-11 10:20:28 -07:00
Koushik Dutta
3893ccd776 Merge branch 'main' of github.com:koush/scrypted 2023-05-11 08:12:30 -07:00
Koushik Dutta
1b154f14bc docs: update 2023-05-11 08:12:25 -07:00
Koushik Dutta
2e3eba4350 Update test.yml 2023-05-10 22:29:45 -07:00
Koushik Dutta
450f05910a Update test.yml 2023-05-10 22:28:27 -07:00
Brett Jia
22505c9226 arlo: charger interface + fix cloudflare api (#784)
* add Charger interface

* update settings documentation

* arlo: fix cloudflare api issues
2023-05-10 15:01:09 -07:00
Koushik Dutta
7120bf86ff snapshot: fix description 2023-05-10 13:22:28 -07:00
Koushik Dutta
b49742204f ha: publish 2023-05-10 11:00:03 -07:00
Koushik Dutta
6fda76a5e8 gh: support versioned publish tags 2023-05-10 10:37:00 -07:00
Koushik Dutta
08bd785d45 gh: remove ha workflow 2023-05-10 08:19:49 -07:00
Koushik Dutta
aa9ddb35aa ha: restructure 2023-05-10 08:05:13 -07:00
Koushik Dutta
7997c07179 ha: use standard docker image 2023-05-10 08:04:36 -07:00
Koushik Dutta
a67e24d5dc github: fix action version 2023-05-09 19:05:01 -07:00
Koushik Dutta
0d4da0dd06 server: build off npm tag and determine version in giuthub action 2023-05-09 19:03:34 -07:00
Koushik Dutta
993e903f3b postbeta 2023-05-09 19:00:50 -07:00
Koushik Dutta
fbb11a5312 server: allow ip based admin auth 2023-05-09 19:00:39 -07:00
Koushik Dutta
ea72d2159b ha: add logos/icons 2023-05-09 12:43:09 -07:00
Koushik Dutta
1892fdb529 ha: publish 2023-05-09 10:47:11 -07:00
Koushik Dutta
1e16793b20 ha: move recordings out of the volume 2023-05-09 10:44:54 -07:00
Koushik Dutta
2f6c577b47 README: add HAOS 2023-05-09 10:08:31 -07:00
Koushik Dutta
212306449b wip 2023-05-09 10:06:19 -07:00
Koushik Dutta
16445bc38e ha: fix image 2023-05-09 08:49:45 -07:00
Koushik Dutta
b6e9e15d4f client: clear hash/query 2023-05-09 08:00:12 -07:00
Koushik Dutta
39abd49ea0 ha: autogenerate admin token 2023-05-09 07:59:57 -07:00
Koushik Dutta
05b9b49732 core: escape iframe for logins 2023-05-08 23:07:32 -07:00
Koushik Dutta
1857acac66 core: fix base url hash/query leak 2023-05-08 22:02:14 -07:00
Koushik Dutta
fedf184847 core: fix ingress shell 2023-05-08 21:47:41 -07:00
Koushik Dutta
d2afac0dd6 Merge branch 'main' of github.com:koush/scrypted 2023-05-08 14:48:27 -07:00
Koushik Dutta
6844b55983 ha: release 2023-05-08 14:48:19 -07:00
Koushik Dutta
379dabc182 ha: log in to dockerhub and ghcr 2023-05-08 14:11:28 -07:00
Koushik Dutta
df3c751f2d ha: fix missing env 2023-05-08 13:53:40 -07:00
Koushik Dutta
da714d1f94 ha: fix Dockerfile arch 2023-05-08 13:16:25 -07:00
Koushik Dutta
34ee29b7b4 Merge branch 'main' of github.com:koush/scrypted 2023-05-08 13:10:28 -07:00
Koushik Dutta
4c48f50e01 ha: fix Dockerfile missing var 2023-05-08 13:10:20 -07:00
Koushik Dutta
81a5a4349c docker: checkout repo 2023-05-08 11:28:52 -07:00
Koushik Dutta
8526c92dcc postbeta 2023-05-08 11:28:07 -07:00
Koushik Dutta
73fefeec26 server: relative redirects 2023-05-08 11:27:48 -07:00
Koushik Dutta
6060b50856 docker/ha: ha fixes 2023-05-08 11:27:37 -07:00
Koushik Dutta
d29cd7e421 ha: addon 2023-05-08 11:24:55 -07:00
Koushik Dutta
8589283135 postbeta 2023-05-07 22:21:31 -07:00
Koushik Dutta
837dae5f02 server: add support for long term token access 2023-05-07 22:21:19 -07:00
Koushik Dutta
c26aa2d01e client: publish 2023-05-07 22:12:16 -07:00
Koushik Dutta
c98eca23ab mqtt: publiush 2023-05-07 14:01:54 -07:00
Koushik Dutta
eb5d1ac4f6 client: squelch logging 2023-05-06 12:01:37 -07:00
Koushik Dutta
37b0e46dd0 rebroadcast: Fix audio codec parsing bug 2023-05-06 12:01:25 -07:00
Koushik Dutta
042dd84520 core: fix non-root repl and console 2023-05-05 17:31:38 -07:00
Koushik Dutta
62d5c145c2 core/client: support endpoints that are proxied from a non root webroot 2023-05-04 22:53:34 -07:00
Koushik Dutta
1ea3774849 videoanalysis: default to snapshot mode < 4 cpu 2023-05-03 21:16:03 -07:00
Koushik Dutta
9d8345e901 videoanalysis/motion: implement video rate control 2023-05-03 15:57:37 -07:00
Koushik Dutta
9ed850e327 rebroadcast/webrtc: fixup pcm_ulaw handling 2023-05-03 14:11:17 -07:00
Koushik Dutta
957d27b8ef rebroadcast: revert publish 2023-05-03 13:23:05 -07:00
Koushik Dutta
b74a957ecb Revert "rebroadcast: publish"
This reverts commit debaedfd8c.
2023-05-03 12:48:06 -07:00
Koushik Dutta
debaedfd8c rebroadcast: publish 2023-05-03 12:39:50 -07:00
Koushik Dutta
0123a97e3c videoanalysis: allow sleep on motion 2023-05-02 10:23:00 -07:00
Koushik Dutta
a32d47e192 postbeta 2023-05-02 09:43:51 -07:00
Koushik Dutta
90ed8bd3f5 server: reboot interface support 2023-05-02 09:43:42 -07:00
Koushik Dutta
c4f4002f55 sdk: publish 2023-05-01 23:13:27 -07:00
Koushik Dutta
1ea2828e78 plugins/sdk: add support for rebooting devices 2023-05-01 23:04:05 -07:00
Koushik Dutta
eb864456df sdk: rebuild 2023-05-01 20:00:15 -07:00
Koushik Dutta
51af4f07ff postbeta 2023-04-30 23:25:32 -07:00
Koushik Dutta
f6201acf2a server: add runtime kill 2023-04-30 23:25:23 -07:00
Koushik Dutta
96ac479c73 postbeta 2023-04-30 08:54:29 -07:00
Koushik Dutta
0c08875de3 server: build scripts now rev minor version, while flavors rev minor 2023-04-30 08:54:12 -07:00
Koushik Dutta
bd05fc1b5d postbeta 2023-04-29 18:35:38 -07:00
Koushik Dutta
5a0d325718 server: remove aiofiles dependency 2023-04-29 18:35:28 -07:00
Koushik Dutta
015794c1d1 sdk: update 2023-04-29 11:30:42 -07:00
Koushik Dutta
02d5b429b7 Revert "server: add hook for getting runtime"
This reverts commit e169d154e7.
2023-04-29 11:30:20 -07:00
Koushik Dutta
e169d154e7 server: add hook for getting runtime 2023-04-29 11:09:56 -07:00
Koushik Dutta
01c7b5674a postbeta 2023-04-29 08:34:04 -07:00
Koushik Dutta
a7a1aed0dc client: update 2023-04-28 10:12:02 -07:00
Koushik Dutta
6bb3f0fd19 sdk: additional video clip thumbnail options 2023-04-28 08:58:59 -07:00
Koushik Dutta
7828de9d50 sdk: add thumbnail side option to video clip requests 2023-04-28 08:46:57 -07:00
Koushik Dutta
ea77bb29d0 postrelease 2023-04-28 08:28:49 -07:00
207 changed files with 4024 additions and 2551 deletions

View File

@@ -1,50 +0,0 @@
name: Publish Scrypted (git HEAD)
on:
workflow_dispatch:
release:
types: [published]
jobs:
push_to_registry:
name: Push Docker image to Docker Hub
runs-on: ubuntu-latest
strategy:
matrix:
node: ["16-bullseye"]
steps:
- name: Check out the repo
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to Github Container Registry
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image (scrypted)
uses: docker/build-push-action@v2
with:
build-args: BASE=${{ matrix.node }}
context: .
file: docker/Dockerfile.HEAD
platforms: linux/amd64,linux/arm64,linux/armhf
push: true
tags: |
koush/scrypted:HEAD
ghcr.io/koush/scrypted:HEAD
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -2,54 +2,69 @@ name: Publish Scrypted Common
on:
workflow_dispatch:
release:
types: [published]
schedule:
# publish the common base once a month.
- cron: '30 8 2 * *'
jobs:
push_to_registry:
build:
name: Push Docker image to Docker Hub
runs-on: ubuntu-latest
runs-on: self-hosted
strategy:
matrix:
NODE_VERSION: ["18"]
BUILDPACK_DEPS_BASE: ["bullseye"]
BASE: ["jammy"]
FLAVOR: ["full", "lite", "thin"]
steps:
- name: Check out the repo
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
uses: docker/setup-qemu-action@v2
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: 192.168.2.124
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: 192.168.2.119
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64
append: |
- endpoint: ssh://koush@192.168.2.124
platforms: linux/arm64
- endpoint: ssh://koush@192.168.2.119
platforms: linux/armhf
- name: Login to Docker Hub
uses: docker/login-action@v1
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to Github Container Registry
uses: docker/login-action@v1
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image (scrypted-common)
uses: docker/build-push-action@v2
uses: docker/build-push-action@v4
with:
build-args: NODE_VERSION=${{ matrix.NODE_VERSION }}
context: docker/
file: docker/Dockerfile.${{ matrix.FLAVOR }}
platforms: linux/amd64,linux/arm64,linux/armhf
build-args: |
NODE_VERSION=${{ matrix.NODE_VERSION }}
BASE=${{ matrix.BASE }}
context: install/docker/
file: install/docker/Dockerfile.${{ matrix.FLAVOR }}
platforms: linux/amd64,linux/armhf,linux/arm64
push: true
tags: |
koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BUILDPACK_DEPS_BASE }}-${{ matrix.FLAVOR }}
# ${{ matrix.NODE_VERSION == '16-bullseye' && 'koush/scrypted-common:latest' || '' }}
koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -1,48 +1,65 @@
name: Publish Scrypted
name: Publish Scrypted Docker Image
on:
workflow_dispatch:
inputs:
docker_tag:
description: 'Docker Tag'
tag:
description: "The npm tag used to build the Docker image. The tag will be resolved as a specific version on npm, and that will be used to version the docker image."
required: true
package_version:
description: 'Package Version'
publish_tag:
description: "The versioned tag for the published Docker image. NPM will use the minor version, Docker should only specify a patch version."
required: false
release:
types: [published]
jobs:
push_to_registry:
build:
name: Push Docker image to Docker Hub
runs-on: ubuntu-latest
runs-on: self-hosted
strategy:
matrix:
BASE: ["18-bullseye-full", "18-bullseye-lite", "18-bullseye-thin"]
BASE: ["18-jammy-full", "18-jammy-lite", "18-jammy-thin"]
SUPERVISOR: ["", ".s6"]
steps:
- name: Check out the repo
uses: actions/checkout@v3
- name: get-npm-version
id: package-version
uses: martinbeentjes/npm-get-version-action@master
- name: NPM Package Request
id: npm-request
uses: fjogeleit/http-request-action@v1
with:
path: server
url: 'https://registry.npmjs.org/@scrypted/server'
method: 'GET'
- name: Print Version
run: echo "Version ${{ github.event.inputs.package_version || steps.package-version.outputs.current-version }}"
- name: Get current date
id: date
run: echo "::set-output name=date::$(date +'%Y-%m-%d')"
- name: Set NPM Version
id: package-version
run: echo "NPM_VERSION=${{ fromJson(steps.npm-request.outputs.response)['dist-tags'][ github.event.inputs.tag] }}" >> "$GITHUB_OUTPUT"
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: 192.168.2.124
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: 192.168.2.119
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64
append: |
- endpoint: ssh://koush@192.168.2.124
platforms: linux/arm64
- endpoint: ssh://koush@192.168.2.119
platforms: linux/armhf
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
@@ -56,31 +73,31 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image (scrypted)
uses: docker/build-push-action@v3
- name: Build and push Docker image
uses: docker/build-push-action@v4
with:
build-args: |
BASE=${{ matrix.BASE }}
SCRYPTED_INSTALL_VERSION=${{ github.event.inputs.package_version }}
context: docker/
file: docker/Dockerfile${{ matrix.SUPERVISOR }}
SCRYPTED_INSTALL_VERSION=${{ steps.package-version.outputs.NPM_VERSION }}
context: install/docker/
file: install/docker/Dockerfile${{ matrix.SUPERVISOR }}
platforms: linux/amd64,linux/arm64,linux/armhf
push: true
tags: |
${{ format('koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.package_version || steps.package-version.outputs.current-version) }}
${{ matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '.s6' && format('koush/scrypted:{0}', github.event.inputs.docker_tag) || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '' && 'koush/scrypted:full' || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '' && 'koush/scrypted:lite' || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '' && 'koush/scrypted:thin' || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:lite-s6' || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:thin-s6' || '' }}
${{ format('koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '.s6' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '' && 'koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '' && 'koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '' && 'koush/scrypted:thin' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:lite-s6' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:thin-s6' || '' }}
${{ format('ghcr.io/koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.package_version || steps.package-version.outputs.current-version) }}
${{ matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '.s6' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.docker_tag) || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:full' || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:lite' || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:thin' || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:lite-s6' || '' }}
${{ github.event.inputs.docker_tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:thin-s6' || '' }}
${{ format('ghcr.io/koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '.s6' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:thin' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:lite-s6' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:thin-s6' || '' }}
cache-from: type=gha
cache-to: type=gha,mode=max
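
The new steps above replace the get-npm-version action: an HTTP request fetches the @scrypted/server registry document, and the NPM_VERSION output is read from its dist-tags map, then reused for SCRYPTED_INSTALL_VERSION and the image tags. A rough standalone sketch of that resolution, in hypothetical TypeScript for illustration only (the workflow itself does this with fjogeleit/http-request-action and a fromJson expression):

async function resolveNpmTag(tag: string): Promise<string> {
    // Fetch the registry metadata for the package, as the workflow's
    // "NPM Package Request" step does.
    const response = await fetch('https://registry.npmjs.org/@scrypted/server');
    const metadata = await response.json();
    // dist-tags maps tags such as "latest" or "beta" to concrete versions;
    // the resolved version is what the Docker image gets stamped with.
    const version = metadata['dist-tags']?.[tag];
    if (!version)
        throw new Error(`npm dist-tag not found: ${tag}`);
    return version;
}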

View File

@@ -19,7 +19,7 @@ jobs:
- name: Run install script
run: |
cat ./docker/install-scrypted-dependencies-linux.sh | sudo SERVICE_USER=$USER bash
cat ./install/local/install-scrypted-dependencies-linux.sh | sudo SERVICE_USER=$USER bash
- name: Test server is running
run: |
@@ -37,7 +37,7 @@ jobs:
- name: Run install script
run: |
mkdir -p ~/.scrypted
bash ./docker/install-scrypted-dependencies-mac.sh
bash ./install/local/install-scrypted-dependencies-mac.sh
- name: Test server is running
run: |
@@ -53,7 +53,7 @@ jobs:
- name: Run install script
run: |
.\docker\install-scrypted-dependencies-win.ps1
.\install\local\install-scrypted-dependencies-win.ps1
- name: Test server is running
run: |

View File

@@ -23,6 +23,7 @@ Select the appropriate guide. After installation is finished, remember to visit
* Windows
* [Local Installation](https://github.com/koush/scrypted/wiki/Installation:-Windows)
* [WSL2 Installation](https://github.com/koush/scrypted/wiki/Installation:-WSL2-Windows)
* [Home Assistant OS](https://github.com/koush/scrypted/wiki/Installation:-Home-Assistant-OS)
<!-- * Docker Desktop is [not supported](https://github.com/koush/scrypted/wiki/Installation:-Docker-Desktop). -->
* [ReadyNAS: Docker](https://github.com/koush/scrypted/wiki/Installation:-Docker-ReadyNAS)
* [Synology: Docker](https://github.com/koush/scrypted/wiki/Installation:-Docker-Synology-NAS)

View File

@@ -4,8 +4,11 @@ import { EventEmitter } from 'events';
import { Server } from 'net';
import { Duplex } from 'stream';
import { cloneDeep } from './clone-deep';
import { Deferred } from "./deferred";
import { listenZeroSingleClient } from './listen-cluster';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from './media-helpers';
import { createRtspParser } from "./rtsp-server";
import { parseSdp } from "./sdp-utils";
import { StreamChunk, StreamParser } from './stream-parser';
const { mediaManager } = sdk;
@@ -57,9 +60,13 @@ export async function parseResolution(cp: ChildProcess) {
}
async function parseInputToken(cp: ChildProcess, token: string) {
let processed = 0;
return new Promise<string>((resolve, reject) => {
cp.on('exit', () => reject(new Error('ffmpeg exited while waiting to parse stream information: ' + token)));
const parser = (data: Buffer) => {
processed += data.length;
if (processed > 10000)
return resolve(undefined);
const stdout: string = data.toString().split('Output ')[0];
const idx = stdout.lastIndexOf(`${token}: `);
if (idx !== -1) {
@@ -77,7 +84,11 @@ async function parseInputToken(cp: ChildProcess, token: string) {
};
cp.stdout.on('data', parser);
cp.stderr.on('data', parser);
});
})
.finally(() => {
cp.stdout.removeAllListeners('data');
cp.stderr.removeAllListeners('data');
});
}
export async function parseVideoCodec(cp: ChildProcess) {
@@ -158,8 +169,6 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
const args = ffmpegInput.inputArguments.slice();
let needSdp = false;
const ensureActive = (killed: () => void) => {
if (!isActive) {
killed();
@@ -211,11 +220,6 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
}
}
if (needSdp) {
args.push('-sdp_file', `pipe:${pipeCount++}`);
stdio.push('pipe');
}
// start ffmpeg process with child process pipes
args.unshift('-hide_banner');
safePrintFFmpegArguments(console, args);
@@ -225,20 +229,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
ffmpegLogInitialOutput(console, cp, undefined, options?.storage);
cp.on('exit', () => kill(new Error('ffmpeg exited')));
let sdp: Promise<Buffer[]>;
if (needSdp) {
sdp = new Promise<Buffer[]>(resolve => {
const ret: Buffer[] = [];
cp.stdio[pipeCount - 1].on('data', buffer => {
ret.push(buffer);
resolve(ret);
});
})
}
else {
sdp = Promise.resolve([]);
}
const deferredStart = new Deferred<void>();
// now parse the created pipes
const start = () => {
for (const p of startParsers) {
@@ -257,6 +248,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
const { resetActivityTimer } = setupActivityTimer(container, kill, events, options?.timeout);
for await (const chunk of parser.parse(pipe as any, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
await deferredStart.promise;
events.emit(container, chunk);
resetActivityTimer();
}
@@ -268,13 +260,22 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
});
};
// tbh parsing stdout is super sketchy way of doing this.
parseAudioCodec(cp).then(result => inputAudioCodec = result);
parseResolution(cp).then(result => inputVideoResolution = result);
await parseVideoCodec(cp).then(result => inputVideoCodec = result);
const rtsp = (options.parsers as any).rtsp as ReturnType<typeof createRtspParser>;
rtsp.sdp.then(sdp => {
const parsed = parseSdp(sdp);
const audio = parsed.msections.find(msection=>msection.type === 'audio');
const video = parsed.msections.find(msection=>msection.type === 'video');
inputVideoCodec = video?.codec;
inputAudioCodec = audio?.codec;
});
const sdp = rtsp.sdp.then(sdpString => [Buffer.from(sdpString)]);
start();
return {
start,
start() {
deferredStart.resolve();
},
sdp,
get inputAudioCodec() {
return inputAudioCodec;
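
The refactor above gates chunk delivery on a Deferred imported from ./deferred, which is not shown in this diff. A minimal sketch of such a helper, assuming it only needs to expose promise and resolve (the actual ./deferred module may differ):

class Deferred<T> {
    resolve!: (value: T | PromiseLike<T>) => void;
    reject!: (reason?: any) => void;
    promise = new Promise<T>((resolve, reject) => {
        this.resolve = resolve;
        this.reject = reject;
    });
}

// Usage mirroring the diff: each per-pipe parse loop awaits
// deferredStart.promise before emitting a chunk, and the returned
// start() simply resolves it, so no chunks are emitted until the
// caller opts in.
const deferredStart = new Deferred<void>();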

View File

@@ -217,14 +217,12 @@ const acontrol = 'a=control:';
const artpmap = 'a=rtpmap:';
export function parseMSection(msection: string[]) {
const control = msection.find(line => line.startsWith(acontrol))?.substring(acontrol.length);
const rtpmapFirst = msection.find(line => line.startsWith(artpmap));
const mline = parseMLine(msection[0]);
let codec = parseRtpMap(mline.type, rtpmapFirst).codec;
const rtpmaps = msection.filter(line => line.startsWith(artpmap)).map(line => parseRtpMap(mline.type, line));
const rawRtpmaps = msection.filter(line => line.startsWith(artpmap));
const rtpmaps = rawRtpmaps.map(line => parseRtpMap(mline.type, line));
const codec = parseRtpMap(mline.type, rawRtpmaps[0]).codec;
let direction: string;
for (const checkDirection of ['sendonly', 'sendrecv', 'recvonly', 'inactive']) {
const found = msection.find(line => line === 'a=' + checkDirection);
if (found) {
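
For context on the parseMSection change above: each a=rtpmap: attribute in an SDP media section maps an RTP payload type to a codec name and clock rate, and the m-section's codec is taken from the first such line. A standalone illustration of pulling the codec out of an rtpmap line (hypothetical; not the repository's parseRtpMap, which also receives the m-line type):

function codecFromRtpmap(line: string): string | undefined {
    // format: a=rtpmap:<payload type> <encoding name>/<clock rate>[/<channels>]
    const match = /^a=rtpmap:\d+\s+([^/]+)\//.exec(line);
    return match?.[1].toLowerCase();
}

codecFromRtpmap('a=rtpmap:96 H264/90000'); // "h264"
codecFromRtpmap('a=rtpmap:0 PCMU/8000');   // "pcmu"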

View File

@@ -1,25 +0,0 @@
ARG BUILDPACK_DEPS_BASE="bullseye"
FROM debian:${BUILDPACK_DEPS_BASE} as header
RUN apt-get update && apt-get -y install curl wget
# switch to nvm?
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update
RUN apt-get install -y nodejs
RUN apt-get -y update
RUN apt-get -y upgrade
RUN apt-get -y install software-properties-common apt-utils
RUN apt-get -y update
ENV SCRYPTED_DOCKER_SERVE="true"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=thin

View File

@@ -1,3 +0,0 @@
./docker-build.sh
docker build -t koush/scrypted:18-bullseye-full.nvidia -f Dockerfile.nvidia

View File

@@ -1,18 +0,0 @@
################################################################
# Begin section generated from template/Dockerfile.full.footer
################################################################
FROM header as base
ENV SCRYPTED_DOCKER_SERVE="true"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=full
################################################################
# End section generated from template/Dockerfile.full.footer
################################################################

install/config.yaml (new executable file, 48 lines added)
View File

@@ -0,0 +1,48 @@
# Home Assistant Addon Configuration
name: Scrypted
version: "18-bullseye-full.s6-v0.23.0"
slug: scrypted
description: Scrypted is a high performance home video integration and automation platform
url: "https://github.com/koush/scrypted"
arch:
- amd64
- aarch64
- armv7
init: false
ingress: true
ingress_port: 11080
panel_icon: mdi:memory
hassio_api: true
homeassistant_api: true
ingress_stream: true
host_network: true
gpio: true
usb: true
uart: true
video: true
image: "ghcr.io/koush/scrypted"
environment:
SCRYPTED_INSTALL_PLUGIN: "@scrypted/homeassistant"
SCRYPTED_VOLUME: "/data/scrypted_data"
SCRYPTED_NVR_VOLUME: "/data/scrypted_nvr"
SCRYPTED_ADMIN_ADDRESS: "172.30.32.2"
SCRYPTED_ADMIN_USERNAME: "homeassistant"
SCRYPTED_INSTALL_ENVIRONMENT: "ha"
backup_exclude:
- '/server/**'
- '/data/scrypted_nvr/**'
- '/data/scrypted_data/plugins/**'
map:
- config:rw
- media:rw
devices:
- /dev/mem
- /dev/dri/renderD128
- /dev/apex_0
- /dev/apex_1
- /dev/apex_2
- /dev/apex_3
- /dev/dri/card0
- /dev/vchiq
- /dev/video10
- /dev/video0

View File

@@ -1,4 +1,4 @@
ARG BASE="18-bullseye-full"
ARG BASE="18-jammy-full"
FROM koush/scrypted-common:${BASE}
WORKDIR /

View File

@@ -1,4 +1,4 @@
ARG BASE="16-bullseye"
ARG BASE="16-jammy"
FROM koush/scrypted-common:${BASE}
WORKDIR /

View File

@@ -6,63 +6,66 @@
# This common file will be used by both Docker and the linux
# install script.
################################################################
ARG BUILDPACK_DEPS_BASE="bullseye"
FROM debian:${BUILDPACK_DEPS_BASE} as header
ARG BASE="jammy"
FROM ubuntu:${BASE} as header
RUN apt-get update && apt-get -y install curl wget
ENV DEBIAN_FRONTEND=noninteractive
# switch to nvm?
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update
RUN apt-get install -y nodejs
# Coral Edge TPU
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
RUN apt-get -y update
RUN apt-get -y install libedgetpu1-std
# intel opencl gpu for openvino
RUN if [ "$(uname -m)" = "x86_64" ]; \
then \
apt-get -y install \
intel-opencl-icd; \
fi
RUN apt-get -y install software-properties-common apt-utils
RUN apt-get -y update
RUN apt-get -y upgrade
# base development stuff
RUN apt-get -y install \
# base tools and development stuff
RUN apt-get update && apt-get -y install \
curl software-properties-common apt-utils \
build-essential \
cmake \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
libvips \
pkg-config
pkg-config && \
apt-get -y update && \
apt-get -y upgrade
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update && apt-get install -y nodejs
# python native
RUN apt-get -y install \
python3 \
python3-dev \
python3-pip \
python3-setuptools \
python3-wheel
# Coral Edge TPU
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
RUN apt-get -y update && apt-get -y install libedgetpu1-std
# these are necessary for pillow-simd, additional on disk size is small
# but could consider removing this.
RUN apt-get -y install \
libjpeg-dev zlib1g-dev
# plugins support fallback to pillow, but vips is faster.
RUN apt-get -y install \
libvips
# gstreamer native https://gstreamer.freedesktop.org/documentation/installing/on-linux.html?gi-language=c#install-gstreamer-on-ubuntu-or-debian
RUN apt-get -y install \
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav gstreamer1.0-alsa \
gstreamer1.0-vaapi
# python native
# python3 gstreamer bindings
RUN apt-get -y install \
python3 \
python3-dev \
python3-gst-1.0 \
python3-pip \
python3-setuptools \
python3-wheel
python3-gst-1.0
# python 3.9 from ppa.
# 3.9 is the version with prebuilt support for tensorflow lite
RUN add-apt-repository ppa:deadsnakes/ppa && \
apt-get -y install \
python3.9 \
python3.9-dev \
python3.9-distutils
# armv7l does not have wheels for any of these
# and compile times would forever, if it works at all.
@@ -70,22 +73,28 @@ RUN apt-get -y install \
# which causes weird behavior in python which looks at the arch version
# which still reports 64bit, even if running in 32bit docker.
# this scenario is not supported and will be reported at runtime.
RUN if [ "$(uname -m)" != "x86_64" ]; \
then \
apt-get -y install \
python3-matplotlib \
python3-numpy \
python3-opencv \
python3-pil \
python3-skimage; \
fi
# this bit is not necessary on amd64, but leaving it for consistency.
RUN apt-get -y install \
python3-matplotlib \
python3-numpy \
python3-opencv \
python3-pil \
python3-skimage
# python pip
RUN python3 -m pip install --upgrade pip
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
# pyvips is broken on x86 due to mismatch ffi
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install aiofiles debugpy typing_extensions psutil
RUN python3 -m pip install debugpy typing_extensions psutil
RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3.9 -m pip install debugpy typing_extensions psutil
################################################################
# End section generated from template/Dockerfile.full.header
@@ -95,14 +104,26 @@ RUN python3 -m pip install aiofiles debugpy typing_extensions psutil
################################################################
FROM header as base
ENV SCRYPTED_DOCKER_SERVE="true"
# intel opencl gpu for openvino
RUN bash -c "if [ \"$(uname -m)\" == \"x86_64\" ]; \
then \
apt-get update && apt-get install -y gpg-agent && \
rm -f /usr/share/keyrings/intel-graphics.gpg && \
curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --output /usr/share/keyrings/intel-graphics.gpg && \
echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \
apt-get -y update && \
apt-get -y install intel-opencl-icd intel-media-va-driver-non-free && \
apt-get -y dist-upgrade; \
fi"
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_BASE_VERSION=20230608
ENV SCRYPTED_DOCKER_FLAVOR=full
################################################################

View File

@@ -1,27 +1,23 @@
ARG BUILDPACK_DEPS_BASE="bullseye"
FROM debian:${BUILDPACK_DEPS_BASE} as header
ARG BASE="jammy"
FROM ubuntu:${BASE} as header
RUN apt-get update && apt-get -y install curl wget
ENV DEBIAN_FRONTEND=noninteractive
# switch to nvm?
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update
RUN apt-get install -y nodejs
RUN apt-get -y update
RUN apt-get -y upgrade
RUN apt-get -y install software-properties-common apt-utils
RUN apt-get -y update
# base development stuff
RUN apt-get -y install \
# base tools and development stuff
RUN apt-get update && apt-get -y install \
curl software-properties-common apt-utils \
build-essential \
cmake \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
pkg-config
pkg-config && \
apt-get -y update && \
apt-get -y upgrade
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update && apt-get install -y nodejs
# python native
RUN apt-get -y install \
@@ -32,15 +28,16 @@ RUN apt-get -y install \
python3-wheel
# python pip
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install aiofiles debugpy typing_extensions psutil
RUN python3 -m pip install debugpy typing_extensions psutil
ENV SCRYPTED_DOCKER_SERVE="true"
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_BASE_VERSION=20230608
ENV SCRYPTED_DOCKER_FLAVOR=lite

View File

@@ -1,4 +1,4 @@
FROM koush/18-bullseye-full.s6
FROM koush/18-jammy-full.s6
WORKDIR /
@@ -19,4 +19,4 @@ RUN python3 -m pip install --upgrade pip
# pyvips is broken on x86 due to mismatch ffi
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
RUN python3 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install aiofiles debugpy typing_extensions psutil
RUN python3 -m pip install debugpy typing_extensions psutil

View File

@@ -1,11 +1,12 @@
ARG BASE="18-bullseye-full"
ARG BASE="18-jammy-full"
FROM koush/scrypted-common:${BASE}
# avahi advertiser support
RUN apt-get -y install \
RUN apt-get update && apt-get -y install \
libnss-mdns \
avahi-discover \
libavahi-compat-libdnssd-dev
libavahi-compat-libdnssd-dev \
xz-utils
# copy configurations and scripts
COPY fs /

View File

@@ -0,0 +1,22 @@
ARG BASE="jammy"
FROM ubuntu:${BASE} as header
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get -y update && \
apt-get -y upgrade && \
apt-get -y install curl software-properties-common apt-utils
# switch to nvm?
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash - && apt-get update && apt-get install -y nodejs
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230608
ENV SCRYPTED_DOCKER_FLAVOR=thin

View File

@@ -0,0 +1,3 @@
./docker-build.sh
docker build -t koush/scrypted:18-jammy-full.nvidia -f Dockerfile.nvidia

View File

@@ -3,15 +3,16 @@
set -x
NODE_VERSION=18
BUILDPACK_DEPS_BASE=bullseye
SCRYPTED_INSTALL_VERSION=beta
IMAGE_BASE=jammy
FLAVOR=full
BASE=$NODE_VERSION-$BUILDPACK_DEPS_BASE-$FLAVOR
BASE=$NODE_VERSION-$IMAGE_BASE-$FLAVOR
echo $BASE
SUPERVISOR=.s6
SUPERVISOR_BASE=$BASE$SUPERVISOR
docker build -t koush/scrypted-common:$BASE -f Dockerfile.$FLAVOR \
--build-arg NODE_VERSION=$NODE_VERSION --build-arg BUILDPACK_DEPS_BASE=$BUILDPACK_DEPS_BASE . && \
--build-arg NODE_VERSION=$NODE_VERSION --build-arg BASE=$IMAGE_BASE . && \
\
docker build -t koush/scrypted:$SUPERVISOR_BASE -f Dockerfile$SUPERVISOR \
--build-arg BASE=$BASE .
--build-arg BASE=$BASE --build-arg SCRYPTED_INSTALL_VERSION=$SCRYPTED_INSTALL_VERSION .

View File

@@ -40,6 +40,9 @@ services:
# - /dev/ttyACM0:/dev/ttyACM0
# all usb devices, such as coral tpu
# - /dev/bus/usb:/dev/bus/usb
# coral PCI devices
# - /dev/apex_0:/dev/apex_0
# - /dev/apex_1:/dev/apex_1
volumes:
- ~/.scrypted/volume:/server/volume

View File

@@ -42,7 +42,7 @@ fi
WATCHTOWER_HTTP_API_TOKEN=$(echo $RANDOM | md5sum)
DOCKER_COMPOSE_YML=$SCRYPTED_HOME/docker-compose.yml
echo "Created $DOCKER_COMPOSE_YML"
curl -s https://raw.githubusercontent.com/koush/scrypted/main/docker/docker-compose.yml | sed s/SET_THIS_TO_SOME_RANDOM_TEXT/"$(echo $RANDOM | md5sum | head -c 32)"/g > $DOCKER_COMPOSE_YML
curl -s https://raw.githubusercontent.com/koush/scrypted/main/install/docker/docker-compose.yml | sed s/SET_THIS_TO_SOME_RANDOM_TEXT/"$(echo $RANDOM | md5sum | head -c 32)"/g > $DOCKER_COMPOSE_YML
echo "Setting permissions on $SCRYPTED_HOME"
chown -R $SERVICE_USER $SCRYPTED_HOME

View File

@@ -0,0 +1,30 @@
################################################################
# Begin section generated from template/Dockerfile.full.footer
################################################################
FROM header as base
# intel opencl gpu for openvino
RUN bash -c "if [ \"$(uname -m)\" == \"x86_64\" ]; \
then \
apt-get update && apt-get install -y gpg-agent && \
rm -f /usr/share/keyrings/intel-graphics.gpg && \
curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --output /usr/share/keyrings/intel-graphics.gpg && \
echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \
apt-get -y update && \
apt-get -y install intel-opencl-icd intel-media-va-driver-non-free && \
apt-get -y dist-upgrade; \
fi"
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230608
ENV SCRYPTED_DOCKER_FLAVOR=full
################################################################
# End section generated from template/Dockerfile.full.footer
################################################################

View File

@@ -3,63 +3,66 @@
# This common file will be used by both Docker and the linux
# install script.
################################################################
ARG BUILDPACK_DEPS_BASE="bullseye"
FROM debian:${BUILDPACK_DEPS_BASE} as header
ARG BASE="jammy"
FROM ubuntu:${BASE} as header
RUN apt-get update && apt-get -y install curl wget
ENV DEBIAN_FRONTEND=noninteractive
# switch to nvm?
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update
RUN apt-get install -y nodejs
# Coral Edge TPU
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
RUN apt-get -y update
RUN apt-get -y install libedgetpu1-std
# intel opencl gpu for openvino
RUN if [ "$(uname -m)" = "x86_64" ]; \
then \
apt-get -y install \
intel-opencl-icd; \
fi
RUN apt-get -y install software-properties-common apt-utils
RUN apt-get -y update
RUN apt-get -y upgrade
# base development stuff
RUN apt-get -y install \
# base tools and development stuff
RUN apt-get update && apt-get -y install \
curl software-properties-common apt-utils \
build-essential \
cmake \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
libvips \
pkg-config
pkg-config && \
apt-get -y update && \
apt-get -y upgrade
ARG NODE_VERSION=18
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
RUN apt-get update && apt-get install -y nodejs
# python native
RUN apt-get -y install \
python3 \
python3-dev \
python3-pip \
python3-setuptools \
python3-wheel
# Coral Edge TPU
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
RUN apt-get -y update && apt-get -y install libedgetpu1-std
# these are necessary for pillow-simd, additional on disk size is small
# but could consider removing this.
RUN apt-get -y install \
libjpeg-dev zlib1g-dev
# plugins support fallback to pillow, but vips is faster.
RUN apt-get -y install \
libvips
# gstreamer native https://gstreamer.freedesktop.org/documentation/installing/on-linux.html?gi-language=c#install-gstreamer-on-ubuntu-or-debian
RUN apt-get -y install \
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav gstreamer1.0-alsa \
gstreamer1.0-vaapi
# python native
# python3 gstreamer bindings
RUN apt-get -y install \
python3 \
python3-dev \
python3-gst-1.0 \
python3-pip \
python3-setuptools \
python3-wheel
python3-gst-1.0
# python 3.9 from ppa.
# 3.9 is the version with prebuilt support for tensorflow lite
RUN add-apt-repository ppa:deadsnakes/ppa && \
apt-get -y install \
python3.9 \
python3.9-dev \
python3.9-distutils
# armv7l does not have wheels for any of these
# and compile times would take forever, if it works at all.
@@ -67,22 +70,28 @@ RUN apt-get -y install \
# which causes weird behavior in python, which looks at the arch version
# that still reports 64bit, even if running in 32bit docker.
# this scenario is not supported and will be reported at runtime.
RUN if [ "$(uname -m)" != "x86_64" ]; \
then \
apt-get -y install \
python3-matplotlib \
python3-numpy \
python3-opencv \
python3-pil \
python3-skimage; \
fi
# this bit is not necessary on amd64, but leaving it for consistency.
RUN apt-get -y install \
python3-matplotlib \
python3-numpy \
python3-opencv \
python3-pil \
python3-skimage
# python pip
RUN python3 -m pip install --upgrade pip
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
# pyvips is broken on x86 due to a libffi mismatch
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
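# forcing cffi to rebuild from source below (--no-binary) links it against the system libffi,
# which likely avoids the mismatch described above.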
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install aiofiles debugpy typing_extensions psutil
RUN python3 -m pip install debugpy typing_extensions psutil
RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3.9 -m pip install debugpy typing_extensions psutil
################################################################
# End section generated from template/Dockerfile.full.header

install/icon.png: new binary file (image, 38 KiB), not shown.

@@ -45,10 +45,10 @@ ARG() {
}
ENV() {
echo "ignoring ENV $1"
export $@
}
source <(curl -s https://raw.githubusercontent.com/koush/scrypted/main/docker/template/Dockerfile.full.header)
source <(curl -s https://raw.githubusercontent.com/koush/scrypted/main/install/docker/template/Dockerfile.full.header)
if [ -z "$SERVICE_USER" ]
then


@@ -87,7 +87,7 @@ if [ "$PYTHON_VERSION" != "3.10" ]
then
RUN python$PYTHON_VERSION -m pip install typing
fi
RUN python$PYTHON_VERSION -m pip install aiofiles debugpy typing_extensions opencv-python psutil
RUN python$PYTHON_VERSION -m pip install debugpy typing_extensions opencv-python psutil
echo "Installing Scrypted Launch Agent..."


@@ -20,7 +20,7 @@ $env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";"
py $SCRYPTED_WINDOWS_PYTHON_VERSION -m pip install --upgrade pip
py $SCRYPTED_WINDOWS_PYTHON_VERSION -m pip install aiofiles debugpy typing_extensions typing opencv-python
py $SCRYPTED_WINDOWS_PYTHON_VERSION -m pip install debugpy typing_extensions typing opencv-python
npx -y scrypted@latest install-server

install/logo.png: new binary file (image, 38 KiB), not shown.

@@ -23,10 +23,19 @@ async function example() {
if (!backyard)
throw new Error('Device not found');
backyard.listen(ScryptedInterface.ObjectDetector, (source, details, data) => {
backyard.listen(ScryptedInterface.ObjectDetector, async (source, details, data) => {
const results = data as ObjectsDetected;
console.log(results);
})
console.log('detection results', results);
// detections that are flagged for retention will have a detectionId.
// tf etc. won't retain automatically; this requires a wrapping detector like Scrypted NVR Object Detection
// to decide which frames to keep. Otherwise, saving every image would perform extremely poorly.
if (!results.detectionId)
return;
const media = await backyard.getDetectionInput(results.detectionId);
const jpeg = await sdk.mediaManager.convertMediaObjectToBuffer(media, 'image/jpeg');
// do something with the buffer like save to disk or send to a service.
});
}
example();
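// A minimal sketch (not from the Scrypted docs above) of what "do something with the buffer"
// might look like, assuming Node's fs/promises is available to the plugin process:
import { writeFile } from 'fs/promises';
// hypothetical helper: persist a retained detection image to disk
async function saveDetectionImage(jpeg: Buffer, detectionId: string) {
    // the target path and naming scheme here are illustrative only
    await writeFile(`/tmp/${detectionId}.jpeg`, jpeg);
}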


@@ -1,15 +1,15 @@
{
"name": "@scrypted/client",
"version": "1.1.51",
"version": "1.1.54",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/client",
"version": "1.1.51",
"version": "1.1.54",
"license": "ISC",
"dependencies": {
"@scrypted/types": "^0.2.80",
"@scrypted/types": "^0.2.91",
"axios": "^0.25.0",
"engine.io-client": "^6.4.0",
"rimraf": "^3.0.2"
@@ -21,9 +21,9 @@
}
},
"node_modules/@scrypted/types": {
"version": "0.2.80",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.80.tgz",
"integrity": "sha512-YVu7jcD5sYgjJLP7kH1K2FJzqrlcjdpDxzZoLXudZCKiujldbmLYcwglSgnN9bRqkKZcGOfru/WssvQj+0JioQ=="
"version": "0.2.91",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.91.tgz",
"integrity": "sha512-GfWil8cl2QwlTXk506ZXDALQfuv7zN48PtPlpmBMO/IYTQFtb+RB2zr+FwC9gdvRaZgs9NCCS2Fiig1OY7uxdQ=="
},
"node_modules/@socket.io/component-emitter": {
"version": "3.1.0",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/client",
"version": "1.1.51",
"version": "1.1.54",
"description": "",
"main": "dist/packages/client/src/index.js",
"scripts": {
@@ -17,7 +17,7 @@
"typescript": "^4.9.5"
},
"dependencies": {
"@scrypted/types": "^0.2.80",
"@scrypted/types": "^0.2.91",
"axios": "^0.25.0",
"engine.io-client": "^6.4.0",
"rimraf": "^3.0.2"


@@ -78,25 +78,48 @@ export interface ScryptedClientOptions extends Partial<ScryptedLoginOptions> {
transports?: string[];
}
function isInstalledApp() {
return globalThis.navigator?.userAgent.includes('InstalledApp');
}
function isRunningStandalone() {
return globalThis.matchMedia?.('(display-mode: standalone)').matches || globalThis.navigator?.userAgent.includes('InstalledApp');
return globalThis.matchMedia?.('(display-mode: standalone)').matches || isInstalledApp();
}
export async function logoutScryptedClient(baseUrl?: string) {
const url = baseUrl ? new URL('/logout', baseUrl).toString() : '/logout';
const url = combineBaseUrl(baseUrl, 'logout');
const response = await axios(url, {
withCredentials: true,
});
return response.data;
}
export function getCurrentBaseUrl() {
// an endpoint within scrypted will be served at /endpoint/[org/][id]
// find the endpoint prefix and anything prior to that will be the server base url.
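// illustrative example (hypothetical URL): for a page served at
// https://scrypted.example.com:10443/endpoint/@scrypted/core/ui/
// everything before the "endpoint" segment is kept, so this returns
// "https://scrypted.example.com:10443/".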
const url = new URL(window.location.href);
url.search = '';
url.hash = '';
let endpointPath = window.location.pathname;
const parts = endpointPath.split('/');
const index = parts.findIndex(p => p === 'endpoint');
if (index === -1) {
// console.warn('path not recognized, does not contain the segment "endpoint".')
return undefined;
}
const keep = parts.slice(0, index);
keep.push('');
url.pathname = keep.join('/');
return url.toString();
}
export async function loginScryptedClient(options: ScryptedLoginOptions) {
let { baseUrl, username, password, change_password, maxAge } = options;
// pwa should stay logged in for a year.
if (!maxAge && isRunningStandalone())
maxAge = 365 * 24 * 60 * 60 * 1000;
const url = `${baseUrl || ''}/login`;
const url = combineBaseUrl(baseUrl, 'login');
const response = await axios.post(url, {
username,
password,
@@ -129,7 +152,7 @@ export async function loginScryptedClient(options: ScryptedLoginOptions) {
export async function checkScryptedClientLogin(options?: ScryptedConnectionOptions) {
let { baseUrl } = options || {};
const url = `${baseUrl || ''}/login`;
const url = combineBaseUrl(baseUrl, 'login');
const response = await axios.get(url, {
withCredentials: true,
...options?.axiosConfig,
@@ -138,6 +161,7 @@ export async function checkScryptedClientLogin(options?: ScryptedConnectionOptio
const directAddress = response.headers['x-scrypted-direct-address'];
return {
hostname: response.data.hostname as string,
redirect: response.data.redirect as string,
username: response.data.username as string,
expiration: response.data.expiration as number,
@@ -145,6 +169,7 @@ export async function checkScryptedClientLogin(options?: ScryptedConnectionOptio
error: response.data.error as string,
authorization: response.data.authorization as string,
queryToken: response.data.queryToken as any,
token: response.data.token as string,
addresses: response.data.addresses as string[],
scryptedCloud,
directAddress,
@@ -175,9 +200,12 @@ export function redirectScryptedLogin(options?: {
globalThis.location.href = redirect_uri;
}
export function combineBaseUrl(baseUrl: string, rootPath: string) {
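// e.g. (hypothetical values) combineBaseUrl('https://example.com:10443', 'logout')
// resolves to 'https://example.com:10443/logout'; with no baseUrl it falls back to '/logout'.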
return baseUrl ? new URL(rootPath, baseUrl).toString() : '/' + rootPath;
}
export async function redirectScryptedLogout(baseUrl?: string) {
baseUrl = baseUrl || '';
globalThis.location.href = `${baseUrl}/logout`;
globalThis.location.href = combineBaseUrl(baseUrl, 'logout');
}
export async function connectScryptedClient(options: ScryptedClientOptions): Promise<ScryptedClientStatic> {
@@ -219,9 +247,10 @@ export async function connectScryptedClient(options: ScryptedClientOptions): Pro
}
let socket: IOClientSocket;
const endpointPath = `/endpoint/${pluginId}`;
const eioPath = `endpoint/${pluginId}/engine.io/api`;
const eioEndpoint = baseUrl ? new URL(eioPath, baseUrl).pathname : '/' + eioPath;
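// e.g. with a hypothetical baseUrl of 'https://example.com/scrypted/', the engine.io path
// becomes '/scrypted/endpoint/<pluginId>/engine.io/api'; with no baseUrl it stays
// '/endpoint/<pluginId>/engine.io/api'.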
const eioOptions: Partial<SocketOptions> = {
path: `${endpointPath}/engine.io/api`,
path: eioEndpoint,
withCredentials: true,
extraHeaders,
rejectUnauthorized: false,
@@ -238,14 +267,15 @@ export async function connectScryptedClient(options: ScryptedClientOptions): Pro
// if the cert has been accepted. Other browsers seem fine.
// So the default is not to connect to IP addresses on Chrome, but do so on other browsers.
const isChrome = globalThis.navigator?.userAgent.includes('Chrome');
const isNotChromeOrIsInstalledApp = !isChrome || isInstalledApp();
const addresses: string[] = [];
const localAddressDefault = !isChrome;
const localAddressDefault = isNotChromeOrIsInstalledApp;
if (((scryptedCloud && options.local === undefined && localAddressDefault) || options.local) && localAddresses) {
addresses.push(...localAddresses);
}
const directAddressDefault = directAddress && (!isChrome || !isIPAddress(directAddress));
const directAddressDefault = directAddress && (isNotChromeOrIsInstalledApp || !isIPAddress(directAddress));
if (((scryptedCloud && options.direct === undefined && directAddressDefault) || options.direct) && directAddress) {
addresses.push(directAddress);
}


@@ -1,12 +1,12 @@
{
"name": "@scrypted/h264-packetizer",
"version": "0.0.6",
"version": "0.0.7",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/h264-packetizer",
"version": "0.0.6",
"version": "0.0.7",
"license": "ISC",
"devDependencies": {
"@types/node": "^18.11.18",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/h264-repacketizer",
"version": "0.0.6",
"version": "0.0.7",
"description": "",
"main": "dist/index.js",
"scripts": {


@@ -1,4 +1,4 @@
{
"scrypted.debugHost": "10.10.0.50",
"scrypted.debugHost": "koushik-ubuntu",
}


@@ -1,12 +1,12 @@
{
"name": "@scrypted/alexa",
"version": "0.2.3",
"version": "0.2.5",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/alexa",
"version": "0.2.3",
"version": "0.2.5",
"dependencies": {
"axios": "^1.3.4",
"uuid": "^9.0.0"
@@ -17,7 +17,8 @@
}
},
"../../sdk": {
"version": "0.2.85",
"name": "@scrypted/sdk",
"version": "0.2.101",
"dev": true,
"license": "ISC",
"dependencies": {


@@ -1,6 +1,6 @@
{
"name": "@scrypted/alexa",
"version": "0.2.4",
"version": "0.2.5",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",


@@ -1,12 +1,12 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.121",
"version": "0.0.122",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/amcrest",
"version": "0.0.121",
"version": "0.0.122",
"license": "Apache",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.121",
"version": "0.0.122",
"description": "Amcrest Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",


@@ -33,6 +33,16 @@ export class AmcrestCameraClient {
});
}
async reboot() {
const response = await this.digestAuth.request({
httpsAgent: amcrestHttpsAgent,
method: "GET",
responseType: 'text',
url: `http://${this.ip}/cgi-bin/magicBox.cgi?action=reboot`,
});
return response.data as string;
}
async checkTwoWayAudio() {
const response = await this.digestAuth.request({
httpsAgent: amcrestHttpsAgent,


@@ -1,6 +1,6 @@
import { ffmpegLogInitialOutput } from '@scrypted/common/src/media-helpers';
import { readLength } from "@scrypted/common/src/read-stream";
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, MediaObject, MediaStreamOptions, PictureOptions, RequestRecordingStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, VideoCameraConfiguration, VideoRecorder } from "@scrypted/sdk";
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, MediaObject, MediaStreamOptions, PictureOptions, Reboot, RequestRecordingStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, VideoCameraConfiguration, VideoRecorder } from "@scrypted/sdk";
import child_process, { ChildProcess } from 'child_process';
import { PassThrough, Readable, Stream } from "stream";
import { OnvifIntercom } from "../../onvif/src/onvif-intercom";
@@ -23,7 +23,7 @@ function findValue(blob: string, prefix: string, key: string) {
return parts[1];
}
class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration, Camera, Intercom, VideoRecorder {
class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration, Camera, Intercom, VideoRecorder, Reboot {
eventStream: Stream;
cp: ChildProcess;
client: AmcrestCameraClient;
@@ -37,9 +37,15 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
this.storage.removeItem('amcrestDoorbell');
}
this.updateDevice();
this.updateDeviceInfo();
}
async reboot() {
const client = this.getClient();
await client.reboot();
}
getRecordingStreamCurrentTime(recordingStream: MediaObject): Promise<number> {
throw new Error("Method not implemented.");
}
@@ -440,6 +446,29 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
return this.videoStreamOptions;
}
updateDevice() {
const doorbellType = this.storage.getItem('doorbellType');
const isDoorbell = doorbellType === AMCREST_DOORBELL_TYPE || doorbellType === DAHUA_DOORBELL_TYPE;
// true is the legacy value before onvif was added.
const twoWayAudio = this.storage.getItem('twoWayAudio') === 'true'
|| this.storage.getItem('twoWayAudio') === 'ONVIF'
|| this.storage.getItem('twoWayAudio') === 'Amcrest';
const interfaces = this.provider.getInterfaces();
let type: ScryptedDeviceType = undefined;
if (isDoorbell) {
type = ScryptedDeviceType.Doorbell;
interfaces.push(ScryptedInterface.BinarySensor)
}
if (isDoorbell || twoWayAudio) {
interfaces.push(ScryptedInterface.Intercom);
}
const continuousRecording = this.storage.getItem('continuousRecording') === 'true';
if (continuousRecording)
interfaces.push(ScryptedInterface.VideoRecorder);
this.provider.updateDevice(this.nativeId, this.name, interfaces, type);
}
async putSetting(key: string, value: string) {
if (key === 'continuousRecording') {
if (value === 'true') {
@@ -461,27 +490,8 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
this.videoStreamOptions = undefined;
super.putSetting(key, value);
const doorbellType = this.storage.getItem('doorbellType');
const isDoorbell = doorbellType === AMCREST_DOORBELL_TYPE || doorbellType === DAHUA_DOORBELL_TYPE;
// true is the legacy value before onvif was added.
const twoWayAudio = this.storage.getItem('twoWayAudio') === 'true'
|| this.storage.getItem('twoWayAudio') === 'ONVIF'
|| this.storage.getItem('twoWayAudio') === 'Amcrest';
const interfaces = this.provider.getInterfaces();
let type: ScryptedDeviceType = undefined;
if (isDoorbell) {
type = ScryptedDeviceType.Doorbell;
interfaces.push(ScryptedInterface.BinarySensor)
}
if (isDoorbell || twoWayAudio) {
interfaces.push(ScryptedInterface.Intercom);
}
const continuousRecording = this.storage.getItem('continuousRecording') === 'true';
if (continuousRecording)
interfaces.push(ScryptedInterface.VideoRecorder);
this.provider.updateDevice(this.nativeId, this.name, interfaces, type);
this.updateDevice();
this.updateDeviceInfo();
}
@@ -576,6 +586,7 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
class AmcrestProvider extends RtspProvider {
getAdditionalInterfaces() {
return [
ScryptedInterface.Reboot,
ScryptedInterface.VideoCameraConfiguration,
ScryptedInterface.Camera,
ScryptedInterface.AudioSensor,


@@ -1,13 +1,25 @@
# Arlo Plugin for Scrypted
The Arlo Plugin connects Scrypted to Arlo cloud, allowing you to access all of your Arlo cameras in Scrypted.
The Arlo Plugin connects Scrypted to Arlo Cloud, allowing you to access all of your Arlo cameras in Scrypted.
It is highly recommended to create a dedicated Arlo account for use with this plugin and share your cameras from your main account, as Arlo only permits one connection to their servers per account. Using a separate account allows you to use the Arlo app or website simultaneously with this plugin.
It is highly recommended to create a dedicated Arlo account for use with this plugin and share your cameras from your main account, as Arlo only permits one active login to their servers per account. Using a separate account allows you to use the Arlo app or website simultaneously with this plugin; otherwise, logging in from one place will log you out of all other devices.
The account you use for this plugin must have either SMS or email set as the default 2FA option. Once you enter your username and password on the plugin settings page, you should receive a 2FA code through your default 2FA option. Enter that code into the provided box, and your cameras will appear in Scrypted. Or, see below for configuring IMAP to auto-login with 2FA.
If you experience any trouble logging in, clear the username and password boxes, reload the plugin, and try again.
## General Setup Notes
* Ensure that your Arlo account's default 2FA option is set to either SMS or email.
* Motion event notifications should be turned on in the Arlo app. If you are receiving motion push notifications, Scrypted will also receive motion events.
* Disable smart detection and any cloud/local recording in the Arlo app. Arlo Cloud only permits one active stream per camera, so any smart detection or recording features may prevent downstream plugins (e.g. Homekit) from successfully pulling the video feed after a motion event.
* It is highly recommended to enable the Rebroadcast plugin to allow multiple downstream plugins to pull the video feed within Scrypted.
* If there is no audio on your camera, switch to the `FFmpeg (TCP)` parser under the `Cloud RTSP` settings.
* Prebuffering should only be enabled if the camera is wired to a persistent power source, such as a wall outlet. Prebuffering will only work if your camera does not have a battery or `Plugged In to External Power` is selected.
* The plugin supports pulling RTSP or DASH streams from Arlo Cloud. It is recommended to use RTSP for the lowest latency streams. DASH reliability is inconsistent, and it may return finicky codecs that require additional FFmpeg output arguments, e.g. `-vcodec h264`.
Note that streaming these cameras uses extra Internet bandwidth, since video and audio packets must travel from the camera through your network, out to Arlo Cloud, and then back to your network and into Scrypted.
## IMAP 2FA
The Arlo Plugin supports using the IMAP protocol to check an email mailbox for Arlo 2FA codes. This requires you to specify an email 2FA option as the default in your Arlo account settings.


@@ -1,19 +1,19 @@
{
"name": "@scrypted/arlo",
"version": "0.7.13",
"version": "0.7.29",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/arlo",
"version": "0.7.13",
"version": "0.7.29",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.87",
"version": "0.2.101",
"dev": true,
"license": "ISC",
"dependencies": {


@@ -1,6 +1,6 @@
{
"name": "@scrypted/arlo",
"version": "0.7.13",
"version": "0.7.29",
"description": "Arlo Plugin for Scrypted",
"keywords": [
"scrypted",


@@ -24,6 +24,7 @@ limitations under the License.
# Import helper classes that are part of this library.
from .request import Request
from .host_picker import pick_host
from .mqtt_stream_async import MQTTStream
from .sse_stream_async import EventStream
from .logging import logger
@@ -31,6 +32,7 @@ from .logging import logger
# Import all of the other stuff.
from datetime import datetime, timedelta
from cachetools import cached, TTLCache
import scrypted_arlo_go
import asyncio
import sys
@@ -38,6 +40,8 @@ import base64
import math
import random
import time
import uuid
from urllib.parse import urlparse, parse_qs
stream_class = MQTTStream
@@ -78,8 +82,11 @@ USER_AGENTS = {
class Arlo(object):
BASE_URL = 'my.arlo.com'
AUTH_URL = 'ocapi-app.arlo.com'
BACKUP_AUTH_HOSTS = list(scrypted_arlo_go.BACKUP_AUTH_HOSTS())
TRANSID_PREFIX = 'web'
random.shuffle(BACKUP_AUTH_HOSTS)
def __init__(self, username, password):
self.username = username
self.password = password
@@ -137,8 +144,7 @@ class Arlo(object):
self.BASE_URL = 'myapi.arlo.com'
def LoginMFA(self):
self.request = Request()
device_id = str(uuid.uuid4())
headers = {
'DNT': '1',
'schemaVersion': '1',
@@ -149,11 +155,33 @@ class Arlo(object):
'Referer': f'https://{self.BASE_URL}/',
'Source': 'arloCamWeb',
'TE': 'Trailers',
'x-user-device-id': device_id,
'x-user-device-automation-name': 'QlJPV1NFUg==',
'x-user-device-type': 'BROWSER',
'Host': self.AUTH_URL,
}
self.request = Request()
try:
auth_host = self.AUTH_URL
self.request.options(f'https://{auth_host}/api/auth', headers=headers)
logger.info("Using primary authentication host")
except Exception as e:
# in case cloudflare rejects our auth request...
logger.warning(f"Using fallback authentication host due to: {e}")
auth_host = pick_host([
base64.b64decode(h.encode("utf-8")).decode("utf-8")
for h in self.BACKUP_AUTH_HOSTS
], self.AUTH_URL, "/api/auth")
logger.debug(f"Selected backup authentication host {auth_host}")
self.request = Request(mode="ip")
# Authenticate
self.request.options(f'https://{auth_host}/api/auth', headers=headers)
auth_body = self.request.post(
f'https://{self.AUTH_URL}/api/auth',
f'https://{auth_host}/api/auth',
params={
'email': self.username,
'password': str(base64.b64encode(self.password.encode('utf-8')), 'utf-8'),
@@ -168,21 +196,26 @@ class Arlo(object):
# Retrieve MFA factor id
factors_body = self.request.get(
f'https://{self.AUTH_URL}/api/getFactors',
f'https://{auth_host}/api/getFactors',
params={'data': auth_body['data']['issued']},
headers=headers,
raw=True
)
factor_id = next(
i for i in factors_body['data']['items']
if (i['factorType'] == 'EMAIL' or i['factorType'] == 'SMS')
and i['factorRole'] == "PRIMARY"
)['factorId']
iter([
i for i in factors_body['data']['items']
if (i['factorType'] == 'EMAIL' or i['factorType'] == 'SMS')
and i['factorRole'] == "PRIMARY"
]),
{}
).get('factorId')
if not factor_id:
raise Exception("Could not find valid 2FA method - is the primary 2FA set to either Email or SMS?")
# Start factor auth
start_auth_body = self.request.post(
f'https://{self.AUTH_URL}/api/startAuth',
{'factorId': factor_id},
f'https://{auth_host}/api/startAuth',
params={'factorId': factor_id},
headers=headers,
raw=True
)
@@ -192,8 +225,8 @@ class Arlo(object):
nonlocal self, factor_auth_code, headers
finish_auth_body = self.request.post(
f'https://{self.AUTH_URL}/api/finishAuth',
{
f'https://{auth_host}/api/finishAuth',
params={
'factorAuthCode': factor_auth_code,
'otp': code
},
@@ -201,6 +234,11 @@ class Arlo(object):
raw=True
)
if finish_auth_body.get('data', {}).get('token') is None:
raise Exception("Could not complete 2FA, maybe invalid token? If the error persists, please try reloading the plugin and logging in again.")
self.request = Request()
# Update Authorization code with new code
headers = {
'Auth-Version': '2',
@@ -254,14 +292,16 @@ class Arlo(object):
cameras[camera['deviceId']] = camera
# filter out cameras without basestation, where they are their own basestations
# for now, keep doorbells and sirens in the list so they get pings
# this is so battery-powered devices do not drain due to pings
# for wired devices, keep doorbells, sirens, and arloq in the list so they get pings
proper_basestations = {}
for basestation in basestations.values():
if basestation['deviceId'] == basestation.get('parentId') and basestation['deviceType'] not in ['doorbell', 'siren']:
if basestation['deviceId'] == basestation.get('parentId') and \
basestation['deviceType'] not in ['doorbell', 'siren', 'arloq', 'arloqs']:
continue
proper_basestations[basestation['deviceId']] = basestation
logger.info(f"Will send heartbeat to the following basestations: {list(proper_basestations.keys())}")
logger.info(f"Will send heartbeat to the following devices: {list(proper_basestations.keys())}")
# start heartbeat loop with only basestations
asyncio.get_event_loop().create_task(heartbeat(self, list(proper_basestations.values())))
@@ -349,7 +389,7 @@ class Arlo(object):
body['from'] = self.user_id+'_web'
body['to'] = basestation_id
self.request.post(f'https://{self.BASE_URL}/hmsweb/users/devices/notify/'+body['to'], body, headers={"xcloudId":basestation.get('xCloudId')})
self.request.post(f'https://{self.BASE_URL}/hmsweb/users/devices/notify/'+body['to'], params=body, headers={"xcloudId":basestation.get('xCloudId')})
return body.get('transId')
def Ping(self, basestation):
@@ -601,7 +641,7 @@ class Arlo(object):
If you pass in a valid device type, as a string or a list, this method will return an array of just those devices that match that type. An example would be ['basestation', 'camera']
To filter provisioned or unprovisioned devices pass in a True/False value for filter_provisioned. By default both types are returned.
"""
devices = self.request.get(f'https://{self.BASE_URL}/hmsweb/v2/users/devices')
devices = self._getDevicesImpl()
if device_type:
devices = [ device for device in devices if device.get('deviceType') in device_type]
@@ -613,23 +653,34 @@ class Arlo(object):
return devices
async def StartStream(self, basestation, camera):
@cached(cache=TTLCache(maxsize=1, ttl=60))
def _getDevicesImpl(self):
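# the TTLCache decorator above memoizes the device list for 60 seconds,
# so repeated GetDevices() calls do not re-query the Arlo API.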
devices = self.request.get(f'https://{self.BASE_URL}/hmsweb/v2/users/devices')
return devices
async def StartStream(self, basestation, camera, mode="rtsp"):
"""
This function returns the url of the rtsp video stream.
The returned URL must be used within 30 seconds or else it becomes invalid.
It can be streamed with: ffmpeg -re -i 'rtsps://<url>' -acodec copy -vcodec copy test.mp4
The request to /users/devices/startStream returns: { url:rtsp://<url>:443/vzmodulelive?egressToken=b<xx>&userAgent=iOS&cameraId=<camid>}
If mode is set to "dash", returns the url to the mpd file for DASH streaming.
"""
resource = f"cameras/{camera.get('deviceId')}"
if mode not in ["rtsp", "dash"]:
raise ValueError("mode must be 'rtsp' or 'dash'")
# nonlocal variable hack for Python 2.x.
class nl:
stream_url_dict = None
def trigger(self):
ua = USER_AGENTS['arlo'] if mode == "rtsp" else USER_AGENTS["firefox"]
nl.stream_url_dict = self.request.post(
f'https://{self.BASE_URL}/hmsweb/users/devices/startStream',
{
params={
"to": camera.get('parentId'),
"from": self.user_id + "_web",
"resource": "cameras/" + camera.get('deviceId'),
@@ -642,14 +693,17 @@ class Arlo(object):
"cameraId": camera.get('deviceId')
}
},
headers={"xcloudId":camera.get('xCloudId')}
headers={"xcloudId":camera.get('xCloudId'), 'User-Agent': ua}
)
def callback(self, event):
#return nl.stream_url_dict['url'].replace("rtsp://", "rtsps://")
properties = event.get("properties", {})
if properties.get("activityState") == "userStreamActive":
return nl.stream_url_dict['url'].replace("rtsp://", "rtsps://")
if mode == "rtsp":
return nl.stream_url_dict['url'].replace("rtsp://", "rtsps://")
else:
return nl.stream_url_dict['url'].replace(":80", "")
return None
return await self.TriggerAndHandleEvent(
@@ -660,6 +714,27 @@ class Arlo(object):
callback,
)
def GetMPDHeaders(self, url: str) -> dict:
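# build browser-like headers, including the egressToken taken from the stream URL's
# query string; these are later passed to ffmpeg via '-headers' when pulling the DASH stream.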
parsed = urlparse(url)
query = parse_qs(parsed.query)
headers = {
"Accept": "*/*",
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "en-US,en;q=0.9",
"Connection": "keep-alive",
"DNT": "1",
"Egress-Token": query['egressToken'][0],
"Origin": "https://my.arlo.com",
"Referer": "https://my.arlo.com/",
"User-Agent": USER_AGENTS["firefox"],
}
return headers
def GetSIPInfo(self):
resp = self.request.get(f'https://{self.BASE_URL}/hmsweb/users/devices/sipInfo')
return resp
def StartPushToTalk(self, basestation, camera):
url = f'https://{self.BASE_URL}/hmsweb/users/devices/{self.user_id}_{camera.get("deviceId")}/pushtotalk'
resp = self.request.get(url)
@@ -702,7 +777,7 @@ class Arlo(object):
def trigger(self):
self.request.post(
f"https://{self.BASE_URL}/hmsweb/users/devices/fullFrameSnapshot",
{
params={
"to": camera.get("parentId"),
"from": self.user_id + "_web",
"resource": "cameras/" + camera.get("deviceId"),
@@ -885,7 +960,7 @@ class Arlo(object):
logger.debug(f"Library cache miss for {from_date}, {to_date}")
return self.request.post(
f'https://{self.BASE_URL}/hmsweb/users/library',
{
params={
'dateFrom': from_date,
'dateTo': to_date
}


@@ -0,0 +1,26 @@
import ssl
from socket import setdefaulttimeout
import requests
from requests_toolbelt.adapters import host_header_ssl
import scrypted_arlo_go
from .logging import logger
setdefaulttimeout(5)
def pick_host(hosts, hostname_to_match, endpoint_to_test):
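# try each candidate host: verify its TLS certificate matches hostname_to_match,
# then confirm endpoint_to_test answers a POST sent with the spoofed Host header;
# return the first host that passes both checks.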
session = requests.Session()
session.mount('https://', host_header_ssl.HostHeaderSSLAdapter())
for host in hosts:
try:
c = ssl.get_server_certificate((host, 443))
scrypted_arlo_go.VerifyCertHostname(c, hostname_to_match)
r = session.post(f"https://{host}{endpoint_to_test}", headers={"Host": hostname_to_match})
r.raise_for_status()
return host
except Exception as e:
logger.warning(f"{host} is invalid: {e}")
raise Exception("no valid hosts found!")


@@ -16,6 +16,8 @@
import requests
from requests.exceptions import HTTPError
from requests_toolbelt.adapters import host_header_ssl
import cloudscraper
import time
import uuid
@@ -27,8 +29,13 @@ import uuid
class Request(object):
"""HTTP helper class"""
def __init__(self, timeout=5):
self.session = requests.Session()
def __init__(self, timeout=5, mode="cloudscraper"):
if mode == "cloudscraper":
from .arlo_async import USER_AGENTS
self.session = cloudscraper.CloudScraper(browser={"custom": USER_AGENTS["arlo"]})
elif mode == "ip":
self.session = requests.Session()
self.session.mount('https://', host_header_ssl.HostHeaderSSLAdapter())
self.timeout = timeout
def gen_event_id(self):
@@ -37,7 +44,7 @@ class Request(object):
def get_time(self):
return int(time.time_ns() / 1_000_000)
def _request(self, url, method='GET', params={}, headers={}, stream=False, raw=False):
def _request(self, url, method='GET', params={}, headers={}, raw=False, skip_event_id=False):
## uncomment for debug logging
"""
@@ -51,14 +58,13 @@ class Request(object):
req_log.propagate = True
#"""
url = f'{url}?eventId={self.gen_event_id()}&time={self.get_time()}'
if not skip_event_id:
url = f'{url}?eventId={self.gen_event_id()}&time={self.get_time()}'
if method == 'GET':
#print('COOKIES: ', self.session.cookies.get_dict())
r = self.session.get(url, params=params, headers=headers, stream=stream, timeout=self.timeout)
r = self.session.get(url, params=params, headers=headers, timeout=self.timeout)
r.raise_for_status()
if stream is True:
return r
elif method == 'PUT':
r = self.session.put(url, json=params, headers=headers, timeout=self.timeout)
r.raise_for_status()
@@ -81,14 +87,14 @@ class Request(object):
else:
raise HTTPError('Request ({0} {1}) failed: {2}'.format(method, url, r.json()), response=r)
def get(self, url, params={}, headers={}, stream=False, raw=False):
return self._request(url, 'GET', params=params, headers=headers, stream=stream, raw=raw)
def get(self, url, **kwargs):
return self._request(url, 'GET', **kwargs)
def put(self, url, params={}, headers={}, raw=False):
return self._request(url, 'PUT', params=params, headers=headers, raw=raw)
def put(self, url, **kwargs):
return self._request(url, 'PUT', **kwargs)
def post(self, url, params={}, headers={}, raw=False):
return self._request(url, 'POST', params=params, headers=headers, raw=raw)
def post(self, url, **kwargs):
return self._request(url, 'POST', **kwargs)
def options(self, url, headers={}, raw=False):
return self._request(url, 'OPTIONS', headers=headers, raw=raw)
def options(self, url, **kwargs):
return self._request(url, 'OPTIONS', **kwargs)


@@ -1,17 +1,20 @@
from __future__ import annotations
import asyncio
import aiohttp
from async_timeout import timeout as async_timeout
from datetime import datetime, timedelta
import json
import threading
import time
from typing import List, TYPE_CHECKING
import scrypted_arlo_go
import scrypted_sdk
from scrypted_sdk.types import Setting, Settings, Device, Camera, VideoCamera, VideoClips, VideoClip, VideoClipOptions, MotionSensor, AudioSensor, Battery, DeviceProvider, MediaObject, ResponsePictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
from scrypted_sdk.types import Setting, Settings, SettingValue, Device, Camera, VideoCamera, RequestMediaStreamOptions, VideoClips, VideoClip, VideoClipOptions, MotionSensor, AudioSensor, Battery, Charger, ChargeState, DeviceProvider, MediaObject, ResponsePictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
from .arlo.arlo_async import USER_AGENTS
from .experimental import EXPERIMENTAL
from .base import ArloDeviceBase
from .spotlight import ArloSpotlight, ArloFloodlight
from .vss import ArloSirenVirtualSecuritySystem
@@ -23,7 +26,7 @@ if TYPE_CHECKING:
from .provider import ArloProvider
class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider, VideoClips, MotionSensor, AudioSensor, Battery):
class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider, VideoClips, MotionSensor, AudioSensor, Battery, Charger):
MODELS_WITH_SPOTLIGHTS = [
"vmc4040p",
"vmc2030",
@@ -73,14 +76,41 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
timeout: int = 30
intercom_session = None
goSM = None
light: ArloSpotlight = None
vss: ArloSirenVirtualSecuritySystem = None
picture_lock: asyncio.Lock = None
# eco mode bookkeeping
last_picture: bytes = None
last_picture_time: datetime = datetime(1970, 1, 1)
def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider) -> None:
super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_basestation, provider=provider)
self.picture_lock = asyncio.Lock()
self.start_motion_subscription()
self.start_audio_subscription()
self.start_battery_subscription()
self.create_task(self.delayed_init())
async def delayed_init(self) -> None:
if not self.has_battery:
return
iterations = 1
while not self.stop_subscriptions:
if iterations > 100:
self.logger.error("Delayed init exceeded iteration limit, giving up")
return
try:
self.chargeState = ChargeState.Charging.value if self.wired_to_power else ChargeState.NotCharging.value
return
except Exception as e:
self.logger.debug(f"Delayed init failed, will try again: {e}")
await asyncio.sleep(0.1)
iterations += 1
def start_motion_subscription(self) -> None:
def callback(motionDetected):
@@ -104,7 +134,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
)
def start_battery_subscription(self) -> None:
if self.wired_to_power:
if not self.has_battery:
return
def callback(batteryLevel):
@@ -123,19 +153,12 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
ScryptedInterface.Settings.value,
])
if self.two_way_audio:
results.discard(ScryptedInterface.RTCSignalingChannel.value)
if EXPERIMENTAL or not self.uses_sip_push_to_talk:
results.add(ScryptedInterface.Intercom.value)
if self.webrtc_emulation:
results.add(ScryptedInterface.RTCSignalingChannel.value)
results.discard(ScryptedInterface.Intercom.value)
if self.has_battery:
results.add(ScryptedInterface.Battery.value)
if self.wired_to_power:
results.discard(ScryptedInterface.Battery.value)
results.add(ScryptedInterface.Charger.value)
if self.has_siren or self.has_spotlight or self.has_floodlight:
results.add(ScryptedInterface.DeviceProvider.value)
@@ -146,10 +169,6 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
if self.has_cloud_recording:
results.add(ScryptedInterface.VideoClips.value)
if not self._can_push_to_talk():
results.discard(ScryptedInterface.RTCSignalingChannel.value)
results.discard(ScryptedInterface.Intercom.value)
return list(results)
def get_device_type(self) -> str:
@@ -191,23 +210,6 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
] + vss.get_builtin_child_device_manifests())
return results
@property
def webrtc_emulation(self) -> bool:
if self.storage:
return True if self.storage.getItem("webrtc_emulation") else False
else:
return False
@property
def two_way_audio(self) -> bool:
if self.storage:
val = self.storage.getItem("two_way_audio")
if val is None:
val = True
return val
else:
return True
@property
def wired_to_power(self) -> bool:
if self.storage:
@@ -215,6 +217,21 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
else:
return False
@property
def eco_mode(self) -> bool:
if self.storage:
return True if self.storage.getItem("eco_mode") else False
else:
return False
@property
def snapshot_throttle_interval(self) -> bool:
interval = self.storage.getItem("snapshot_throttle_interval")
if interval is None:
interval = 60
self.storage.setItem("snapshot_throttle_interval", interval)
return int(interval)
@property
def has_cloud_recording(self) -> bool:
return self.provider.arlo.GetSmartFeatures(self.arlo_device).get("planFeatures", {}).get("eventRecording", False)
@@ -239,45 +256,75 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
def has_battery(self) -> bool:
return not any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITHOUT_BATTERY])
@property
def uses_sip_push_to_talk(self) -> bool:
return self.arlo_device["deviceId"] == self.arlo_device["parentId"]
async def getSettings(self) -> List[Setting]:
result = []
if self.has_battery:
result.append(
{
"group": "General",
"key": "wired_to_power",
"title": "Plugged In to External Power",
"value": self.wired_to_power,
"description": "Informs Scrypted that this device is plugged in to an external power source. " + \
"Will allow features like persistent prebuffer to work, however will no longer report this device's battery percentage. " + \
"Will allow features like persistent prebuffer to work. " + \
"Note that a persistent prebuffer may cause excess battery drain if the external power is not able to charge faster than the battery consumption rate.",
"type": "boolean",
},
)
if self._can_push_to_talk():
result.extend([
result.append(
{
"group": "General",
"key": "eco_mode",
"title": "Eco Mode",
"value": self.eco_mode,
"description": "Configures Scrypted to limit the number of requests made to this camera. " + \
"Additional eco mode settings will appear when this is turned on.",
"type": "boolean",
}
)
if self.eco_mode:
result.append(
{
"key": "two_way_audio",
"title": "(Experimental) Enable native two-way audio",
"value": self.two_way_audio,
"description": "Enables two-way audio for this device. Not yet completely functional on all audio senders.",
"type": "boolean",
},
{
"key": "webrtc_emulation",
"title": "(Highly Experimental) Emulate WebRTC Camera",
"value": self.webrtc_emulation,
"description": "Configures the plugin to offer this device as a WebRTC camera, merging video/audio stream with two-way audio. "
"If enabled, takes precedence over native two-way audio. May use increased system resources.",
"type": "boolean",
},
])
"group": "Eco Mode",
"key": "snapshot_throttle_interval",
"title": "Snapshot Throttle Interval",
"value": self.snapshot_throttle_interval,
"description": "Time, in minutes, to throttle snapshot requests. " + \
"When eco mode is on, snapshot requests to the camera will be throttled for the given duration. " + \
"Cached snapshots may be returned if the time since the last snapshot has not exceeded the interval. " + \
"A value of 0 will disable throttling even when eco mode is on.",
"type": "number",
}
)
return result
@async_print_exception_guard
async def putSetting(self, key, value) -> None:
if key in ["webrtc_emulation", "two_way_audio", "wired_to_power"]:
async def putSetting(self, key: str, value: SettingValue) -> None:
if not self.validate_setting(key, value):
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)
return
if key in ["wired_to_power"]:
self.storage.setItem(key, value == "true" or value == True)
await self.provider.discover_devices()
elif key in ["eco_mode"]:
self.storage.setItem(key, value == "true" or value == True)
else:
self.storage.setItem(key, value)
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)
def validate_setting(self, key: str, val: SettingValue) -> bool:
if key == "snapshot_throttle_interval":
try:
val = int(val)
except ValueError:
self.logger.error(f"Invalid snapshot throttle interval '{val}' - must be an integer")
return False
return True
async def getPictureOptions(self) -> List[ResponsePictureOptions]:
return []
@@ -296,16 +343,30 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
self.logger.warning(f"Could not fetch from prebuffer due to: {e}")
self.logger.warning("Will try to fetch snapshot from Arlo cloud")
pic_url = await asyncio.wait_for(self.provider.arlo.TriggerFullFrameSnapshot(self.arlo_basestation, self.arlo_device), timeout=self.timeout)
self.logger.debug(f"Got snapshot URL for at {pic_url}")
async with self.picture_lock:
if self.eco_mode and self.snapshot_throttle_interval > 0:
if datetime.now() - self.last_picture_time <= timedelta(minutes=self.snapshot_throttle_interval):
self.logger.info("Using cached image")
return await scrypted_sdk.mediaManager.createMediaObject(self.last_picture, "image/jpeg")
if pic_url is None:
raise Exception("Error taking snapshot")
pic_url = await asyncio.wait_for(self.provider.arlo.TriggerFullFrameSnapshot(self.arlo_basestation, self.arlo_device), timeout=self.timeout)
self.logger.debug(f"Got snapshot URL for at {pic_url}")
return await scrypted_sdk.mediaManager.createMediaObject(str.encode(pic_url), ScryptedMimeTypes.Url.value)
if pic_url is None:
raise Exception("Error taking snapshot")
async def getVideoStreamOptions(self) -> List[ResponseMediaStreamOptions]:
return [
async with async_timeout(self.timeout):
async with aiohttp.ClientSession() as session:
async with session.get(pic_url) as resp:
if resp.status != 200:
raise Exception(f"Unexpected status downloading snapshot image: {resp.status}")
self.last_picture = await resp.read()
self.last_picture_time = datetime.now()
return await scrypted_sdk.mediaManager.createMediaObject(self.last_picture, "image/jpeg")
async def getVideoStreamOptions(self, id: str = None) -> List[ResponseMediaStreamOptions]:
options = [
{
"id": 'default',
"name": 'Cloud RTSP',
@@ -319,72 +380,138 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
"source": 'cloud',
"tool": 'scrypted',
"userConfigurable": False,
},
{
"id": 'dash',
"name": 'Cloud DASH',
"container": 'dash',
"video": {
"codec": 'unknown',
},
"audio": None if self.arlo_device.get("modelId") == "VMC3030" else {
"codec": 'unknown',
},
"source": 'cloud',
"tool": 'ffmpeg',
"userConfigurable": False,
}
]
async def _getVideoStreamURL(self) -> str:
self.logger.info("Requesting stream")
rtsp_url = await asyncio.wait_for(self.provider.arlo.StartStream(self.arlo_basestation, self.arlo_device), timeout=self.timeout)
self.logger.debug(f"Got stream URL at {rtsp_url}")
return rtsp_url
if id is None:
return options
async def getVideoStream(self, options: dict = None) -> MediaObject:
return next(iter([o for o in options if o['id'] == id]))
async def _getVideoStreamURL(self, container: str) -> str:
self.logger.info(f"Requesting {container} stream")
url = await asyncio.wait_for(self.provider.arlo.StartStream(self.arlo_basestation, self.arlo_device, mode=container), timeout=self.timeout)
self.logger.debug(f"Got {container} stream URL at {url}")
return url
@async_print_exception_guard
async def getVideoStream(self, options: RequestMediaStreamOptions = None) -> MediaObject:
self.logger.debug("Entered getVideoStream")
rtsp_url = await self._getVideoStreamURL()
mso = (await self.getVideoStreamOptions())[0]
mso = await self.getVideoStreamOptions(id=options["id"])
mso['refreshAt'] = round(time.time() * 1000) + 30 * 60 * 1000
container = mso["container"]
url = await self._getVideoStreamURL(container)
additional_ffmpeg_args = []
if container == "dash":
headers = self.provider.arlo.GetMPDHeaders(url)
ffmpeg_headers = '\r\n'.join([
f'{k}: {v}'
for k, v in headers.items()
])
additional_ffmpeg_args = ['-headers', ffmpeg_headers+'\r\n']
ffmpeg_input = {
'url': rtsp_url,
'container': 'rtsp',
'url': url,
'container': container,
'mediaStreamOptions': mso,
'inputArguments': [
'-f', 'rtsp',
'-i', rtsp_url,
'-f', container,
*additional_ffmpeg_args,
'-i', url,
]
}
return await scrypted_sdk.mediaManager.createFFmpegMediaObject(ffmpeg_input)
@async_print_exception_guard
async def startRTCSignalingSession(self, scrypted_session):
plugin_session = ArloCameraRTCSignalingSession(self)
await plugin_session.initialize()
scrypted_setup = {
"type": "offer",
"audio": {
"direction": "sendrecv" if self._can_push_to_talk() else "recvonly",
},
"video": {
"direction": "recvonly",
}
}
plugin_setup = {}
scrypted_offer = await scrypted_session.createLocalDescription("offer", scrypted_setup, sendIceCandidate=plugin_session.addIceCandidate)
self.logger.info(f"Scrypted offer sdp:\n{scrypted_offer['sdp']}")
await plugin_session.setRemoteDescription(scrypted_offer, plugin_setup)
plugin_answer = await plugin_session.createLocalDescription("answer", plugin_setup, scrypted_session.sendIceCandidate)
self.logger.info(f"Scrypted answer sdp:\n{plugin_answer['sdp']}")
await scrypted_session.setRemoteDescription(plugin_answer, scrypted_setup)
return ArloCameraRTCSessionControl(plugin_session)
async def startIntercom(self, media) -> None:
self.logger.info("Starting intercom")
self.intercom_session = ArloCameraRTCSignalingSession(self)
await self.intercom_session.initialize_push_to_talk(media)
if self.uses_sip_push_to_talk:
sip_info = self.provider.arlo.GetSIPInfo()
sip_call_info = sip_info["sipCallInfo"]
ice_servers = [{"url": "stun:stun.l.google.com:19302"}]
self.logger.debug(f"Will use ice servers: {[ice['url'] for ice in ice_servers]}")
ice_servers = scrypted_arlo_go.Slice_webrtc_ICEServer([
scrypted_arlo_go.NewWebRTCICEServer(
scrypted_arlo_go.go.Slice_string([ice['url']]),
ice.get('username', ''),
ice.get('credential', '')
)
for ice in ice_servers
])
sip_cfg = scrypted_arlo_go.SIPInfo(
DeviceID=self.nativeId,
CallerURI=f"sip:{sip_call_info['id']}@{sip_call_info['domain']}:{sip_call_info['port']}",
CalleeURI=sip_call_info['calleeUri'],
Password=sip_call_info['password'],
UserAgent="SIP.js/0.20.1",
WebsocketURI="wss://livestream-z2-prod.arlo.com:7443",
WebsocketOrigin="https://my.arlo.com",
WebsocketHeaders=scrypted_arlo_go.HeadersMap({"User-Agent": USER_AGENTS["arlo"]}),
)
self.goSM = scrypted_arlo_go.NewSIPWebRTCManager("Arlo SIP "+self.nativeId, ice_servers, sip_cfg)
ffmpeg_params = json.loads(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput.value))
self.logger.debug(f"Received ffmpeg params: {ffmpeg_params}")
audio_port = self.goSM.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
ffmpeg_path = await scrypted_sdk.mediaManager.getFFmpegPath()
ffmpeg_args = [
"-y",
"-hide_banner",
"-loglevel", "error",
"-analyzeduration", "0",
"-fflags", "-nobuffer",
"-probesize", "500000",
*ffmpeg_params["inputArguments"],
"-vn",
"-acodec", "libopus",
"-f", "rtp",
"-flush_packets", "1",
f"rtp://localhost:{audio_port}?pkt_size={scrypted_arlo_go.UDP_PACKET_SIZE()}",
]
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with '{' '.join(ffmpeg_args)}'")
self.intercom_ffmpeg_subprocess = HeartbeatChildProcess("Arlo Subprocess "+self.logger_name, ffmpeg_path, *ffmpeg_args)
self.intercom_ffmpeg_subprocess.start()
self.goSM.Start()
else:
# we need to do signaling through arlo cloud apis
self.intercom_session = ArloCameraIntercomSession(self)
await self.intercom_session.initialize_push_to_talk(media)
self.logger.info("Intercom ready")
@async_print_exception_guard
async def stopIntercom(self) -> None:
self.logger.info("Stopping intercom")
if self.intercom_session is not None:
await self.intercom_session.shutdown()
self.intercom_session = None
def _can_push_to_talk(self) -> bool:
# Right now, only implement push to talk for basestation cameras
return self.arlo_device["deviceId"] != self.arlo_device["parentId"]
if self.goSM is not None:
self.goSM.Close()
self.goSM = None
async def getVideoClip(self, videoId: str) -> MediaObject:
self.logger.info(f"Getting video clip {videoId}")
@@ -476,7 +603,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
return self.vss
class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
class ArloCameraIntercomSession(BackgroundTaskMixin):
def __init__(self, camera):
super().__init__()
self.camera = camera
@@ -485,10 +612,8 @@ class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
self.arlo_device = camera.arlo_device
self.arlo_basestation = camera.arlo_basestation
self.ffmpeg_subprocess = None
self.intercom_ffmpeg_subprocess = None
self.scrypted_pc = None
self.arlo_pc = None
self.arlo_sdp_answered = False
@@ -535,24 +660,26 @@ class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
self.provider.arlo.SubscribeToCandidateAnswers(self.arlo_basestation, self.arlo_device, callback)
)
async def initialize(self):
self.logger.info("Initializing video stream for RTC")
rtsp_url = await self.camera._getVideoStreamURL()
@async_print_exception_guard
async def initialize_push_to_talk(self, media):
self.logger.info("Initializing push to talk")
cfg = scrypted_arlo_go.WebRTCConfiguration(
ICEServers=scrypted_arlo_go.Slice_webrtc_ICEServer([
scrypted_arlo_go.NewWebRTCICEServer(
scrypted_arlo_go.go.Slice_string(["turn:turn0.clockworkmod.com", "turn:n0.clockworkmod.com", "turn:n1.clockworkmod.com"]),
"foo",
"bar"
)
])
)
cfg = scrypted_arlo_go.WebRTCConfiguration()
self.scrypted_pc = scrypted_arlo_go.NewWebRTCManager("Arlo "+self.camera.logger_name, cfg)
session_id, ice_servers = self.provider.arlo.StartPushToTalk(self.arlo_basestation, self.arlo_device)
self.logger.debug(f"Received ice servers: {[ice['url'] for ice in ice_servers]}")
audio_port = self.scrypted_pc.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
video_port = self.scrypted_pc.InitializeVideoRTPListener(scrypted_arlo_go.WebRTCMimeTypeH264)
ice_servers = scrypted_arlo_go.Slice_webrtc_ICEServer([
scrypted_arlo_go.NewWebRTCICEServer(
scrypted_arlo_go.go.Slice_string([ice['url']]),
ice.get('username', ''),
ice.get('credential', '')
)
for ice in ice_servers
])
self.arlo_pc = scrypted_arlo_go.NewWebRTCManager("Arlo WebRTC "+self.camera.logger_name, ice_servers)
ffmpeg_params = json.loads(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput.value))
self.logger.debug(f"Received ffmpeg params: {ffmpeg_params}")
audio_port = self.arlo_pc.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
ffmpeg_path = await scrypted_sdk.mediaManager.getFFmpegPath()
ffmpeg_args = [
@@ -561,200 +688,48 @@ class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
"-loglevel", "error",
"-analyzeduration", "0",
"-fflags", "-nobuffer",
"-max_probe_packets", "2",
"-vcodec", "h264",
"-acodec", "aac",
"-i", rtsp_url,
"-an",
"-vcodec", "copy",
"-f", "rtp",
"-flush_packets", "1",
f"rtp://localhost:{video_port}",
"-probesize", "500000",
*ffmpeg_params["inputArguments"],
"-vn",
"-acodec", "libopus",
"-f", "rtp",
"-flush_packets", "1",
f"rtp://localhost:{audio_port}?pkt_size={scrypted_arlo_go.UDP_PACKET_SIZE()}",
]
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with {ffmpeg_args}")
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with '{' '.join(ffmpeg_args)}'")
self.ffmpeg_subprocess = HeartbeatChildProcess("Arlo "+self.camera.logger_name, ffmpeg_path, *ffmpeg_args)
self.ffmpeg_subprocess.start()
self.intercom_ffmpeg_subprocess = HeartbeatChildProcess("Arlo Subprocess "+self.camera.logger_name, ffmpeg_path, *ffmpeg_args)
self.intercom_ffmpeg_subprocess.start()
if self.camera._can_push_to_talk():
self.create_task(self.initialize_push_to_talk())
self.sdp_answered = False
async def initialize_push_to_talk(self, media=None):
try:
self.logger.info("Initializing push to talk")
offer = self.arlo_pc.CreateOffer()
offer_sdp = scrypted_arlo_go.WebRTCSessionDescriptionSDP(offer)
self.logger.info(f"Arlo offer sdp:\n{offer_sdp}")
session_id, ice_servers = self.provider.arlo.StartPushToTalk(self.arlo_basestation, self.arlo_device)
self.logger.debug(f"Received ice servers: {[ice['url'] for ice in ice_servers]}")
self.arlo_pc.SetLocalDescription(offer)
cfg = scrypted_arlo_go.WebRTCConfiguration(
ICEServers=scrypted_arlo_go.Slice_webrtc_ICEServer([
scrypted_arlo_go.NewWebRTCICEServer(
scrypted_arlo_go.go.Slice_string([ice['url']]),
ice.get('username', ''),
ice.get('credential', '')
)
for ice in ice_servers
])
)
self.arlo_pc = scrypted_arlo_go.NewWebRTCManager("Arlo "+self.camera.logger_name, cfg)
self.provider.arlo.NotifyPushToTalkSDP(
self.arlo_basestation, self.arlo_device,
session_id, offer_sdp
)
if media is not None:
ffmpeg_params = json.loads(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput.value))
self.logger.debug(f"Received ffmpeg params: {ffmpeg_params}")
audio_port = self.arlo_pc.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
ffmpeg_path = await scrypted_sdk.mediaManager.getFFmpegPath()
ffmpeg_args = [
"-y",
"-hide_banner",
"-loglevel", "error",
"-analyzeduration", "0",
"-fflags", "-nobuffer",
"-probesize", "500000",
*ffmpeg_params["inputArguments"],
"-vn",
"-acodec", "libopus",
"-f", "rtp",
"-flush_packets", "1",
f"rtp://localhost:{audio_port}?pkt_size={scrypted_arlo_go.UDP_PACKET_SIZE()}",
]
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with {ffmpeg_args}")
self.intercom_ffmpeg_subprocess = HeartbeatChildProcess("Arlo "+self.camera.logger_name, ffmpeg_path, *ffmpeg_args)
self.intercom_ffmpeg_subprocess.start()
else:
self.logger.debug("Starting audio track forwarder")
self.scrypted_pc.ForwardAudioTo(self.arlo_pc)
self.logger.debug("Started audio track forwarder")
self.sdp_answered = False
offer = self.arlo_pc.CreateOffer()
offer_sdp = scrypted_arlo_go.WebRTCSessionDescriptionSDP(offer)
self.logger.info(f"Arlo offer sdp:\n{offer_sdp}")
self.arlo_pc.SetLocalDescription(offer)
self.provider.arlo.NotifyPushToTalkSDP(
candidates = self.arlo_pc.WaitAndGetICECandidates()
self.logger.debug(f"Gathered {len(candidates)} candidates")
for candidate in candidates:
candidate = scrypted_arlo_go.WebRTCICECandidateInit(
scrypted_arlo_go.WebRTCICECandidate(handle=candidate).ToJSON()
).Candidate
self.logger.debug(f"Sending candidate to Arlo: {candidate}")
self.provider.arlo.NotifyPushToTalkCandidate(
self.arlo_basestation, self.arlo_device,
session_id, offer_sdp
session_id, candidate,
)
def forward_candidates():
try:
candidates = self.arlo_pc.WaitAndGetICECandidates()
self.logger.debug(f"Gathered {len(candidates)} candidates")
for candidate in candidates:
candidate = scrypted_arlo_go.WebRTCICECandidateInit(
scrypted_arlo_go.WebRTCICECandidate(handle=candidate).ToJSON()
).Candidate
self.logger.debug(f"Sending candidate to Arlo: {candidate}")
self.provider.arlo.NotifyPushToTalkCandidate(
self.arlo_basestation, self.arlo_device,
session_id, candidate,
)
except Exception as e:
self.logger.error(e)
t = threading.Thread(target=forward_candidates)
t.start()
except Exception as e:
self.logger.error(e)
async def createLocalDescription(self, type, setup, sendIceCandidate=None):
if type == "offer":
raise Exception("can only create answers in ArloCameraRTCSignalingSession.createLocalDescription")
answer = self.scrypted_pc.CreateAnswer()
answer_sdp = scrypted_arlo_go.WebRTCSessionDescriptionSDP(answer)
self.scrypted_pc.SetLocalDescription(answer)
if sendIceCandidate is not None:
loop = asyncio.get_event_loop()
def forward_candidates():
try:
candidates = self.scrypted_pc.WaitAndGetICECandidates()
self.logger.debug(f"Gathered {len(candidates)} candidates")
for candidate in candidates:
candidate = scrypted_arlo_go.WebRTCICECandidateInit(
scrypted_arlo_go.WebRTCICECandidate(handle=candidate).ToJSON()
).Candidate
self.logger.debug(f"Sending candidate to scrypted: {candidate}")
loop.call_soon_threadsafe(
self.create_task,
sendIceCandidate({
"candidate": candidate,
"sdpMid": "0",
"sdpMLineIndex": 0,
})
)
except Exception as e:
self.logger.error(e)
t = threading.Thread(target=forward_candidates)
t.start()
return {
"sdp": answer_sdp,
"type": "answer"
}
async def setRemoteDescription(self, description, setup):
if description["type"] != "offer":
raise Exception("can only accept offers in ArloCameraRTCSignalingSession.createLocalDescription")
sdp = scrypted_arlo_go.WebRTCSessionDescription(scrypted_arlo_go.NewWebRTCSDPType("offer"), description["sdp"])
self.scrypted_pc.SetRemoteDescription(sdp)
async def addIceCandidate(self, candidate):
candidate = scrypted_arlo_go.WebRTCICECandidateInit(candidate["candidate"], "0", 0)
self.scrypted_pc.AddICECandidate(candidate)
async def getOptions(self):
pass
async def unmute_relay(self):
return
await self.arlo_pc.unmute_relay(self.arlo_relay_track)
async def mute_relay(self):
return
await self.arlo_pc.mute_relay(self.arlo_relay_track)
async def shutdown(self):
if self.ffmpeg_subprocess is not None:
self.ffmpeg_subprocess.stop()
self.ffmpeg_subprocess = None
if self.intercom_ffmpeg_subprocess is not None:
self.intercom_ffmpeg_subprocess.stop()
self.intercom_ffmpeg_subprocess = None
if self.scrypted_pc is not None:
self.scrypted_pc.Close()
self.scrypted_pc = None
if self.arlo_pc is not None:
self.arlo_pc.Close()
self.arlo_pc = None
class ArloCameraRTCSessionControl:
def __init__(self, arlo_session):
self.arlo_session = arlo_session
self.logger = arlo_session.logger
async def setPlayback(self, options):
self.logger.debug(f"setPlayback options {options}")
audio = options.get("audio")
if audio is None:
return
if audio:
await self.arlo_session.unmute_relay()
else:
await self.arlo_session.mute_relay()
async def endSession(self):
self.logger.info("Ending RTC session")
await self.arlo_session.shutdown()
self.arlo_pc = None

View File

@@ -0,0 +1 @@
EXPERIMENTAL = False

View File

@@ -87,6 +87,9 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
@property
def arlo_transport(self) -> str:
return "SSE"
# This code is here for posterity; however, it appears that as of 06/01/2023
# Arlo has disabled the MQTT backend
transport = self.storage.getItem("arlo_transport")
if transport is None or transport not in ArloProvider.arlo_transport_choices:
transport = "SSE"
@@ -149,13 +152,15 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
def arlo(self) -> Arlo:
if self._arlo is not None:
if self._arlo_mfa_complete_auth is not None:
if self._arlo_mfa_code == "":
if not self._arlo_mfa_code:
return None
self.logger.info("Completing Arlo MFA...")
self._arlo_mfa_complete_auth(self._arlo_mfa_code)
self._arlo_mfa_complete_auth = None
self._arlo_mfa_code = None
try:
self._arlo_mfa_complete_auth(self._arlo_mfa_code)
finally:
self._arlo_mfa_complete_auth = None
self._arlo_mfa_code = None
self.logger.info("Arlo MFA done")
self.storage.setItem("arlo_auth_headers", json.dumps(dict(self._arlo.request.session.headers.items())))
@@ -175,7 +180,6 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
if headers:
self._arlo.UseExistingAuth(self.arlo_user_id, json.loads(headers))
self.logger.info(f"Initialized Arlo client, reusing stored auth headers")
self.create_task(self.do_arlo_setup())
return self._arlo
else:
@@ -185,6 +189,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
except Exception as e:
traceback.print_exc()
self._arlo = None
self._arlo_mfa_complete_auth = None
self._arlo_mfa_code = None
return None
@@ -455,9 +460,9 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
"group": "General",
"key": "arlo_transport",
"title": "Underlying Transport Protocol",
"description": "Select the underlying transport protocol used to connect to Arlo Cloud.",
"description": "Arlo Cloud currently only supports the SSE protocol.",
"value": self.arlo_transport,
"choices": self.arlo_transport_choices,
"readonly": True,
},
{
"group": "General",
@@ -573,7 +578,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
self.scrypted_devices = {}
camera_devices = []
provider_to_device_map = {}
provider_to_device_map = {None: []}
basestations = self.arlo.GetDevices(['basestation', 'siren'])
for basestation in basestations:
@@ -627,7 +632,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
device = await self.getDevice_impl(nativeId)
scrypted_interfaces = device.get_applicable_interfaces()
manifest = device.get_device_manifest()
self.logger.debug(f"Interfaces for {nativeId} ({camera['modelId']}): {scrypted_interfaces}")
self.logger.debug(f"Interfaces for {nativeId} ({camera['modelId']} parent {camera['parentId']}): {scrypted_interfaces}")
if camera["deviceId"] == camera["parentId"]:
provider_to_device_map.setdefault(None, []).append(manifest)
@@ -647,6 +652,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
if len(cameras) != len(camera_devices):
self.logger.info(f"Discovered {len(cameras)} cameras, but only {len(camera_devices)} are usable")
self.logger.info(f"Are all cameras shared with admin permissions?")
else:
self.logger.info(f"Discovered {len(cameras)} cameras")

View File

@@ -1,8 +1,11 @@
paho-mqtt==1.6.1
sseclient==0.0.22
aiohttp==3.8.4
requests==2.28.2
cachetools==5.3.0
scrypted-arlo-go==0.0.1
scrypted-arlo-go==0.1.3
cloudscraper==1.2.71
async-timeout==4.0.2
--extra-index-url=https://www.piwheels.org/simple/
--extra-index-url=https://bjia56.github.io/scrypted-arlo-go/
--prefer-binary

View File

@@ -2,7 +2,9 @@
The C300X Plugin for Scrypted allows viewing your C300X intercom with incoming video/audio.
WARNING: You will need access to the device, see https://github.com/fquinto/bticinoClasse300x
WARNING: You will need access to the device, see https://github.com/fquinto/bticinoClasse300x.
You also need the **[c300x-controller](https://github.com/slyoldfox/c300x-controller)** and node (v17.9.1) running on your device which will expose an API for the intercom.
## Development instructions
@@ -17,12 +19,37 @@ $ npm run scrypted-deploy 127.0.0.1
After flashing a custom firmware you must at least:
* Install [node](https://nodejs.org/download/release/latest-v17.x/node-v17.9.1-linux-armv7l.tar.gz) on your device and run the c300x-controller on the device
* Install [/lib/libatomic.so.1](http://ftp.de.debian.org/debian/pool/main/g/gcc-10-cross/libatomic1-armhf-cross_10.2.1-6cross1_all.deb) in **/lib**
* Allow access to the SIP server on port 5060
* Allow your IP to authenticate with the SIP server
* Add a SIP user for scrypted
To do this use the guide below:
## Installing node and c300x-controller
```
$ cd /home/bticino/cfg/extra/
$ mkdir node
$ cd node
$ wget https://nodejs.org/download/release/latest-v17.x/node-v17.9.1-linux-armv7l.tar.gz
$ tar xvfz node-v17.9.1-linux-armv7l.tar.gz
```
Node requires libatomic.so.1, which isn't shipped with the device; get the .deb file from http://ftp.de.debian.org/debian/pool/main/g/gcc-10-cross/libatomic1-armhf-cross_10.2.1-6cross1_all.deb
```
$ ar x libatomic1-armhf-cross_10.2.1-6cross1_all.deb
```
scp the `libatomic.so.1` to `/lib` and check that node works:
```
root@C3X-00-00-00-00-00--2222222:~# /home/bticino/cfg/extra/node/bin/node -v
v17.9.1
```
## Make flexisip listen on a reachable IP and add users to it
To be able to talk to our own SIP server, we need to make the SIP server on the C300X
@@ -93,7 +120,7 @@ hashed-passwords=true
reject-wrong-client-certificates=true
````
Now we will add a `user agent` (user) that will be used by `baresip` to register itself with `flexisip`
Now we will add a `user agent` (user) that will be used by `scrypted` to register itself with `flexisip`
Edit the `/etc/flexisip/users/users.db.txt` file and create a new line by copy/pasting the c300x user.
@@ -101,7 +128,7 @@ For example:
````
c300x@1234567.bs.iotleg.com md5:ffffffffffffffffffffffffffffffff ;
baresip@1234567.bs.iotleg.com md5:ffffffffffffffffffffffffffffffff ;
scrypted@1234567.bs.iotleg.com md5:ffffffffffffffffffffffffffffffff ;
````
Leave the md5 as the same value as the copied line - `fffff....` is used here just for this example.
@@ -110,7 +137,7 @@ Edit the `/etc/flexisip/users/route.conf` file and add a new line to it, it spec
Change the IP address to the place where you will run `baresip` (same as `trusted-hosts` above)
````
<sip:baresip@1234567.bs.iotleg.com> <sip:192.168.0.XX>
<sip:scrypted@1234567.bs.iotleg.com> <sip:192.168.0.XX>
````
Edit the `/etc/flexisip/users/route_int.conf` file.
@@ -121,7 +148,7 @@ You can look at it as a group of users that is called when you call `alluser@123
Add your username at the end (make sure you stay on the same line, NOT a new line!)
````
<sip:alluser@1234567.bs.iotleg.com> ..., <sip:baresip@1234567.bs.iotleg.com>
<sip:alluser@1234567.bs.iotleg.com> ..., <sip:scrypted@1234567.bs.iotleg.com>
````
Reboot and verify flexisip is listening on the new IP address.
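To sanity-check this from the machine running scrypted, you can probe the SIP port the same way the plugin's own validation does (see `c300x-controller-api.ts` further down in this diff). A minimal node/TypeScript sketch, with the intercom IP as a placeholder:
```
// Minimal sketch: probe flexisip on port 5060, mirroring ControllerApi.validateFlexisipSipPort.
// The IP below is a placeholder; use the intercom's address.
import * as net from 'net';

const intercomIp = '192.168.0.XX';

const conn = net.createConnection({ host: intercomIp, port: 5060, timeout: 5000 });
conn.setTimeout(5000);
conn.on('connect', () => { console.log('flexisip is reachable on port 5060'); conn.end(); });
conn.on('timeout', () => { console.error('timeout: flexisip is not listening on this IP/port'); conn.destroy(); });
conn.on('error', (e) => console.error('connection error: ' + e.message));
```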

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/bticino",
"version": "0.0.7",
"version": "0.0.9",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/bticino",
"version": "0.0.7",
"version": "0.0.9",
"dependencies": {
"@slyoldfox/sip": "^0.0.6-1",
"sdp": "^3.0.3",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/bticino",
"version": "0.0.7",
"version": "0.0.9",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -2,7 +2,7 @@ import { closeQuiet, createBindZero, listenZeroSingleClient } from '@scrypted/co
import { sleep } from '@scrypted/common/src/sleep';
import { RtspServer } from '@scrypted/common/src/rtsp-server';
import { addTrackControls } from '@scrypted/common/src/sdp-utils';
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, PictureOptions, ResponseMediaStreamOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, PictureOptions, Reboot, ResponseMediaStreamOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
import { SipCallSession } from '../../sip/src/sip-call-session';
import { RtpDescription } from '../../sip/src/rtp-utils';
import { VoicemailHandler } from './bticino-voicemailHandler';
@@ -19,11 +19,12 @@ import { InviteHandler } from './bticino-inviteHandler';
import { SipRequest } from '../../sip/src/sip-manager';
import { get } from 'http'
import { ControllerApi } from './c300x-controller-api';
const STREAM_TIMEOUT = 65000;
const { mediaManager } = sdk;
export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvider, Intercom, Camera, VideoCamera, Settings, BinarySensor, HttpRequestHandler, VideoClips {
export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvider, Intercom, Camera, VideoCamera, Settings, BinarySensor, HttpRequestHandler, VideoClips, Reboot {
private session: SipCallSession
private remoteRtpDescription: RtpDescription
@@ -35,8 +36,9 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
public requestHandlers: CompositeSipMessageHandler = new CompositeSipMessageHandler()
public incomingCallRequest : SipRequest
private settingsStorage: BticinoStorageSettings = new BticinoStorageSettings( this )
public voicemailHandler : VoicemailHandler = new VoicemailHandler(this)
private voicemailHandler : VoicemailHandler = new VoicemailHandler(this)
private inviteHandler : InviteHandler = new InviteHandler(this)
private controllerApi : ControllerApi = new ControllerApi(this)
//TODO: randomize this
private keyAndSalt : string = "/qE7OPGKp9hVGALG2KcvKWyFEZfSSvm7bYVDjT8X"
//private decodedSrtpOptions : SrtpOptions = decodeSrtpOptions( this.keyAndSalt )
@@ -55,14 +57,24 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
})();
}
reboot(): Promise<void> {
return new Promise<void>( (resolve,reject ) => {
let c300x = SipHelper.getIntercomIp(this)
get(`http://${c300x}:8080/reboot?now`, (res) => {
console.log("Reboot API result: " + res.statusCode)
});
})
}
getVideoClips(options?: VideoClipOptions): Promise<VideoClip[]> {
return new Promise<VideoClip[]>( (resolve,reject ) => {
let c300x = SipHelper.getIntercomIp(this)
if( !c300x ) return []
get(`http://${c300x}:8080/videoclips?raw=true&startTime=${options.startTime/1000}&endTime=${options.endTime/1000}`, (res) => {
let rawData = '';
res.on('data', (chunk) => { rawData += chunk; });
res.on('end', () => {
let rawData = '';
res.on('data', (chunk) => { rawData += chunk; });
res.on('end', () => {
try {
const parsedData : [] = JSON.parse(rawData);
let videoClips : VideoClip[] = []
@@ -93,7 +105,7 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
return mediaManager.createMediaObjectFromUrl(url);
}
getVideoClipThumbnail(thumbnailId: string): Promise<MediaObject> {
let c300x = SipHelper.sipOptions(this)
let c300x = SipHelper.getIntercomIp(this)
const url = `http://${c300x}:8080/voicemail?msg=${thumbnailId}/aswm.jpg&raw=true`;
return mediaManager.createMediaObjectFromUrl(url);
}
@@ -224,8 +236,6 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
}
this.stopSession();
const { clientPromise: playbackPromise, port: playbackPort, url: clientUrl } = await listenZeroSingleClient()
const playbackUrl = clientUrl
@@ -234,6 +244,7 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
client.setKeepAlive(true, 10000)
let sip: SipCallSession
try {
await this.controllerApi.updateStreamEndpoint()
let rtsp: RtspServer;
const cleanup = () => {
client.destroy();
@@ -366,6 +377,9 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
}
async releaseDevice(id: string, nativeId: string): Promise<void> {
this.voicemailHandler.cancelTimer()
this.persistentSipManager.cancelTimer()
this.controllerApi.cancelTimer()
}
reset() {

View File

@@ -6,7 +6,7 @@ export class VoicemailHandler extends SipRequestHandler {
constructor( private sipCamera : BticinoSipCamera ) {
super()
setTimeout( () => {
this.timeout = setTimeout( () => {
// Delay a bit and run in a different thread in case this fails
this.checkVoicemail()
}, 10000 )
@@ -25,7 +25,7 @@ export class VoicemailHandler extends SipRequestHandler {
this.timeout = setTimeout( () => this.checkVoicemail() , 5 * 60 * 1000 )
}
cancelVoicemailCheck() {
cancelTimer() {
if( this.timeout ) {
clearTimeout(this.timeout)
}

View File

@@ -0,0 +1,125 @@
import * as nodeIp from "ip";
import { get } from 'http'
import * as net from 'net'
import { BticinoSipCamera } from "./bticino-camera";
import { SipHelper } from './sip-helper';
export class ControllerApi {
private timeout : NodeJS.Timeout
constructor( private sipCamera : BticinoSipCamera ) {
this.timeout = setTimeout( () => {
// Delay a bit and run in a different thread in case this fails
this.registerEndpoints( true )
}, 5000 )
}
/**
* Will validate certain requirements for scrypted to work correctly with the intercom:
*/
public static validate( ipAddress ) {
return this.validateFlexisipSipPort(ipAddress).then( this.validateController )
}
/**
* Will validate if the non secure SIP port was opened after modifying /etc/init.d/flexisipsh
*/
private static validateFlexisipSipPort( ipAddress : string ) : Promise<string> {
let conn = net.createConnection( { host: ipAddress, port: 5060, timeout: 5000 } )
return new Promise( (resolve, reject) => {
conn.setTimeout(5000);
conn.on('connect', () => resolve( ipAddress ));
conn.on('timeout', () => reject( new Error("Timeout connecting to port 5060, is this a Bticino intercom? Did you change /etc/init.d/flexisipsh to make it listen on this port?") ) );
conn.on('error', () => reject( new Error("Error connecting to port 5060, is this a Bticino intercom? Did you change /etc/init.d/flexisipsh to make it listen on this port?") ) );
})
}
/**
* Will validate if the c300x-controller is running on port 8080.
* The c300x-controller will return errors if some configuration errors are present on the intercom.
*/
private static validateController( ipAddress : string ) : Promise<void> {
// Will throw an exception if invalid format
const c300x = nodeIp.toBuffer( ipAddress )
const validatedIp = nodeIp.toString(c300x)
const url = `http://${validatedIp}:8080/validate-setup?raw=true`
return new Promise( (resolve, reject) => get(url, (res) => {
let body = "";
res.on("data", data => { body += data });
res.on("end", () => {
try {
let parsedBody = JSON.parse( body )
if( parsedBody["errors"].length > 0 ) {
reject( new Error( parsedBody["errors"][0] ) )
} else {
parsedBody["ipAddress"] = validatedIp
resolve( parsedBody )
}
} catch( e ) {
reject( e )
}
})
res.on("error", (e) => { reject(e)})
if( res.statusCode != 200 ) {
reject( new Error(`Could not validate required c300x-controller. Check ${url}`) )
}
} ).on("error", (e) => { reject(`Could not connect to the c300x-controller at ${url}`) }) )
}
/**
* This verifies if the intercom is customized correctly. It verifies:
*
* - if a dedicated scrypted sip user is added for this specific camera instance in /etc/flexisip/users/users.db.txt
* - if this dedicated scrypted sip user is configured in /etc/flexisip/users/route.conf and /etc/flexisip/users/route_int.conf
*/
public registerEndpoints( verifyUser : boolean ) {
let ipAddress = SipHelper.getIntercomIp(this.sipCamera)
let sipFrom = SipHelper.getIdentifier(this.sipCamera)
const pressed = Buffer.from(this.sipCamera.doorbellWebhookUrl + 'pressed').toString('base64')
const locked = Buffer.from(this.sipCamera.doorbellLockWebhookUrl + 'locked').toString('base64')
const unlocked = Buffer.from(this.sipCamera.doorbellLockWebhookUrl + 'unlocked').toString('base64')
get(`http://${ipAddress}:8080/register-endpoint?raw=true&identifier=${sipFrom}&pressed=${pressed}&locked=${locked}&unlocked=${unlocked}&verifyUser=${verifyUser}`, (res) => {
if( verifyUser ) {
let body = "";
res.on("data", data => { body += data });
res.on("end", () => {
try {
let parsedBody = JSON.parse( body )
if( parsedBody["errors"].length > 0 ) {
this.sipCamera.log.a("This camera is not setup correctly, it will not be able to receive the incoming doorbell stream. Check the console for the errors.")
parsedBody["errors"].forEach( error => {
this.sipCamera.console.error( "ERROR: " + error )
});
}
} catch( e ) {
this.sipCamera.console.error("Error parsing body to JSON: " + body )
}
})
}
console.log("Endpoint registration status: " + res.statusCode)
});
// The default evict time on the c300x-controller is 5 minutes, so this will certainly be within bounds
this.timeout = setTimeout( () => this.registerEndpoints( false ) , 2 * 60 * 1000 )
}
/**
* Informs the c300x-controller where to send the stream to
*/
public updateStreamEndpoint() : Promise<void> {
let ipAddress = SipHelper.getIntercomIp(this.sipCamera)
let sipFrom = SipHelper.getIdentifier(this.sipCamera)
return new Promise( (resolve, reject) => get(`http://${ipAddress}:8080/register-endpoint?raw=true&updateStreamEndpoint=${sipFrom}`, (res) => {
if( res.statusCode != 200 ) reject( "ERROR: Could not update streaming endpoint, call returned: " + res.statusCode )
else resolve()
} ) );
}
public cancelTimer() {
if( this.timeout ) {
clearTimeout(this.timeout)
}
}
}
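For orientation, a hedged sketch of how this validation is consumed; the real caller is the createDevice change shown later in this diff, and the `ipAddress`/`domain` fields come from the controller's /validate-setup response:
```
// Illustrative usage of ControllerApi.validate; the error handling here is an
// assumption, the plugin's actual caller is BticinoSipPlugin.createDevice.
import { ControllerApi } from './c300x-controller-api';

async function probeIntercom(ip: string): Promise<void> {
    try {
        const setupData: any = await ControllerApi.validate(ip);
        // setupData is the controller's validation result, augmented with the
        // validated "ipAddress" and the SIP "domain" used for the camera settings.
        console.log(`intercom at ${setupData.ipAddress}, SIP domain ${setupData.domain}`);
    } catch (e) {
        // rejects when port 5060 is unreachable or the controller reports setup errors
        console.error('c300x validation failed: ' + e);
    }
}
```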

View File

@@ -1,6 +1,7 @@
import sdk, { Device, DeviceCreator, DeviceCreatorSettings, DeviceProvider, LockState, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, Setting } from '@scrypted/sdk'
import { randomBytes } from 'crypto'
import { BticinoSipCamera } from './bticino-camera'
import { ControllerApi } from './c300x-controller-api';
const { systemManager, deviceManager } = sdk
@@ -14,41 +15,60 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
key: 'newCamera',
title: 'Add Camera',
placeholder: 'Camera name, e.g.: Back Yard Camera, Baby Camera, etc',
}
},
{
key: 'ip',
title: 'IP Address',
placeholder: 'IP Address of the C300X intercom',
}
]
}
async createDevice(settings: DeviceCreatorSettings): Promise<string> {
const nativeId = randomBytes(4).toString('hex')
const name = settings.newCamera?.toString()
const camera = await this.updateDevice(nativeId, name)
const device: Device = {
providerNativeId: nativeId,
info: {
//model: `${camera.model} (${camera.data.kind})`,
manufacturer: 'BticinoPlugin',
//firmware: camera.data.firmware_version,
//serialNumber: camera.data.device_id
},
nativeId: nativeId + '-lock',
name: name + ' Lock',
type: ScryptedDeviceType.Lock,
interfaces: [ScryptedInterface.Lock, ScryptedInterface.HttpRequestHandler],
if( !settings.ip ) {
throw new Error('IP address is required!')
}
const ret = await deviceManager.onDevicesChanged({
providerNativeId: nativeId,
devices: [device],
let validate = ControllerApi.validate( settings.ip )
return validate.then( async (setupData) => {
const nativeId = randomBytes(4).toString('hex')
const name = settings.newCamera?.toString() === undefined ? "Doorbell" : settings.newCamera?.toString()
await this.updateDevice(nativeId, name)
const device: Device = {
providerNativeId: nativeId,
info: {
//model: `${camera.model} (${camera.data.kind})`,
manufacturer: 'BticinoPlugin',
//firmware: camera.data.firmware_version,
//serialNumber: camera.data.device_id
},
nativeId: nativeId + '-lock',
name: name + ' Lock',
type: ScryptedDeviceType.Lock,
interfaces: [ScryptedInterface.Lock, ScryptedInterface.HttpRequestHandler],
}
await deviceManager.onDevicesChanged({
providerNativeId: nativeId,
devices: [device],
})
let sipCamera : BticinoSipCamera = await this.getDevice(nativeId)
sipCamera.putSetting("sipfrom", "scrypted-" + sipCamera.id + "@127.0.0.1")
sipCamera.putSetting("sipto", "c300x@" + setupData["ipAddress"] )
sipCamera.putSetting("sipdomain", setupData["domain"])
sipCamera.putSetting("sipdebug", true )
systemManager.getDeviceById<BticinoSipCamera>(sipCamera.id)
let lock = await sipCamera.getDevice(undefined)
lock.lockState = LockState.Locked
return nativeId
})
let sipCamera : BticinoSipCamera = await this.getDevice(nativeId)
let foo : BticinoSipCamera = systemManager.getDeviceById<BticinoSipCamera>(sipCamera.id)
let lock = await sipCamera.getDevice(undefined)
lock.lockState = LockState.Locked
return nativeId
}
updateDevice(nativeId: string, name: string) {
@@ -69,7 +89,8 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
ScryptedInterface.BinarySensor,
ScryptedDeviceType.DeviceProvider,
ScryptedInterface.HttpRequestHandler,
ScryptedInterface.VideoClips
ScryptedInterface.VideoClips,
ScryptedInterface.Reboot
],
type: ScryptedDeviceType.Doorbell,
})
@@ -86,7 +107,6 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
async releaseDevice(id: string, nativeId: string): Promise<void> {
let camera = this.devices.get(nativeId)
if( camera ) {
camera.voicemailHandler.cancelVoicemailCheck()
if( this.devices.delete( nativeId ) ) {
this.console.log("Removed device from list: " + id + " / " + nativeId )
}

View File

@@ -14,10 +14,11 @@ export class PersistentSipManager {
private sipManager : SipManager
private lastRegistration : number = 0
private expireInterval : number = 0
private timeout : NodeJS.Timeout
constructor( private camera : BticinoSipCamera ) {
// Give it a second and run in a separate thread to avoid failure on creation for the from/to/domain check
setTimeout( () => this.enable() , CHECK_INTERVAL )
this.timeout = setTimeout( () => this.enable() , CHECK_INTERVAL )
}
async enable() : Promise<SipManager> {
@@ -56,7 +57,7 @@ export class PersistentSipManager {
this.lastRegistration = now + (60 * 1000) - this.expireInterval
throw e
} finally {
setTimeout( () => this.register(), CHECK_INTERVAL )
this.timeout = setTimeout( () => this.register(), CHECK_INTERVAL )
}
}
@@ -65,6 +66,12 @@ export class PersistentSipManager {
return SipCallSession.createCallSession(this.camera.console, "Bticino", sipOptions, sm )
}
cancelTimer() {
if( this.timeout ) {
clearTimeout(this.timeout)
}
}
reloadSipOptions() {
this.sipManager?.setSipOptions( null )
}

View File

@@ -39,6 +39,15 @@ export class SipHelper {
}
}
public static getIdentifier( camera : BticinoSipCamera ) : string {
let to = camera.storage.getItem('sipfrom')?.trim();
const domain = camera.storage.getItem('sipdomain')?.trim()
if( to ) {
return to.split('@')[0] + '%40' + domain;
}
return
}
public static getIntercomIp( camera : BticinoSipCamera ): string {
let to = camera.storage.getItem('sipto')?.trim();
if( to ) {

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/core",
"version": "0.1.114",
"version": "0.1.129",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/core",
"version": "0.1.114",
"version": "0.1.129",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/core",
"version": "0.1.114",
"version": "0.1.129",
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -19,7 +19,7 @@ export class LauncherMixin extends ScryptedDeviceBase implements MixinProvider,
async getMixin(mixinDevice: any, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState: DeviceState): Promise<any> {
mixinDeviceState.applicationInfo = {
icon: 'fa ' + typeToIcon(mixinDeviceState.type),
href: '/endpoint/@scrypted/core/public/#/device/' + mixinDeviceState.id,
href: '#/device/' + mixinDeviceState.id,
}
return mixinDevice;
}

View File

@@ -35,7 +35,6 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
router: any = Router();
publicRouter: any = Router();
mediaCore: MediaCore;
launcher: LauncherMixin;
scriptCore: ScriptCore;
aggregateCore: AggregateCore;
automationCore: AutomationCore;
@@ -73,7 +72,6 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
type: ScryptedDeviceType.Builtin,
},
);
this.mediaCore = new MediaCore('mediacore');
})();
(async () => {
await deviceManager.onDeviceDiscovered(
@@ -84,7 +82,6 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
type: ScryptedDeviceType.Builtin,
},
);
this.scriptCore = new ScriptCore();
})();
(async () => {
@@ -96,7 +93,6 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
type: ScryptedDeviceType.Builtin,
},
);
this.automationCore = new AutomationCore();
})();
deviceManager.onDeviceDiscovered({
@@ -119,7 +115,6 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
type: ScryptedDeviceType.Builtin,
},
);
this.aggregateCore = new AggregateCore();
})();
@@ -132,7 +127,6 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
type: ScryptedDeviceType.Builtin,
},
);
this.users = new UsersCore();
})();
}
@@ -145,6 +139,7 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
}
return this.storageSettings.getSettings();
}
async putSetting(key: string, value: SettingValue): Promise<void> {
await this.storageSettings.putSetting(key, value);
}
@@ -153,15 +148,15 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
if (nativeId === 'launcher')
return new LauncherMixin('launcher');
if (nativeId === 'mediacore')
return this.mediaCore;
return this.mediaCore ||= new MediaCore();
if (nativeId === ScriptCoreNativeId)
return this.scriptCore;
return this.scriptCore ||= new ScriptCore();
if (nativeId === AutomationCoreNativeId)
return this.automationCore;
return this.automationCore ||= new AutomationCore()
if (nativeId === AggregateCoreNativeId)
return this.aggregateCore;
return this.aggregateCore ||= new AggregateCore();
if (nativeId === UsersNativeId)
return this.users;
return this.users ||= new UsersCore();
}
async releaseDevice(id: string, nativeId: string): Promise<void> {
@@ -218,9 +213,9 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
const u = new URL(endpoint);
const rewritten = indexHtml
.replace('href="/endpoint/@scrypted/core/public/manifest.json"', `href="/endpoint/@scrypted/core/public/manifest.json${u.search}"`)
.replace('href="/endpoint/@scrypted/core/public/img/icons/apple-touch-icon-152x152.png"', `href="/endpoint/@scrypted/core/public/img/icons/apple-touch-icon-152x152.png${u.search}"`)
.replace('href="/endpoint/@scrypted/core/public/img/icons/safari-pinned-tab.svg"', `href="/endpoint/@scrypted/core/public/img/icons/safari-pinned-tab.svg${u.search}"`)
.replace('href="manifest.json"', `href="manifest.json${u.search}"`)
.replace('href="img/icons/apple-touch-icon-152x152.png"', `href="img/icons/apple-touch-icon-152x152.png${u.search}"`)
.replace('href="img/icons/safari-pinned-tab.svg"', `href="img/icons/safari-pinned-tab.svg${u.search}"`)
;
response.send(rewritten, {
headers: {

View File

@@ -5,6 +5,7 @@ const { systemManager, deviceManager, mediaManager, endpointManager } = sdk;
import { RequestMediaObjectHost, FileHost, BufferHost } from './converters';
import url from 'url';
export const MediaCoreNativeId = 'mediacore';
export class MediaCore extends ScryptedDeviceBase implements DeviceProvider, BufferConverter, HttpRequestHandler {
httpHost: BufferHost;
httpsHost: BufferHost;
@@ -12,8 +13,8 @@ export class MediaCore extends ScryptedDeviceBase implements DeviceProvider, Buf
fileHost: FileHost;
filesHost: FileHost;
constructor(nativeId: string) {
super(nativeId);
constructor() {
super(MediaCoreNativeId);
this.fromMimeType = ScryptedMimeTypes.SchemePrefix + 'scrypted-media';
this.toMimeType = ScryptedMimeTypes.MediaObject;

View File

@@ -148,11 +148,12 @@
<script>
import axios from "axios";
import Drawer from "./components/Drawer.vue";
import { removeAlert, getAlertIcon } from "./components/helpers";
import router from "./router";
import { getCurrentBaseUrl, logoutScryptedClient } from '../../../../packages/client/src/index';
import Login from "./Login.vue";
import Reconnect from "./Reconnect.vue";
import Drawer from "./components/Drawer.vue";
import { getAlertIcon, removeAlert } from "./components/helpers";
import router from "./router";
import store from "./store";
export default {
@@ -176,7 +177,7 @@ export default {
},
methods: {
goHome() {
window.location ='/';
window.location = getCurrentBaseUrl();
},
toggleDarkMode() {
this.darkMode = !this.darkMode;
@@ -186,8 +187,9 @@ export default {
reload() {
window.location.reload();
},
logout() {
axios.get("/logout").then(() => window.location.reload());
async logout() {
await logoutScryptedClient(getCurrentBaseUrl());
window.location.reload();
},
async clearAlerts() {
const alerts = await this.$scrypted.systemManager.getComponent("alerts");

View File

@@ -118,14 +118,15 @@
</template>
<script>
import Login from "./Login.vue";
import App from "./App.vue";
import store from "./store";
import VueRouter from "vue-router";
import Reconnect from "./Reconnect.vue";
import { getAllDevices } from "./common/mixin";
import { ScryptedInterface } from "@scrypted/types";
import axios from 'axios';
import VueRouter from "vue-router";
import { combineBaseUrl, getCurrentBaseUrl, logoutScryptedClient } from '../../../../packages/client/src/index';
import App from "./App.vue";
import Login from "./Login.vue";
import Reconnect from "./Reconnect.vue";
import { getAllDevices } from "./common/mixin";
import store from "./store";
const nvrInstall = '/component/plugin/install/@scrypted/nvr'
@@ -163,8 +164,9 @@ export default {
this.refreshApplications();
},
methods: {
logout() {
axios.get("/logout").then(() => window.location.reload());
async logout() {
await logoutScryptedClient(getCurrentBaseUrl());
window.location.reload();
},
refreshApplications() {
if (!this.$store.state.isConnected || !this.$store.state.isLoggedIn || this.$route.name !== 'Launcher')
@@ -176,10 +178,13 @@ export default {
const applications = getAllDevices(systemManager).filter(device => device.interfaces.includes(ScryptedInterface.LauncherApplication));
this.applications = applications.map(app => {
const appId = app.interfaces.includes(ScryptedInterface.ScryptedPlugin) ? app.pluginId : app.id;
const baseUrl = getCurrentBaseUrl();
const defaultUrl = combineBaseUrl(baseUrl, `endpoint/${appId}/public/`);
const ret = {
name: (app.applicationInfo && app.applicationInfo.name) || app.name,
icon: app.applicationInfo && app.applicationInfo.icon,
href: (app.applicationInfo && app.applicationInfo.href) || `/endpoint/${appId}/public/`,
href: (app.applicationInfo && app.applicationInfo.href) || defaultUrl,
};
return ret;
});

View File

@@ -5,6 +5,8 @@
<v-card-text>
<v-card-title style="justify-content: center;" class="headline text-uppercase">Scrypted
</v-card-title>
<v-card-subtitle v-if="$store.state.hasLogin === false" style="justify-content: center;" class="text-uppercase">Create Account
</v-card-subtitle>
<v-container grid-list-md>
<v-layout wrap>
<v-flex xs12>
@@ -34,12 +36,13 @@
<v-card-actions>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn v-on="on" icon href="https://twitter.com/scryptedapp/">
<v-icon small>fab fa-twitter</v-icon>
<v-btn v-on="on" icon href="https://discord.gg/DcFzmBHYGq">
<v-icon small>fab fa-discord</v-icon>
</v-btn>
</template>
<span>Twitter</span>
<span>Discord</span>
</v-tooltip>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn v-on="on" icon href="https://www.reddit.com/r/Scrypted/">
@@ -48,6 +51,7 @@
</template>
<span>Reddit</span>
</v-tooltip>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn v-on="on" icon href="https://github.com/koush/scrypted">
@@ -56,14 +60,7 @@
</template>
<span>Github</span>
</v-tooltip>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn v-on="on" icon href="https://discord.gg/DcFzmBHYGq">
<v-icon small>fab fa-discord</v-icon>
</v-btn>
</template>
<span>Discord</span>
</v-tooltip>
<v-spacer></v-spacer>
<v-btn type="submit" text @click.prevent="doLogin">Log In</v-btn>
</v-card-actions>

View File

@@ -1,5 +1,5 @@
import Vue from "vue";
import { checkScryptedClientLogin, connectScryptedClient, loginScryptedClient, redirectScryptedLogin } from '../../../../packages/client/src/index';
import { checkScryptedClientLogin, connectScryptedClient, getCurrentBaseUrl, loginScryptedClient, redirectScryptedLogin } from '../../../../packages/client/src/index';
import store from './store';
function hasValue(state: any, property: string) {
@@ -22,7 +22,7 @@ function isValidDevice(id: string) {
export function loginScrypted(username: string, password: string, change_password: string) {
return loginScryptedClient({
baseUrl: undefined,
baseUrl: getCurrentBaseUrl(),
username,
password,
change_password,
@@ -33,6 +33,8 @@ Vue.use(Vue => {
Vue.prototype.$connectScrypted = () => {
const clientPromise = connectScryptedClient({
pluginId: '@scrypted/core',
// need this in case the scrypted server is proxied.
baseUrl: getCurrentBaseUrl(),
});
store.commit("setHasLogin", undefined);
@@ -40,11 +42,14 @@ Vue.use(Vue => {
store.commit("setUsername", undefined);
store.commit("setIsConnected", undefined);
return checkScryptedClientLogin()
return checkScryptedClientLogin({
baseUrl: getCurrentBaseUrl(),
})
.then(response => {
if (response.redirect) {
redirectScryptedLogin({
redirect: response.redirect,
baseUrl: getCurrentBaseUrl(),
});
return;
}

View File

@@ -36,6 +36,7 @@ import { Terminal } from "xterm";
import { FitAddon } from "xterm-addon-fit";
import eio from "engine.io-client";
import { sleep } from "../common/sleep";
import { getCurrentBaseUrl } from "../../../../../packages/client/src";
export default {
props: ["deviceId"],
@@ -64,10 +65,12 @@ export default {
},
reconnect(term) {
this.buffer = [];
const endpointPath = `/endpoint/@scrypted/core`;
const baseUrl = getCurrentBaseUrl();
const eioPath = `endpoint/@scrypted/core/engine.io/console/${this.deviceId}`;
const eioEndpoint = baseUrl ? new URL(eioPath, baseUrl).pathname : '/' + eioPath;
const options = {
path: `${endpointPath}/engine.io/console/${this.deviceId}`,
path: eioEndpoint,
};
const rootLocation = `${window.location.protocol}//${window.location.host}`;
this.socket = eio(rootLocation, options);
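For context, a small sketch of what the new path computation yields when scrypted is served behind a reverse proxy under a subpath; the base URL below is hypothetical (without a base URL the path falls back to the old absolute form):
```
// Illustrative only: resolving the engine.io path against a proxied base URL.
const baseUrl = 'https://example.com/scrypted/'; // hypothetical getCurrentBaseUrl() result
const eioPath = 'endpoint/@scrypted/core/engine.io/console/123';

const eioEndpoint = baseUrl ? new URL(eioPath, baseUrl).pathname : '/' + eioPath;
console.log(eioEndpoint); // "/scrypted/endpoint/@scrypted/core/engine.io/console/123"
```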

View File

@@ -50,6 +50,17 @@
<v-list-item-title>Discord</v-list-item-title>
</v-list-item-content>
</v-list-item>
<v-list-item link href="https://www.reddit.com/r/Scrypted/" active-class="purple white--text tile">
<v-list-item-icon>
<v-icon small>fab fa-reddit</v-icon>
</v-list-item-icon>
<v-list-item-content>
<v-list-item-title>Reddit</v-list-item-title>
</v-list-item-content>
</v-list-item>
<v-list-item link href="https://github.com/koush/scrypted" active-class="purple white--text tile">
<v-list-item-icon>
<v-icon small>fab fa-github</v-icon>
@@ -59,6 +70,7 @@
<v-list-item-title>Github</v-list-item-title>
</v-list-item-content>
</v-list-item>
<v-divider></v-divider>
<v-list-item active-class="deep-purple accent-4 white--text">
<v-list-item-icon>
@@ -137,20 +149,31 @@ export default {
getComponentViewPath,
async checkUpdateAvailable() {
await this.$connectingScrypted;
const info = await this.$scrypted.systemManager.getComponent("info");
const version = await info.getVersion();
this.currentVersion = version;
const { updateAvailable } = await checkUpdate(
"@scrypted/server",
version
const serviceControl = await this.$scrypted.systemManager.getComponent(
"service-control"
);
this.updateAvailable = updateAvailable;
if (updateAvailable) {
try {
this.updateAvailable = await serviceControl.getUpdateAvailable();
}
catch (e) {
// old scrypted servers don't support this call, or it may be unimplemented
// in which case fall back and determine what the install type is.
const info = await this.$scrypted.systemManager.getComponent("info");
const version = await info.getVersion();
this.currentVersion = version;
const { updateAvailable } = await checkUpdate(
"@scrypted/server",
version
);
this.updateAvailable = updateAvailable;
}
if (this.updateAvailable) {
const logger = this.$scrypted.deviceManager.getDeviceLogger();
const u = new URL(window.location)
u.hash = "#/component/settings";
logger.clearAlerts();
logger.a(`Scrypted Server update available: ${updateAvailable}. ${u}`);
logger.a(`Scrypted Server update available: ${this.updateAvailable}. ${u}`);
}
},
filterComponents: function (category) {

View File

@@ -8,6 +8,7 @@
import { Terminal } from "xterm";
import { FitAddon } from "xterm-addon-fit";
import eio from "engine.io-client";
import { getCurrentBaseUrl } from "../../../../../packages/client/src";
export default {
props: ["deviceId"],
@@ -28,10 +29,12 @@ export default {
term.open(this.$refs.terminal);
fitAddon.fit();
const endpointPath = `/endpoint/@scrypted/core`;
const baseUrl = getCurrentBaseUrl();
const eioPath = `endpoint/@scrypted/core/engine.io/repl/${this.deviceId}`;
const eioEndpoint = baseUrl ? new URL(eioPath, baseUrl).pathname : '/' + eioPath;
const options = {
path: `${endpointPath}/engine.io/repl/${this.deviceId}`,
path: eioEndpoint,
};
const rootLocation = `${window.location.protocol}//${window.location.host}`;
this.socket = eio(rootLocation, options);

View File

@@ -130,17 +130,28 @@ export default {
const info = await this.$scrypted.systemManager.getComponent("info");
const version = await info.getVersion();
this.currentVersion = version;
const { updateAvailable } = await checkUpdate(
"@scrypted/server",
version
const serviceControl = await this.$scrypted.systemManager.getComponent(
"service-control"
);
this.updateAvailable = updateAvailable;
try {
this.updateAvailable = await serviceControl.getUpdateAvailable();
}
catch (e) {
// old scrypted servers don't support this call, or it may be unimplemented
// in which case fall back and determine what the install type is.
const { updateAvailable } = await checkUpdate(
"@scrypted/server",
version
);
this.updateAvailable = updateAvailable;
}
},
async loadEnv() {
const info = await this.$scrypted.systemManager.getComponent("info");
const env = await info.getScryptedEnv();
this.showRestart = !!env.SCRYPTED_CAN_RESTART;
this.canUpdate = !!env.SCRYPTED_NPM_SERVE || !!env.SCRYPTED_WEBHOOK_UPDATE;
this.canUpdate = !!env.SCRYPTED_NPM_SERVE || !!env.SCRYPTED_WEBHOOK_UPDATE || !!env.SCRYPTED_CAN_UPDATE;
},
async doRestart() {
this.restartStatus = "Restarting...";

View File

@@ -8,6 +8,7 @@
import { Terminal } from "xterm";
import { FitAddon } from "xterm-addon-fit";
import eio from "engine.io-client";
import { getCurrentBaseUrl } from "../../../../../../packages/client/src";
export default {
socket: null,
@@ -27,8 +28,11 @@ export default {
term.open(this.$refs.terminal);
fitAddon.fit();
const baseUrl = getCurrentBaseUrl();
const eioPath = `engine.io/shell`;
const eioEndpoint = baseUrl ? new URL(eioPath, baseUrl).pathname : '/' + eioPath;
const options = {
path: `/engine.io/shell`,
path: eioEndpoint,
};
const rootLocation = `${window.location.protocol}//${window.location.host}`;
this.socket = eio(rootLocation, options);

View File

@@ -1,20 +1,15 @@
import { ScryptedStatic } from '@scrypted/types';
import { DeviceManager } from '@scrypted/types';
import { DeviceCreator } from '@scrypted/types';
import { ScryptedInterface } from '@scrypted/types';
import { Scriptable } from '@scrypted/types';
import { SystemManager } from '@scrypted/types';
import { DeviceCreator, Scriptable, ScryptedInterface, ScryptedStatic, SystemManager } from '@scrypted/types';
import axios, { AxiosResponse } from 'axios';
import semver from 'semver';
import { getAllDevices } from '../../common/mixin';
import { sleep } from '../../common/sleep';
import { getComponentWebPath } from "../helpers";
const componentPath = getComponentWebPath('script');
const pluginSnapshot = require("!!raw-loader!./plugin-snapshot.ts").default.split('\n')
.filter(line => !line.includes('SCRYPTED_FILTER_EXAMPLE_LINE'))
.join('\n')
.trim();
import { getComponentWebPath } from "../helpers";
const componentPath = getComponentWebPath('script');
export interface PluginUpdateCheck {
updateAvailable?: string;
versions: any;
@@ -64,11 +59,11 @@ export async function checkUpdate(npmPackage: string, npmPackageVersion: string)
}
export async function installNpm(systemManager: SystemManager, npmPackage: string, version?: string): Promise<string> {
let suffix = version ? `/${version}` : '';
const response = await axios.post(
`${componentPath}/install/${npmPackage}${suffix}`
);
return response.data.id;
const plugins = await systemManager.getComponent('plugins');
await plugins.installNpm(npmPackage, version);
await sleep(0);
const plugin = systemManager.getDeviceById(npmPackage)
return plugin.id;
}
export function getNpmPath(npmPackage: string) {

View File

@@ -2,30 +2,28 @@
<span>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn small text v-on="on" :href="`/endpoint/${device.id}/public/`">
<v-btn small text v-on="on" :href="endpointHref">
<font-awesome-icon size="lg" :icon="['fab', 'chrome']" :color="colors.blue.base" />
</v-btn>
</template>
<span>View the public endpoint of this plugin.</span>
</v-tooltip>
<v-tooltip bottom>
<template v-slot:activator="{ on }">
<v-btn small text v-on="on" :href="`/endpoint/${device.id}/`">
<font-awesome-icon size="lg" icon="user-secret" :color="colors.red.base" />
</v-btn>
</template>
<span>View the private http endpoint of this plugin.</span>
</v-tooltip>
</span>
</template>
<script>
import { getCurrentBaseUrl } from "../../../../../packages/client/src";
import RPCInterface from "./RPCInterface.vue";
import colors from "vuetify/es5/util/colors";
export default {
mixins: [RPCInterface],
data() {
const baseUrl = getCurrentBaseUrl();
const endpoint = `endpoint/${this.device.id}/public/`;
const endpointHref = baseUrl ? new URL(endpoint, baseUrl).pathname : '/' + endpoint;
return {
endpointHref,
colors
};
},

View File

@@ -4,6 +4,7 @@
<script>
import qs from 'query-string';
import RPCInterface from "./RPCInterface.vue";
import { getCurrentBaseUrl } from '../../../../../packages/client/src';
export default {
mixins: [RPCInterface],
@@ -16,9 +17,18 @@ export default {
return true;
}
},
onClick: function () {
// https://stackoverflow.com/a/39387533
const windowReference = this.isIFrame() ? window.open(undefined, '_blank') : undefined;
onClick: async function () {
// must escape iframe for login.
if (this.isIFrame()) {
const endpointManager = this.$scrypted.endpointManager;
const ep = await endpointManager.getPublicLocalEndpoint();
const u = new URL(ep);
u.hash = window.location.hash;
u.pathname = '/endpoint/@scrypted/core/public/';
window.open(u.toString(), '_blank');
return;
}
this.rpc()
.getOauthUrl()
.then(data => {
@@ -51,10 +61,7 @@ export default {
r: window.location.toString(),
});
url.search = qs.stringify(querystring);
if (windowReference)
windowReference.location = url.toString();
else
window.location = url.toString();
window.location = url.toString();
});
}
}

View File

@@ -97,7 +97,7 @@ module.exports = {
// clientsClaim: true,
}
},
publicPath: process.env.NODE_ENV === 'production' ? '/endpoint/@scrypted/core/public' : '/',
publicPath: process.env.NODE_ENV === 'production' ? '' : '/',
runtimeCompiler: true,
devServer: {

View File

@@ -1,19 +1,19 @@
{
"name": "@scrypted/coreml",
"version": "0.1.14",
"version": "0.1.15",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/coreml",
"version": "0.1.14",
"version": "0.1.15",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.85",
"version": "0.2.101",
"dev": true,
"license": "ISC",
"dependencies": {

View File

@@ -40,5 +40,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.14"
"version": "0.1.15"
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.126",
"version": "0.0.127",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/hikvision",
"version": "0.0.126",
"version": "0.0.127",
"license": "Apache",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",

Some files were not shown because too many files have changed in this diff.