Compare commits

..

170 Commits

Author SHA1 Message Date
Koushik Dutta
53c4aa7066 Merge branch 'main' of github.com:koush/scrypted 2024-06-25 22:10:51 -07:00
Koushik Dutta
ce5547e4e7 server: Fix bug where mixins are not invalidated after a plugin restart fails, and then later succeeds. 2024-06-25 22:10:45 -07:00
Koushik Dutta
95bdf5c2b5 homekit: Update README.md 2024-06-25 19:18:51 -07:00
Koushik Dutta
8953a96089 homekit: Update README.md 2024-06-25 19:16:58 -07:00
Koushik Dutta
0d270454ab predict: switch everything to yolov9t/c 2024-06-25 15:46:25 -07:00
Koushik Dutta
e740a695c0 h264: refactor/cleanup common utils 2024-06-25 12:41:57 -07:00
Koushik Dutta
78118daa69 rebroadcast: fix hang if ffmpeg exits without any connection 2024-06-25 09:44:06 -07:00
Koushik Dutta
61a824d322 sdk: suppress storage event for hidden values 2024-06-25 09:04:00 -07:00
Koushik Dutta
06bac3c748 reolink: fix settings regression 2024-06-25 07:51:54 -07:00
Koushik Dutta
16b10dc353 postbeta 2024-06-25 00:16:54 -07:00
Koushik Dutta
6892b443e0 server: fix plugin fork storage desync 2024-06-25 00:16:20 -07:00
Koushik Dutta
8b303e037e rebroadcast: delete legacy code 2024-06-25 00:15:53 -07:00
Koushik Dutta
76efef37ea reolink: fix errant device provider on all devices 2024-06-24 18:15:47 -07:00
Koushik Dutta
e64a66aa66 reolink: simple password check, publish pull requests 2024-06-24 18:13:17 -07:00
Koushik Dutta
05578d28c6 rebroadcast: fix resolution reporting 2024-06-24 15:39:05 -07:00
Koushik Dutta
0889aea3be rebroadcast: reduce error logging 2024-06-24 12:24:01 -07:00
Koushik Dutta
a081e6e3c9 rebroadcast: refactor codec and resolution detection 2024-06-24 09:05:20 -07:00
Koushik Dutta
5dfa0889b7 homekit: window covering wip 2024-06-23 19:53:41 -07:00
Koushik Dutta
ed1d09b9be homekit: window covering wip 2024-06-23 19:53:07 -07:00
Koushik Dutta
2d8a986155 Merge branch 'main' of github.com:koush/scrypted 2024-06-23 17:43:38 -07:00
Koushik Dutta
1fb4cfd3b6 homekit: window covering wip 2024-06-23 17:43:34 -07:00
George Talusan
2d987747a2 reolink: add siren support (#1506) 2024-06-23 15:17:52 -07:00
Koushik Dutta
d39e4e3ff1 hikvision: fix channel id on old nvr 2024-06-22 16:48:37 -07:00
Koushik Dutta
012ca48f9a hikvision: Fix older nvr that send malformed boundary events 2024-06-22 12:46:12 -07:00
Koushik Dutta
cca1f3e000 wyze: update dwb and publish beta 2024-06-21 15:51:32 -07:00
Koushik Dutta
40a38cfd31 webrtc: fix icloud private relay 2024-06-20 20:36:56 -07:00
Koushik Dutta
d2b39e8fa3 cloud: handle case where cloud token comes populated from desktop app with no server id. 2024-06-20 13:04:01 -07:00
Koushik Dutta
20101cda2e common: fix audio only rtp parsing 2024-06-20 10:31:28 -07:00
Koushik Dutta
c90724daa6 common: createActivityTimeout 2024-06-20 09:21:21 -07:00
Koushik Dutta
fedb22fab2 common: fix rtp forwarder in some cases when audio does not exist 2024-06-19 20:20:59 -07:00
Koushik Dutta
994f1974d7 Merge branch 'main' of github.com:koush/scrypted 2024-06-19 20:03:30 -07:00
Koushik Dutta
d648fe552d common: fix double rtp packet delivery in some cases 2024-06-19 20:03:23 -07:00
Koushik Dutta
ccafff28cd cloud: update deps and publish 2024-06-19 09:13:29 -07:00
Koushik Dutta
3da49d47af cloud: fix cloudflared exit 2024-06-19 08:37:20 -07:00
George Talusan
e1918cfa89 reolink: add ONVIF objection detection for cameras that can support it (#1502) 2024-06-19 08:00:42 -07:00
Koushik Dutta
7b19204d77 webrtc: fixup for intercom? 2024-06-18 20:20:19 -07:00
Koushik Dutta
5dac1de87e rebroadcast/common: h265 keyframe search 2024-06-18 20:18:59 -07:00
Koushik Dutta
c9a2474f17 Merge branch 'main' of github.com:koush/scrypted 2024-06-18 13:56:12 -07:00
Koushik Dutta
e5d9d0d054 rebroadcast: handle basic h265 idr parsing 2024-06-18 13:56:08 -07:00
Koushik Dutta
1272582510 snapshot: log http endpoit errors 2024-06-18 11:03:33 -07:00
Koushik Dutta
51271a0e02 unif-protect: id remapping fix 2024-06-18 11:03:08 -07:00
Koushik Dutta
9b32952a22 Merge branch 'main' of github.com:koush/scrypted 2024-06-18 10:04:54 -07:00
Koushik Dutta
5b92aea54b webrtc: fix google device access leaks? 2024-06-18 10:04:48 -07:00
Brett Jia
61b59f4ca0 rknn: use numpy < 2.0.0 (#1500) 2024-06-16 18:38:34 -07:00
Koushik Dutta
93f8f43de2 onnx: derp 2024-06-16 16:53:45 -07:00
Koushik Dutta
dc88e0b07f detect: pin numpy to prevent usage of 2.0 2024-06-16 16:50:44 -07:00
Koushik Dutta
14a9f953a9 coreml: lock numpy, new 2.0 breaks stuff 2024-06-16 13:18:46 -07:00
George Talusan
528885d5e2 reolink: save GetAiState to hasObjectDetector so object detector can work (#1497) 2024-06-15 12:19:07 -07:00
Koushik Dutta
e779f37689 proxmox: no one can read 2024-06-14 07:54:26 -07:00
Koushik Dutta
c6c2a8dc49 predict: prevent face recognition on low score faces 2024-06-10 22:11:19 -07:00
Koushik Dutta
d8d2fd25cd predict: fix batch error reporting 2024-06-10 16:02:50 -07:00
Koushik Dutta
301a5b6685 predict: increase face recognition threshold 2024-06-10 13:10:08 -07:00
Koushik Dutta
2a4bac42ed core: opencl update 2024-06-07 13:18:36 -07:00
Koushik Dutta
f55cadedb5 linux: update intel opencl debs 2024-06-07 13:14:48 -07:00
Koushik Dutta
dd9ff45b21 predict: widen text input size from 256 to 384 2024-06-07 10:26:51 -07:00
Koushik Dutta
a0aada2f03 Merge branch 'main' of github.com:koush/scrypted 2024-06-06 10:26:07 -07:00
Koushik Dutta
8499843f31 predict: use locked pillow version 2024-06-06 10:26:01 -07:00
Koushik Dutta
672a33b93b ha: Update config.yaml 2024-06-05 22:56:02 -07:00
Koushik Dutta
f9a744c7dc core: make login 1 week 2024-06-05 21:37:16 -07:00
Koushik Dutta
5b124013b7 predict: fix lpr deskew bugs 2024-06-05 11:14:31 -07:00
Koushik Dutta
d2f1c69e98 predict: shuffle common files out of tflite 2024-06-05 08:30:39 -07:00
Koushik Dutta
2a2f96a771 predict: use smaller image for face detect 2024-06-04 22:15:43 -07:00
Koushik Dutta
dc9b5f447e postrelease 2024-06-04 15:55:09 -07:00
Koushik Dutta
1fb0c01e7e postbeta 2024-06-04 15:53:17 -07:00
Koushik Dutta
014d7b35ac server: ensure plugins get restarted if failing during reload 2024-06-04 15:53:04 -07:00
Koushik Dutta
b08267dab0 server: beta 2024-06-04 13:59:58 -07:00
Koushik Dutta
97d78516f2 postbeta 2024-06-04 13:59:53 -07:00
Koushik Dutta
360c2437c1 postbeta 2024-06-04 13:26:57 -07:00
Koushik Dutta
0b230bfc74 Merge branch 'main' of github.com:koush/scrypted 2024-06-04 12:58:12 -07:00
Koushik Dutta
d25dc8d266 postbeta 2024-06-04 12:57:01 -07:00
Koushik Dutta
5f4d1e99cd postbeta 2024-06-04 12:43:17 -07:00
Koushik Dutta
ee38ef7817 Update bug_report.md 2024-06-04 08:32:06 -07:00
Koushik Dutta
80af38d3e1 Merge branch 'main' of github.com:koush/scrypted 2024-06-03 23:34:05 -07:00
Koushik Dutta
2f19866f05 predict: relax face threshold 2024-06-03 23:34:01 -07:00
Long Zheng
cf1c500e9d common: Enable TypeScript strict for packages/auth-fetch (#1493)
* Add tsconfig strict to packages/auth-fetch

* Refactor switch case

* Revert "Refactor switch case"

This reverts commit b5004664bb.

* Revert switch changes
2024-06-03 17:48:38 -07:00
Koushik Dutta
9a770e9dc9 predict: update models 2024-06-03 15:08:58 -07:00
Koushik Dutta
6dbb8863a0 Merge branch 'main' of github.com:koush/scrypted 2024-06-03 10:38:44 -07:00
Koushik Dutta
5eac8d0ab9 predict: lock opencv version,
roll back to 9c flt
2024-06-03 10:38:36 -07:00
Long Zheng
272bad8f29 cli: Enable TypeScript strict for packages/cli (#1494)
* Enable strict mode on packages/cli

* Fix condition
2024-06-03 10:34:18 -07:00
Koushik Dutta
83a3352862 predict: extract rough text scores 2024-06-02 13:33:54 -07:00
Koushik Dutta
4d5a693208 core: add labels to detection preview 2024-06-02 08:07:48 -07:00
Koushik Dutta
70e7f944c0 postrelease 2024-06-01 22:02:17 -07:00
Koushik Dutta
5a52c03a3d postrelease 2024-06-01 20:09:34 -07:00
Koushik Dutta
f9f597ef01 server: guard entire plugin load block 2024-06-01 13:07:55 -07:00
Koushik Dutta
2e07788c0c server: log plugin load failure 2024-06-01 13:05:56 -07:00
Koushik Dutta
9c0fbc1cb6 common: listenZeroSingleClient configurable timeout 2024-06-01 09:44:51 -07:00
Koushik Dutta
239d49899d unifi-protect: fix id remapping 2024-06-01 09:19:32 -07:00
Koushik Dutta
2d3589b5a3 unifi-protect: fix id remapping 2024-06-01 08:49:37 -07:00
Koushik Dutta
96ec465a38 unifi: more logging 2024-06-01 08:07:24 -07:00
Koushik Dutta
5bb6b87c7d predict: yolov10m 2024-05-31 15:17:24 -07:00
Koushik Dutta
fcfedccaf8 postrelease 2024-05-31 14:01:24 -07:00
Koushik Dutta
98373833fd postrelease 2024-05-31 13:38:43 -07:00
Brett Jia
03588be125 rknn: use correct nativeId for text recognition (#1492) 2024-05-31 13:24:18 -07:00
Koushik Dutta
cdd81daec5 Merge branch 'main' of github.com:koush/scrypted 2024-05-31 10:49:12 -07:00
Koushik Dutta
d64f90c0c8 predict: republish with smaller plate/face models. fix openvino thread bugs 2024-05-31 10:49:08 -07:00
Brett Jia
ec31dee36e onnx: fix text recognition thread names (#1491) 2024-05-31 09:56:18 -07:00
Brett Jia
11f2e88590 rknn: add text recognition (#1490)
* rknn: add text recognition

* disable verbose
2024-05-31 09:56:09 -07:00
Koushik Dutta
bf51ddb2d5 server: checks to ensure plugin restart doesnt ignore zombie states 2024-05-31 08:26:20 -07:00
Koushik Dutta
26000f1828 predict: yolov10 2024-05-30 09:55:28 -07:00
Koushik Dutta
f65485af97 Merge remote-tracking branch 'origin/main' into rebroadcast 2024-05-30 09:37:02 -07:00
Koushik Dutta
72c5690d05 rebroadcast: beta 2024-05-30 09:29:48 -07:00
Koushik Dutta
e076d61122 rebroadcast: fixup reverts 2024-05-30 09:29:14 -07:00
Koushik Dutta
7071808514 Revert "rebroadcast: parser perf refactor"
This reverts commit f677cf7393.
2024-05-30 09:27:27 -07:00
Koushik Dutta
1e2fd46cd3 Revert "rebroadcast: more parser refactor"
This reverts commit 5432b5b917.
2024-05-30 09:24:53 -07:00
Koushik Dutta
e3cdd4326f videoanalysis: label scores 2024-05-30 09:21:07 -07:00
Koushik Dutta
227f932ad8 coreml: yolov10 2024-05-30 09:20:53 -07:00
Koushik Dutta
67cec188ce docker: fix partition detection 2024-05-30 07:49:38 -07:00
Koushik Dutta
1ee276185e sdk: label score 2024-05-28 21:59:59 -07:00
Brett Jia
42ed855b05 actions: replace local install test with setup-scrypted action (#1488)
* actions: replace local install test with setup-scrypted action

* update

* extract server version from package.json

* use package-lock.json
2024-05-28 12:59:13 -07:00
Jonathan Yip
93da4eed30 docker: Add security_opt to allow container to talk to host avahi daemon (#1487) 2024-05-28 09:21:31 -07:00
Long Zheng
a72a596578 homekit: Homekit camera close recording tweaks (#1486)
* Change throw to log

Throw will not work since the `handleFragmentsRequests` async generator is already closed/finished by HAP

* Move isOpen check

HAP still requests fragment after closing the recording stream. Skip processing it.

* Change catch message

* Add another !isOpen in case race condition with await
2024-05-27 10:12:00 -07:00
Brett Jia
72663dd68c installer: allow specifying exact server version to install (#1485)
* Update install-scrypted-dependencies-mac.sh

* Update install-scrypted-dependencies-linux.sh

* Update install-scrypted-dependencies-win.ps1

* Update install-scrypted-dependencies-win.ps1

* Update install-scrypted-dependencies-win.ps1

* Update install-scrypted-dependencies-win.ps1
2024-05-26 12:51:02 -07:00
Koushik Dutta
108d57dbdd Merge remote-tracking branch 'origin/main' into rebroadcast 2024-05-26 09:06:54 -07:00
Brett Jia
bc71fd8515 server: print python interpreter path (#1484) 2024-05-25 22:29:46 -07:00
Koushik Dutta
a51070767b homekit: change default advertiser back to ciao due to issues. use identifying material 2024-05-25 19:26:21 -07:00
Koushik Dutta
269cc4dbc9 rebroadcast: beta 2024-05-24 22:43:18 -07:00
Koushik Dutta
684961fa4b openvino: types 2024-05-24 22:43:11 -07:00
Koushik Dutta
4f60b7e379 sdk: update 2024-05-24 22:42:48 -07:00
Koushik Dutta
5d72061151 ha: publish 2024-05-21 09:19:43 -07:00
Brett Jia
f2c940c1d3 server: add SCRYPTED_COMPATIBILITY_FILE (#1479) 2024-05-19 13:38:57 -07:00
Koushik Dutta
7e817b0b30 rebroadcast: further removal of legacy code 2024-05-19 11:22:10 -07:00
Brett Jia
75bb15d3b7 Revert "server: make fetching network interfaces optional (#1474)" (#1478)
This reverts commit 0160502da8.
2024-05-17 17:39:24 -07:00
Koushik Dutta
ba1a1eff67 onnx: report device in use 2024-05-17 09:08:07 -07:00
Koushik Dutta
5432b5b917 rebroadcast: more parser refactor 2024-05-16 22:33:23 -07:00
Koushik Dutta
f677cf7393 rebroadcast: parser perf refactor 2024-05-15 14:17:06 -07:00
Koushik Dutta
bdf9278131 rebroadcast: initial pass and removing legacy parsers 2024-05-15 10:03:26 -07:00
Koushik Dutta
0ae93a9c3f cli: publish 2024-05-15 09:24:18 -07:00
Long Zheng
72422cdd8b windows: Fix Windows server install with installDir containing space (#1471)
* Fix server install with installDir containing space

* Revert "Fix server install with installDir containing space"

This reverts commit b99ccd3c3d.

* Alternate fix by wrapping each runCommand arg in a quote for Windows
2024-05-15 09:23:05 -07:00
Koushik Dutta
390d1b3329 onnx: add windows cuda support 2024-05-14 15:18:17 -07:00
Koushik Dutta
024e99766a amcrest: fix legacy boundary https://github.com/koush/scrypted/issues/1475 2024-05-14 15:06:21 -07:00
Brett Jia
0160502da8 server: make fetching network interfaces optional (#1474) 2024-05-14 13:40:12 -07:00
Koushik Dutta
f0d65982de postrelease 2024-05-13 19:31:55 -07:00
Koushik Dutta
1445933bd4 postbeta 2024-05-13 17:40:33 -07:00
Koushik Dutta
508f31c254 core: update intel opencl in lxc 2024-05-13 17:36:33 -07:00
Koushik Dutta
fd1aa10a2a postbeta 2024-05-13 17:08:32 -07:00
Koushik Dutta
fceed68d75 postbeta 2024-05-13 15:49:05 -07:00
Koushik Dutta
955e780c64 docker: fix missing intel dep 2024-05-13 13:22:37 -07:00
Koushik Dutta
452fe20e8f docker/lxc: update intel graphics install script 2024-05-13 12:44:36 -07:00
Koushik Dutta
9083e16cdb postbeta 2024-05-13 10:22:09 -07:00
Koushik Dutta
840a278e5d server: add methods to manage plugin engine.io connections 2024-05-13 10:21:43 -07:00
Koushik Dutta
6d036dbd60 server: fix python runtime worker setup 2024-05-13 10:21:29 -07:00
Koushik Dutta
d5ba6f34d6 onnx: cleanup 2024-05-13 10:00:53 -07:00
Koushik Dutta
0321846c22 storage-settings/videoanalysis: fix default value of 0 2024-05-12 21:43:55 -07:00
Koushik Dutta
714747fcee videoanalysis: fix bug 2024-05-12 21:20:37 -07:00
Koushik Dutta
e3906da3c4 videoanalysis: new option to reset smart motion sensor only when motion stops 2024-05-12 21:17:41 -07:00
Koushik Dutta
820ef70033 predict plugins: refactor recog, add onnx, fix spurious model leaks 2024-05-12 21:00:48 -07:00
Koushik Dutta
0c95f5c052 tapo: fix 2 way audio on some models 2024-05-11 19:11:58 -07:00
Koushik Dutta
4cfd7c4362 tapo: fix 2 way audio on some models 2024-05-11 19:11:42 -07:00
Koushik Dutta
1e8126dec8 common: header casing preservation 2024-05-11 19:11:30 -07:00
Koushik Dutta
d3fbc58736 openvino: fix yolo nas labels 2024-05-11 12:05:02 -07:00
Koushik Dutta
46113744b3 dev: fix setup script 2024-05-11 10:07:18 -07:00
Koushik Dutta
3947624ae0 openvino: rollback to stable openvino 2024-05-11 09:07:32 -07:00
Koushik Dutta
4ac5ded012 openvino/onnx: publish yolo nas 2024-05-10 20:47:41 -07:00
Koushik Dutta
aadfacf50a Merge branch 'main' of github.com:koush/scrypted 2024-05-10 19:46:01 -07:00
Koushik Dutta
bb1e0ac82b coreml: yolo-nas 2024-05-10 19:45:56 -07:00
Koushik Dutta
23a15a1533 docker: mount changes may need systemctl daemon-reload 2024-05-09 12:59:36 -07:00
Koushik Dutta
01dd480c01 Merge branch 'main' of github.com:koush/scrypted 2024-05-08 20:33:17 -07:00
Koushik Dutta
364cae3273 openvino: update pypi 2024-05-08 20:33:12 -07:00
Koushik Dutta
8a986ab707 windows: -y on choco install 2024-05-08 07:56:31 -07:00
Koushik Dutta
ca96959de8 openvino/onnx: move prep into separate threads 2024-05-07 20:42:06 -07:00
Koushik Dutta
2f0ae9ef50 snapshot: fix weird npm cache bug 2024-05-07 20:26:59 -07:00
Koushik Dutta
8b84bac2c2 snapshot/homekit: fix stale snapshots 2024-05-07 20:22:45 -07:00
Koushik Dutta
976ed7f1a5 onnx: multiple gpu support 2024-05-07 11:26:56 -07:00
Koushik Dutta
b4e6821da8 tensorflow-lite: use dict vs queue for perf 2024-05-07 10:52:06 -07:00
Koushik Dutta
540b990a08 ha: Update config.yaml 2024-05-06 18:26:18 -07:00
Koushik Dutta
ce75b072da various: publish 2024-05-06 16:31:37 -07:00
Greg Thornton
5bca9b7156 homekit: fix late 2way setup (#1461)
* homekit: fix late 2way setup

* homekit: use 2-way state rather than playing/intitializing
2024-05-06 16:28:38 -07:00
Greg Thornton
ae4914346b webrtc: don't store audio codec before intercom start (#1462) 2024-05-06 14:08:47 -07:00
slyoldfox
b593209558 Fix bticino intercom, clean up some dependencies (#1463)
* Allow setting the DEVADDR option

* Fix intercom by using startRtpForwarderProcess()
Clean up some dependencies

* Allow logging errors in sip-manager
Cleanup bloated and barely used dependencies
2024-05-06 13:07:30 -07:00
Koushik Dutta
9df399708f postrelease 2024-05-04 13:07:02 -07:00
173 changed files with 6009 additions and 3305 deletions

View File

@@ -13,11 +13,11 @@ Before opening an issue, view the device's Console logs in the Scrypted Manageme
**DO NOT OPEN ISSUES FOR ANY OF THE FOLLOWING:**
* Server setup assistance. Use Discord, Reddit, or Github Discussions.
* Hardware setup assistance. Use Discord, Reddit, or Github Discussions.
* Server or hardware setup assistance. Use Discord, Reddit, or Github Discussions.
* Feature Requests. Use Discord, Reddit, or Github Discussions.
* Packet loss in your camera logs. This is wifi/network congestion.
* HomeKit weirdness. See HomeKit troubleshooting guide.
* Release schedules or timelines. Releases are rolled out unevenly across the different server platforms.
However, if something **was working**, and is now **no longer working**, you may create a Github issue.
Issues that do not meet these requirements, or that are improperly filled out, will be closed immediately.

View File

@@ -9,52 +9,28 @@ on:
workflow_dispatch:
jobs:
test_linux_local:
name: Test Linux local installation
runs-on: ubuntu-latest
test_local:
name: Test local installation on ${{ matrix.runner }}
runs-on: ${{ matrix.runner }}
strategy:
fail-fast: false
matrix:
runner: [ubuntu-latest, macos-14, macos-13, windows-latest]
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Run install script
- name: Parse latest server release
id: parse_server
shell: bash
run: |
cat ./install/local/install-scrypted-dependencies-linux.sh | sudo SERVICE_USER=$USER bash
- name: Test server is running
run: |
systemctl status scrypted.service
curl -k --retry 20 --retry-all-errors --retry-max-time 600 https://localhost:10443/
test_mac_local:
name: Test Mac local installation
runs-on: macos-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Run install script
run: |
mkdir -p ~/.scrypted
bash ./install/local/install-scrypted-dependencies-mac.sh
- name: Test server is running
run: |
curl -k --retry 20 --retry-all-errors --retry-max-time 600 https://localhost:10443/
test_windows_local:
name: Test Windows local installation
runs-on: windows-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Run install script
run: |
.\install\local\install-scrypted-dependencies-win.ps1
- name: Test server is running
run: |
curl -k --retry 20 --retry-all-errors --retry-max-time 600 https://localhost:10443/
VERSION=$(cat ./server/package-lock.json | jq -r '.version')
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
echo "Will test @scrypted/server@$VERSION"
- name: Install scrypted server
uses: scryptedapp/setup-scrypted@v0.0.2
with:
branch: ${{ github.sha }}
version: ${{ steps.parse_server.outputs.version }}

103
common/package-lock.json generated
View File

@@ -74,7 +74,7 @@
},
"../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.4",
"version": "0.3.29",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -111,64 +111,57 @@
},
"../server": {
"name": "@scrypted/server",
"version": "0.82.0",
"version": "0.106.0",
"hasInstallScript": true,
"license": "ISC",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.11",
"@scrypted/types": "^0.3.4",
"adm-zip": "^0.5.10",
"@scrypted/ffmpeg-static": "^6.1.0-build1",
"@scrypted/node-pty": "^1.0.10",
"@scrypted/types": "^0.3.28",
"adm-zip": "^0.5.12",
"body-parser": "^1.20.2",
"cookie-parser": "^1.4.6",
"debug": "^4.3.4",
"dotenv": "^16.4.5",
"engine.io": "^6.5.4",
"express": "^4.18.2",
"ffmpeg-static": "^5.2.0",
"follow-redirects": "^1.15.4",
"express": "^4.19.2",
"follow-redirects": "^1.15.6",
"http-auth": "^4.2.0",
"ip": "^1.1.8",
"level": "^8.0.0",
"linkfs": "^2.1.0",
"ip": "^2.0.1",
"level": "^8.0.1",
"lodash": "^4.17.21",
"memfs": "^4.6.0",
"mime": "^3.0.0",
"nan": "^2.18.0",
"nan": "^2.19.0",
"node-dijkstra": "^2.5.0",
"node-forge": "^1.3.1",
"node-gyp": "^10.0.1",
"node-gyp": "^10.1.0",
"py": "npm:@bjia56/portable-python@^0.1.31",
"router": "^1.3.8",
"semver": "^7.5.4",
"sharp": "^0.33.1",
"semver": "^7.6.2",
"sharp": "^0.33.3",
"source-map-support": "^0.5.21",
"tar": "^6.2.0",
"tar": "^7.1.0",
"tslib": "^2.6.2",
"typescript": "^5.3.3",
"typescript": "^5.4.5",
"whatwg-mimetype": "^4.0.0",
"ws": "^8.16.0"
"ws": "^8.17.0"
},
"bin": {
"scrypted-serve": "bin/scrypted-serve"
},
"devDependencies": {
"@types/adm-zip": "^0.5.5",
"@types/cookie-parser": "^1.4.6",
"@types/debug": "^4.1.12",
"@types/cookie-parser": "^1.4.7",
"@types/express": "^4.17.21",
"@types/follow-redirects": "^1.14.4",
"@types/http-auth": "^4.1.4",
"@types/ip": "^1.1.3",
"@types/lodash": "^4.14.202",
"@types/mime": "^3.0.4",
"@types/lodash": "^4.17.1",
"@types/node-dijkstra": "^2.5.6",
"@types/node-forge": "^1.3.10",
"@types/pem": "^1.14.4",
"@types/semver": "^7.5.6",
"@types/node-forge": "^1.3.11",
"@types/semver": "^7.5.8",
"@types/source-map-support": "^0.5.10",
"@types/tar": "^6.1.10",
"@types/whatwg-mimetype": "^3.0.2",
"@types/ws": "^8.5.10"
},
"optionalDependencies": {
"node-pty-prebuilt-multiarch": "^0.10.1-pre.5"
}
},
"node_modules/@cspotcode/source-map-support": {
@@ -453,53 +446,47 @@
"version": "file:../server",
"requires": {
"@mapbox/node-pre-gyp": "^1.0.11",
"@scrypted/types": "^0.3.4",
"@scrypted/ffmpeg-static": "^6.1.0-build1",
"@scrypted/node-pty": "^1.0.10",
"@scrypted/types": "^0.3.28",
"@types/adm-zip": "^0.5.5",
"@types/cookie-parser": "^1.4.6",
"@types/debug": "^4.1.12",
"@types/cookie-parser": "^1.4.7",
"@types/express": "^4.17.21",
"@types/follow-redirects": "^1.14.4",
"@types/http-auth": "^4.1.4",
"@types/ip": "^1.1.3",
"@types/lodash": "^4.14.202",
"@types/mime": "^3.0.4",
"@types/lodash": "^4.17.1",
"@types/node-dijkstra": "^2.5.6",
"@types/node-forge": "^1.3.10",
"@types/pem": "^1.14.4",
"@types/semver": "^7.5.6",
"@types/node-forge": "^1.3.11",
"@types/semver": "^7.5.8",
"@types/source-map-support": "^0.5.10",
"@types/tar": "^6.1.10",
"@types/whatwg-mimetype": "^3.0.2",
"@types/ws": "^8.5.10",
"adm-zip": "^0.5.10",
"adm-zip": "^0.5.12",
"body-parser": "^1.20.2",
"cookie-parser": "^1.4.6",
"debug": "^4.3.4",
"dotenv": "^16.4.5",
"engine.io": "^6.5.4",
"express": "^4.18.2",
"ffmpeg-static": "^5.2.0",
"follow-redirects": "^1.15.4",
"express": "^4.19.2",
"follow-redirects": "^1.15.6",
"http-auth": "^4.2.0",
"ip": "^1.1.8",
"level": "^8.0.0",
"linkfs": "^2.1.0",
"ip": "^2.0.1",
"level": "^8.0.1",
"lodash": "^4.17.21",
"memfs": "^4.6.0",
"mime": "^3.0.0",
"nan": "^2.18.0",
"nan": "^2.19.0",
"node-dijkstra": "^2.5.0",
"node-forge": "^1.3.1",
"node-gyp": "^10.0.1",
"node-pty-prebuilt-multiarch": "^0.10.1-pre.5",
"node-gyp": "^10.1.0",
"py": "npm:@bjia56/portable-python@^0.1.31",
"router": "^1.3.8",
"semver": "^7.5.4",
"sharp": "^0.33.1",
"semver": "^7.6.2",
"sharp": "^0.33.3",
"source-map-support": "^0.5.21",
"tar": "^6.2.0",
"tar": "^7.1.0",
"tslib": "^2.6.2",
"typescript": "^5.3.3",
"typescript": "^5.4.5",
"whatwg-mimetype": "^4.0.0",
"ws": "^8.16.0"
"ws": "^8.17.0"
}
},
"@tsconfig/node10": {

View File

@@ -0,0 +1,28 @@
/**
 * Creates an inactivity watchdog that invokes `timeoutCallback` at most once
 * when no activity has been observed for `timeout` milliseconds.
 *
 * Call `resetActivityTimer` on each unit of activity to keep the watchdog
 * alive; call `clearActivityTimer` to dispose of it.
 *
 * @param timeout inactivity window in milliseconds. A falsy value (0 or
 *                undefined) disables the watchdog entirely; the returned
 *                functions become no-ops.
 * @param timeoutCallback invoked once, after the window elapses with no activity.
 */
export function createActivityTimeout(timeout: number, timeoutCallback: () => void) {
    // undefined when the watchdog is disabled, has fired, or was cleared.
    let dataTimeout: NodeJS.Timeout | undefined;
    let lastTime = Date.now();
    function resetActivityTimer() {
        lastTime = Date.now();
    }
    function clearActivityTimer() {
        clearInterval(dataTimeout);
        // Drop the handle so a cleared watchdog is observably inert.
        // (The original only reset it on the firing path, leaving a stale
        // handle behind after a manual clear.)
        dataTimeout = undefined;
    }
    if (timeout) {
        // Poll a timestamp instead of re-arming a setTimeout on every reset:
        // resetActivityTimer is expected to be called very frequently (e.g.
        // per packet), and bumping a number is cheaper than rescheduling.
        dataTimeout = setInterval(() => {
            if (Date.now() > lastTime + timeout) {
                clearActivityTimer();
                timeoutCallback();
            }
        }, timeout);
    }
    resetActivityTimer();
    return {
        resetActivityTimer,
        clearActivityTimer,
    }
}

View File

@@ -89,27 +89,44 @@ export const H264_NAL_TYPE_FU_B = 29;
export const H264_NAL_TYPE_MTAP16 = 26;
export const H264_NAL_TYPE_MTAP32 = 27;
export const H265_NAL_TYPE_AGG = 48;
export const H265_NAL_TYPE_VPS = 32;
export const H265_NAL_TYPE_SPS = 33;
export const H265_NAL_TYPE_PPS = 34;
export const H265_NAL_TYPE_IDR_N = 19;
export const H265_NAL_TYPE_IDR_W = 20;
/**
 * Searches the last chunk of an h264 StreamChunk for a NAL unit of the given
 * type, skipping the first 12 bytes (presumably the RTP header — confirm
 * against the chunk producer). Returns undefined for non-h264 chunks.
 */
export function findH264NaluType(streamChunk: StreamChunk, naluType: number) {
    if (streamChunk.type === 'h264') {
        const { chunks } = streamChunk;
        const lastChunk = chunks[chunks.length - 1];
        return findH264NaluTypeInNalu(lastChunk.subarray(12), naluType);
    }
}
/**
 * Searches the last chunk of an h265 StreamChunk for a NAL unit of the given
 * type, skipping the first 12 bytes (presumably the RTP header — confirm
 * against the chunk producer). Returns undefined for non-h265 chunks.
 */
export function findH265NaluType(streamChunk: StreamChunk, naluType: number) {
    if (streamChunk.type !== 'h265')
        return;
    const lastChunk = streamChunk.chunks[streamChunk.chunks.length - 1];
    return findH265NaluTypeInNalu(lastChunk.subarray(12), naluType);
}
/**
 * Extracts the nal_unit_type from the first byte of an H.264 NAL unit header.
 * The type occupies the low 5 bits; the forbidden_zero_bit and nal_ref_idc
 * occupy the high 3 bits and are masked off.
 */
export function parseH264NaluType(firstNaluByte: number) {
    return firstNaluByte & 0b00011111;
}
export function findH264NaluTypeInNalu(nalu: Buffer, naluType: number) {
const checkNaluType = nalu[0] & 0x1f;
const checkNaluType = parseH264NaluType(nalu[0]);
if (checkNaluType === H264_NAL_TYPE_STAP_A) {
let pos = 1;
while (pos < nalu.length) {
const naluLength = nalu.readUInt16BE(pos);
pos += 2;
const stapaType = nalu[pos] & 0x1f;
const stapaType = parseH264NaluType(nalu[pos]);
if (stapaType === naluType)
return nalu.subarray(pos, pos + naluLength);
pos += naluLength;
}
}
else if (checkNaluType === H264_NAL_TYPE_FU_A) {
const fuaType = nalu[1] & 0x1f;
const fuaType = parseH264NaluType(nalu[1]);
const isFuStart = !!(nalu[1] & 0x80);
if (fuaType === naluType && isFuStart)
@@ -121,39 +138,52 @@ export function findH264NaluTypeInNalu(nalu: Buffer, naluType: number) {
return;
}
/**
 * Extracts the 6-bit nal_unit_type from the first byte of an H.265 NAL unit
 * header (bit layout: forbidden_zero_bit, 6-bit type, then layer id bits).
 */
function parseH265NaluType(firstNaluByte: number) {
    return (firstNaluByte & 0b01111110) >> 1;
}

/**
 * Searches an H.265 NAL unit for a unit of the given type. If the unit is an
 * aggregation packet (AP, type 48), its contained units are scanned;
 * otherwise the unit itself is matched.
 *
 * @param nalu raw NAL unit, starting at its 2-byte NAL unit header.
 * @param naluType the H.265 nal_unit_type to look for.
 * @returns the matching NAL unit buffer, or undefined if not present.
 */
export function findH265NaluTypeInNalu(nalu: Buffer, naluType: number) {
    const checkNaluType = parseH265NaluType(nalu[0]);
    if (checkNaluType === H265_NAL_TYPE_AGG) {
        // RFC 7798 section 4.4.2: the AP payload header is 2 octets, followed
        // by a sequence of (2-octet size, NAL unit) aggregation units.
        // Parsing previously began at offset 1, misreading the second header
        // byte as the high byte of the first aggregation unit's size.
        let pos = 2;
        while (pos < nalu.length) {
            const naluLength = nalu.readUInt16BE(pos);
            pos += 2;
            const aggUnitType = parseH265NaluType(nalu[pos]);
            if (aggUnitType === naluType)
                return nalu.subarray(pos, pos + naluLength);
            pos += naluLength;
        }
    }
    else if (checkNaluType === naluType) {
        return nalu;
    }
    return;
}
/**
 * Collects the NAL unit types present in the final chunk of an h264
 * StreamChunk, skipping the first 12 bytes (presumably the RTP header —
 * confirm against the chunk producer). Returns an empty set for
 * non-h264 chunks.
 */
export function getNaluTypes(streamChunk: StreamChunk) {
    if (streamChunk.type === 'h264') {
        const { chunks } = streamChunk;
        return getNaluTypesInNalu(chunks[chunks.length - 1].subarray(12));
    }
    return new Set<number>();
}
/**
 * Inspects an H.264 NAL unit and reports its FU-A (fragmentation unit)
 * status: whether the unit is an FU-A fragment, and whether the FU header
 * flags it as the start and/or end fragment of the fragmented NAL unit.
 */
export function getNaluFragmentInformation(nalu: Buffer) {
    const isFua = (nalu[0] & 0x1f) === H264_NAL_TYPE_FU_A;
    const fuHeader = nalu[1];
    return {
        fua: isFua,
        // S bit (0x80): this is the first fragment of the NAL unit.
        fuaStart: isFua && !!(fuHeader & 0x80),
        // E bit (0x40): this is the last fragment of the NAL unit.
        fuaEnd: isFua && !!(fuHeader & 0x40),
    }
}
export function getNaluTypesInNalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
const ret = new Set<number>();
const naluType = nalu[0] & 0x1f;
const naluType = parseH264NaluType(nalu[0]);
if (naluType === H264_NAL_TYPE_STAP_A) {
ret.add(H264_NAL_TYPE_STAP_A);
let pos = 1;
while (pos < nalu.length) {
const naluLength = nalu.readUInt16BE(pos);
pos += 2;
const stapaType = nalu[pos] & 0x1f;
const stapaType = parseH264NaluType(nalu[pos]);
ret.add(stapaType);
pos += naluLength;
}
}
else if (naluType === H264_NAL_TYPE_FU_A) {
ret.add(H264_NAL_TYPE_FU_A);
const fuaType = nalu[1] & 0x1f;
const fuaType = parseH264NaluType(nalu[1]);
if (fuaRequireStart) {
const isFuStart = !!(nalu[1] & 0x80);
if (isFuStart)
@@ -175,6 +205,33 @@ export function getNaluTypesInNalu(nalu: Buffer, fuaRequireStart = false, fuaReq
return ret;
}
/**
 * Collects the NAL unit types present in the final chunk of an h265
 * StreamChunk, skipping the first 12 bytes (presumably the RTP header —
 * confirm against the chunk producer). Returns an empty set for
 * non-h265 chunks.
 */
export function getH265NaluTypes(streamChunk: StreamChunk) {
    if (streamChunk.type === 'h265') {
        const { chunks } = streamChunk;
        return getNaluTypesInH265Nalu(chunks[chunks.length - 1].subarray(12));
    }
    return new Set<number>();
}
/**
 * Collects the H.265 nal_unit_types present in a NAL unit. If the unit is an
 * aggregation packet (AP, type 48), the types of all contained units are
 * collected in addition to the AP type itself.
 *
 * @param nalu raw NAL unit, starting at its 2-byte NAL unit header.
 * @param fuaRequireStart unused; kept for signature parity with the H.264 variant.
 * @param fuaRequireEnd unused; kept for signature parity with the H.264 variant.
 */
export function getNaluTypesInH265Nalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
    const ret = new Set<number>();
    const naluType = parseH265NaluType(nalu[0]);
    if (naluType === H265_NAL_TYPE_AGG) {
        ret.add(H265_NAL_TYPE_AGG);
        // RFC 7798 section 4.4.2: the AP payload header is 2 octets, followed
        // by a sequence of (2-octet size, NAL unit) aggregation units.
        // Parsing previously began at offset 1, splitting the payload header
        // across the first size read and yielding bogus contained types.
        let pos = 2;
        while (pos < nalu.length) {
            const naluLength = nalu.readUInt16BE(pos);
            pos += 2;
            ret.add(parseH265NaluType(nalu[pos]));
            pos += naluLength;
        }
    }
    else {
        ret.add(naluType);
    }
    return ret;
}
export function createRtspParser(options?: StreamParserOptions): RtspStreamParser {
let resolve: any;
@@ -195,12 +252,23 @@ export function createRtspParser(options?: StreamParserOptions): RtspStreamParse
findSyncFrame(streamChunks: StreamChunk[]) {
for (let prebufferIndex = 0; prebufferIndex < streamChunks.length; prebufferIndex++) {
const streamChunk = streamChunks[prebufferIndex];
if (streamChunk.type !== 'h264') {
continue;
if (streamChunk.type === 'h264') {
const naluTypes = getNaluTypes(streamChunk);
if (naluTypes.has(H264_NAL_TYPE_SPS) || naluTypes.has(H264_NAL_TYPE_IDR)) {
return streamChunks.slice(prebufferIndex);
}
}
else if (streamChunk.type === 'h265') {
const naluTypes = getH265NaluTypes(streamChunk);
if (findH264NaluType(streamChunk, H264_NAL_TYPE_SPS) || findH264NaluType(streamChunk, H264_NAL_TYPE_IDR)) {
return streamChunks.slice(prebufferIndex);
if (naluTypes.has(H265_NAL_TYPE_VPS)
|| naluTypes.has(H265_NAL_TYPE_SPS)
|| naluTypes.has(H265_NAL_TYPE_PPS)
|| naluTypes.has(H265_NAL_TYPE_IDR_N)
|| naluTypes.has(H265_NAL_TYPE_IDR_W)
) {
return streamChunks.slice(prebufferIndex);
}
}
}
@@ -540,6 +608,7 @@ export class RtspClient extends RtspBase {
throw new Error('no WWW-Authenticate found');
const { BASIC } = await import('http-auth-utils');
// @ts-ignore
const { parseHTTPHeadersQuotedKeyValueSet } = await import('http-auth-utils/dist/utils');
if (this.wwwAuthenticate.includes('Basic')) {

View File

@@ -1,6 +1,6 @@
# Home Assistant Addon Configuration
name: Scrypted
version: "20-jammy-full.s6-v0.99.0"
version: "v0.111.0-jammy-full"
slug: scrypted
description: Scrypted is a high performance home video integration and automation platform
url: "https://github.com/koush/scrypted"

View File

@@ -35,7 +35,7 @@ services:
# Avahi can be used for network discovery by passing in the host daemon
# or running the daemon inside the container. Choose one or the other.
# Uncomment next line to run avahi-daemon inside the container.
# See volumes section below to use the host daemon.
# See volumes and security_opt section below to use the host daemon.
# - SCRYPTED_DOCKER_AVAHI=true
# NVIDIA (Part 1 of 4)
@@ -71,11 +71,16 @@ services:
# Ensure Avahi is running on the host machine:
# It can be installed with: sudo apt-get install avahi-daemon
# This is not compatible with running avahi inside the container (see above).
# Also, uncomment the lines under security_opt
# - /var/run/dbus:/var/run/dbus
# - /var/run/avahi-daemon/socket:/var/run/avahi-daemon/socket
# Default volume for the Scrypted database. Typically should not be changed.
- ~/.scrypted/volume:/server/volume
# Uncomment the following lines to use Avahi daemon from the host
# Without this, AppArmor will block the container's attempt to talk to Avahi via dbus
# security_opt:
# - apparmor:unconfined
devices: [
# uncomment the common systems devices to pass
# them through to docker.

View File

@@ -1,13 +1,35 @@
if [ "$(uname -m)" = "x86_64" ]
then
echo "Installing Intel graphics packages."
apt-get update && apt-get install -y gpg-agent &&
rm -f /usr/share/keyrings/intel-graphics.gpg &&
curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --yes --output /usr/share/keyrings/intel-graphics.gpg &&
echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list &&
apt-get -y update &&
apt-get -y install intel-opencl-icd intel-media-va-driver-non-free &&
# this script previously ran `apt install intel-media-va-driver-non-free`, but that seems to no longer be necessary.
# the Intel-provided script is disabled since it does not work with the 6.8 kernel in Ubuntu 24.04 or Proxmox 8.2.
# manual installation of the Intel graphics drivers is required.
# echo "Installing Intel graphics packages."
# apt-get update && apt-get install -y gpg-agent &&
# rm -f /usr/share/keyrings/intel-graphics.gpg &&
# curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --yes --output /usr/share/keyrings/intel-graphics.gpg &&
# echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list &&
# apt-get -y update &&
# apt-get -y install intel-opencl-icd &&
# apt-get -y dist-upgrade;
# manual installation
# https://github.com/intel/compute-runtime/releases/tag/24.13.29138.7
rm -rf /tmp/neo && mkdir -p /tmp/neo && cd /tmp/neo &&
apt-get install -y ocl-icd-libopencl1 &&
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.16695.4/intel-igc-core_1.0.16695.4_amd64.deb &&
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.16695.4/intel-igc-opencl_1.0.16695.4_amd64.deb &&
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/intel-level-zero-gpu-dbgsym_1.3.29377.6_amd64.ddeb &&
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/intel-level-zero-gpu_1.3.29377.6_amd64.deb &&
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/intel-opencl-icd-dbgsym_24.17.29377.6_amd64.ddeb &&
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/intel-opencl-icd_24.17.29377.6_amd64.deb &&
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/libigdgmm12_22.3.19_amd64.deb &&
dpkg -i *.deb &&
cd /tmp && rm -rf /tmp/neo &&
apt-get -y dist-upgrade;
exit $?
else
echo "Intel graphics will not be installed on this architecture."

View File

@@ -61,6 +61,8 @@ then
sudo apt-get -y install avahi-daemon
sed -i 's/'#' - \/var\/run\/dbus/- \/var\/run\/dbus/g' $DOCKER_COMPOSE_YML
sed -i 's/'#' - \/var\/run\/avahi-daemon/- \/var\/run\/avahi-daemon/g' $DOCKER_COMPOSE_YML
sed -i 's/'#' security_opt:/security_opt:/g' $DOCKER_COMPOSE_YML
sed -i 's/'#' - apparmor:unconfined/ - apparmor:unconfined/g' $DOCKER_COMPOSE_YML
fi
echo "Setting permissions on $SCRYPTED_HOME"

View File

@@ -72,6 +72,7 @@ function removescryptedfstab() {
grep -v "scrypted-nvr" /etc/fstab > /tmp/fstab && cp /tmp/fstab /etc/fstab
# ensure newline
sed -i -e '$a\' /etc/fstab
systemctl daemon-reload
}
BLOCK_DEVICE="/dev/$1"
@@ -95,7 +96,17 @@ then
set +e
sync
mkfs -F -t ext4 "$BLOCK_DEVICE"1
PARTITION_DEVICE="$BLOCK_DEVICE"1
if [ ! -e "$PARTITION_DEVICE" ]
then
PARTITION_DEVICE="$BLOCK_DEVICE"p1
if [ ! -e "$PARTITION_DEVICE" ]
then
echo "Unable to determine block device partition from block device: $BLOCK_DEVICE"
exit 1
fi
fi
mkfs -F -t ext4 "$PARTITION_DEVICE"
sync
# parse/evaluate blkid line as env vars
@@ -119,6 +130,7 @@ then
mkdir -p /mnt/scrypted-nvr
echo "PARTLABEL=scrypted-nvr /mnt/scrypted-nvr ext4 defaults,nofail 0 0" >> /etc/fstab
mount -a
systemctl daemon-reload
set +e
DIR="/mnt/scrypted-nvr"

View File

@@ -97,7 +97,7 @@ echo "docker compose rm -rf"
sudo -u $SERVICE_USER docker rm -f /scrypted /scrypted-watchtower 2> /dev/null
echo "Installing Scrypted..."
RUN sudo -u $SERVICE_USER npx -y scrypted@latest install-server
RUN sudo -u $SERVICE_USER npx -y scrypted@latest install-server $SCRYPTED_INSTALL_VERSION
cat > /etc/systemd/system/scrypted.service <<EOT

View File

@@ -121,7 +121,7 @@ then
fi
echo "Installing Scrypted..."
RUN $NPX_PATH -y scrypted@latest install-server
RUN $NPX_PATH -y scrypted@latest install-server $SCRYPTED_INSTALL_VERSION
cat > ~/Library/LaunchAgents/app.scrypted.server.plist <<EOT
<?xml version="1.0" encoding="UTF-8"?>

View File

@@ -11,7 +11,7 @@ iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/in
choco upgrade -y nodejs-lts --version=20.11.1
# Install VC Redist, which is necessary for portable python
choco install vcredist140
choco install -y vcredist140
# TODO: remove python install, and use portable python
# Install Python
@@ -26,7 +26,12 @@ $env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";"
py $SCRYPTED_WINDOWS_PYTHON_VERSION -m pip install --upgrade pip
py $SCRYPTED_WINDOWS_PYTHON_VERSION -m pip install debugpy typing_extensions typing opencv-python
npx -y scrypted@latest install-server
$SCRYPTED_INSTALL_VERSION=[System.Environment]::GetEnvironmentVariable("SCRYPTED_INSTALL_VERSION","User")
if ($SCRYPTED_INSTALL_VERSION -eq $null) {
npx -y scrypted@latest install-server
} else {
npx -y scrypted@latest install-server $SCRYPTED_INSTALL_VERSION
}
$USER_HOME_ESCAPED = $env:USERPROFILE.replace('\', '\\')
$SCRYPTED_HOME = $env:USERPROFILE + '\.scrypted'

View File

@@ -41,12 +41,19 @@ pct restore $VMID $SCRYPTED_TAR_ZST $@
if [ "$?" != "0" ]
then
echo ""
echo "pct restore failed"
echo "The Scrypted container installation failed (pct restore error)."
echo ""
echo "This may be caused by the server's 'local' storage not supporting containers."
echo "Try running this script again with a different storage device (local-lvm, local-zfs). For example:"
echo "This may be because the server's 'local' storage device is not a valid"
echo "location for containers."
echo "Try running this script again with a different storage device like"
echo "'local-lvm' or 'local-zfs'."
echo ""
echo "#############################################################################"
echo "Paste the following command into this shell to install to local-lvm instead:"
echo ""
echo "bash $0 --storage local-lvm"
echo "#############################################################################"
echo ""
echo ""
exit 1
fi

View File

@@ -1,10 +1,4 @@
#!/bin/bash
echo 'if (!process.version.startsWith("v18")) throw new Error("Node 18 is required. Install Node Version Manager (nvm) for versioned node installations. See https://github.com/koush/scrypted/pull/498#issuecomment-1373854020")' | node
if [ "$?" != 0 ]
then
exit
fi
echo ######################################
echo "Setting up popular plugins."
echo "Additional plugins will need to be installed manually with npm."
@@ -15,7 +9,7 @@ cd $(dirname $0)
git submodule init
git submodule update
for directory in sdk common server packages/client packages/auth-fetch
for directory in sdk server common packages/client packages/auth-fetch
do
echo "$directory > npm install"
pushd $directory

View File

@@ -1,4 +1,4 @@
import { HttpFetchOptions, HttpFetchResponseType, checkStatus, fetcher, getFetchMethod, setDefaultHttpFetchAccept } from '../../../server/src/fetch';
import { HttpFetchOptions, HttpFetchResponseType, checkStatus, createHeadersArray, fetcher, getFetchMethod, hasHeader, setDefaultHttpFetchAccept, setHeader } from '../../../server/src/fetch';
export interface AuthFetchCredentialState {
username: string;
@@ -70,19 +70,19 @@ async function getAuth(options: AuthFetchOptions, url: string | URL, method: str
export function createAuthFetch<B, M>(
h: fetcher<B, M>,
parser: (body: M, responseType: HttpFetchResponseType) => Promise<any>
parser: (body: M, responseType: HttpFetchResponseType | undefined) => Promise<any>
) {
const authHttpFetch = async <T extends HttpFetchOptions<B>>(options: T & AuthFetchOptions): ReturnType<typeof h<T>> => {
const method = getFetchMethod(options);
const headers = new Headers(options.headers);
const headers = createHeadersArray(options.headers);
options.headers = headers;
setDefaultHttpFetchAccept(headers, options.responseType);
const initialHeader = await getAuth(options, options.url, method);
// try to provide an authorization if a session exists, but don't override Authorization if provided already.
// 401 will trigger a proper auth.
if (initialHeader && !headers.has('Authorization'))
headers.set('Authorization', initialHeader);
if (initialHeader && !hasHeader(headers, 'Authorization'))
setHeader(headers, 'Authorization', initialHeader);
const initialResponse = await h({
...options,
@@ -99,7 +99,7 @@ export function createAuthFetch<B, M>(
};
}
let authenticateHeaders: string | string[] = initialResponse.headers.get('www-authenticate');
let authenticateHeaders: string | string[] | null = initialResponse.headers.get('www-authenticate');
if (!authenticateHeaders)
throw new Error('Did not find WWW-Authenticate header.');
@@ -126,7 +126,7 @@ export function createAuthFetch<B, M>(
const header = await getAuth(options, options.url, method);
if (header)
headers.set('Authorization', header);
setHeader(headers, 'Authorization', header);
return h(options);
}

View File

@@ -9,6 +9,7 @@
"inlineSources": true,
"declaration": true,
"resolveJsonModule": true,
"strict": true
},
"include": [
"src/**/*"

View File

@@ -1,16 +1,16 @@
{
"name": "scrypted",
"version": "1.3.15",
"version": "1.3.16",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "scrypted",
"version": "1.3.15",
"version": "1.3.16",
"license": "ISC",
"dependencies": {
"@scrypted/client": "^1.3.3",
"@scrypted/types": "^0.2.99",
"@scrypted/types": "^0.3.30",
"engine.io-client": "^6.5.3",
"readline-sync": "^1.4.10",
"semver": "^7.5.4",
@@ -101,15 +101,11 @@
"rimraf": "^5.0.5"
}
},
"node_modules/@scrypted/client/node_modules/@scrypted/types": {
"version": "0.3.4",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.3.4.tgz",
"integrity": "sha512-k/YMx8lIWOkePgXfKW9POr12mb+erFU2JKxO7TW92GyW8ojUWw9VOc0PK6O9bybi0vhsEnvMFkO6pO6bAonsVA=="
},
"node_modules/@scrypted/types": {
"version": "0.2.99",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.99.tgz",
"integrity": "sha512-2J1FH7tpAW5X3rgA70gJ+z0HFM90c/tBA+JXdP1vI1d/0yVmh9TSxnHoCuADN4R2NQXHmoZ6Nbds9kKAQ/25XQ=="
"version": "0.3.30",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.3.30.tgz",
"integrity": "sha512-1k+JVSR6WSNmE/5mLdqfrTmV3uRbvZp0OwKb8ikNi39ysBuC000tQGcEdXZqhYqRgWdhDTWtxXe9XsYoAZGKmA==",
"license": "ISC"
},
"node_modules/@socket.io/component-emitter": {
"version": "3.1.0",

View File

@@ -1,6 +1,6 @@
{
"name": "scrypted",
"version": "1.3.15",
"version": "1.3.16",
"description": "",
"main": "./dist/packages/cli/src/main.js",
"bin": {
@@ -17,7 +17,7 @@
"license": "ISC",
"dependencies": {
"@scrypted/client": "^1.3.3",
"@scrypted/types": "^0.2.99",
"@scrypted/types": "^0.3.30",
"engine.io-client": "^6.5.3",
"readline-sync": "^1.4.10",
"semver": "^7.5.4",

View File

@@ -160,11 +160,11 @@ async function main() {
const ffmpegInput = await sdk.mediaManager.convertMediaObjectToJSON<FFmpegInput>(await pendingResult, ScryptedMimeTypes.FFmpegInput);
if (ffmpegInput.url && ffmpegInput.urls?.[0]) {
const url = new URL(ffmpegInput.url);
if (url.hostname === '127.0.0.1' && ffmpegInput.urls?.[0]) {
ffmpegInput.inputArguments = ffmpegInput.inputArguments.map(i => i === ffmpegInput.url ? ffmpegInput.urls?.[0] : i);
if (url.hostname === '127.0.0.1' && ffmpegInput.urls?.[0] && ffmpegInput.inputArguments) {
ffmpegInput.inputArguments = ffmpegInput.inputArguments.map(i => i === ffmpegInput.url && ffmpegInput.urls ? ffmpegInput.urls?.[0] : i);
}
}
const args = [...ffmpegInput.inputArguments];
const args = ffmpegInput.inputArguments ? [...ffmpegInput.inputArguments] : [];
if (ffmpegInput.h264FilterArguments)
args.push(...ffmpegInput.h264FilterArguments);
console.log('ffplay', ...args);

View File

@@ -14,8 +14,12 @@ const EXIT_FILE = '.exit';
const UPDATE_FILE = '.update';
async function runCommand(command: string, ...args: string[]) {
if (os.platform() === 'win32')
if (os.platform() === 'win32') {
command += '.cmd';
// wrap each argument in a quote to handle spaces in paths
// https://github.com/nodejs/node/issues/38490#issuecomment-927330248
args = args.map(arg => '"' + arg + '"');
}
console.log('running', command, ...args);
const cp = child_process.spawn(command, args, {
stdio: 'inherit',
@@ -86,7 +90,13 @@ export async function installServe(installVersion: string, ignoreError?: boolean
const installJson = path.join(installDir, 'install.json');
try {
const { version } = JSON.parse(fs.readFileSync(installJson).toString());
if (semver.parse(process.version).major !== semver.parse(version).major)
const processSemver = semver.parse(process.version);
if (!processSemver)
throw new Error('error parsing process version');
const installSemver = semver.parse(version);
if (!installSemver)
throw new Error('error parsing install.json version');
if (processSemver.major !== installSemver.major)
throw new Error('mismatch');
}
catch (e) {
@@ -107,16 +117,32 @@ export async function installServe(installVersion: string, ignoreError?: boolean
}
export async function serveMain(installVersion?: string) {
let install = !!installVersion;
const options = ((): { install: true; version: string } | { install: false } => {
if (installVersion) {
console.log(`Installing @scrypted/server@${installVersion}`);
return {
install: true,
version: installVersion
};
}
if (!fs.existsSync('node_modules/@scrypted/server')) {
console.log('Package @scrypted/server not found. Installing.');
return {
install: true,
version: 'latest',
};
}
return {
install: false,
}
})();
const { installDir, volume } = cwdInstallDir();
if (!fs.existsSync('node_modules/@scrypted/server')) {
install = true;
installVersion ||= 'latest';
console.log('Package @scrypted/server not found. Installing.');
}
if (install) {
await installServe(installVersion, true);
if (options.install) {
await installServe(options.version, true);
}
// todo: remove at some point after core lxc updater rolls out.

View File

@@ -9,6 +9,7 @@
"inlineSources": true,
"declaration": true,
"moduleResolution": "Node16",
"strict": true
},
"include": [
"src/**/*"

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.150",
"version": "0.0.151",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/amcrest",
"version": "0.0.150",
"version": "0.0.151",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.150",
"version": "0.0.151",
"description": "Amcrest Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",

View File

@@ -134,7 +134,7 @@ export interface AmcrestEventData {
export enum AmcrestEvent {
MotionStart = "Code=VideoMotion;action=Start",
MotionStop = "Code=VideoMotion;action=Stop",
MotionInfo = "Code=VideoMotionInfo;action=State",
MotionInfo = "Code=VideoMotionInfo;action=State",
AudioStart = "Code=AudioMutation;action=Start",
AudioStop = "Code=AudioMutation;action=Stop",
TalkInvite = "Code=_DoTalkAction_;action=Invite",
@@ -263,6 +263,8 @@ export class AmcrestCameraClient {
// make content type parsable as content disposition filename
const cd = contentType.parse(ct);
let { boundary } = cd.parameters;
// amcrest may send "--myboundary" or "-- myboundary" (with a space)
const altBoundary = `-- ${boundary}`;
boundary = `--${boundary}`;
const boundaryEnd = `${boundary}--`;
@@ -286,7 +288,7 @@ export class AmcrestCameraClient {
this.console.log('ignoring dahua http body', body);
continue;
}
if (ignore !== boundary) {
if (ignore !== boundary && ignore !== altBoundary) {
this.console.error('expected boundary but found', ignore);
this.console.error(response.headers);
throw new Error('expected boundary');

View File

@@ -1,29 +1,26 @@
{
"name": "@scrypted/bticino",
"version": "0.0.15",
"version": "0.0.16",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/bticino",
"version": "0.0.15",
"version": "0.0.16",
"dependencies": {
"@slyoldfox/sip": "^0.0.6-1",
"sdp": "^3.0.3",
"stun": "^2.1.0",
"uuid": "^8.3.2"
"stun": "^2.1.0"
},
"devDependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^16.9.6",
"@types/uuid": "^8.3.4",
"cross-env": "^7.0.3",
"ts-node": "^10.9.1"
}
},
"../../common": {
"name": "@scrypted/common",
"version": "1.0.1",
"dev": true,
"license": "ISC",
@@ -39,8 +36,7 @@
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.14",
"version": "0.3.29",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -89,18 +85,18 @@
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz",
"integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
"dev": true,
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/sourcemap-codec": {
"version": "1.4.14",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz",
"integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==",
"version": "1.4.15",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz",
"integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==",
"dev": true
},
"node_modules/@jridgewell/trace-mapping": {
@@ -130,9 +126,9 @@
}
},
"node_modules/@tsconfig/node10": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
"integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==",
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz",
"integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==",
"dev": true
},
"node_modules/@tsconfig/node12": {
@@ -148,27 +144,21 @@
"dev": true
},
"node_modules/@tsconfig/node16": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.3.tgz",
"integrity": "sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==",
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
"integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
"dev": true
},
"node_modules/@types/node": {
"version": "16.18.16",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.16.tgz",
"integrity": "sha512-ZOzvDRWp8dCVBmgnkIqYCArgdFOO9YzocZp8Ra25N/RStKiWvMOXHMz+GjSeVNe5TstaTmTWPucGJkDw0XXJWA==",
"dev": true
},
"node_modules/@types/uuid": {
"version": "8.3.4",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-8.3.4.tgz",
"integrity": "sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw==",
"version": "16.18.96",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.96.tgz",
"integrity": "sha512-84iSqGXoO+Ha16j8pRZ/L90vDMKX04QTYMTfYeE1WrjWaZXuchBehGUZEpNgx7JnmlrIHdnABmpjrQjhCnNldQ==",
"dev": true
},
"node_modules/acorn": {
"version": "8.8.2",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz",
"integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==",
"version": "8.11.3",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
"integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==",
"dev": true,
"bin": {
"acorn": "bin/acorn"
@@ -178,9 +168,9 @@
}
},
"node_modules/acorn-walk": {
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz",
"integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==",
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz",
"integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==",
"dev": true,
"engines": {
"node": ">=0.4.0"
@@ -229,12 +219,18 @@
}
},
"node_modules/call-bind": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
"integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz",
"integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==",
"dependencies": {
"function-bind": "^1.1.1",
"get-intrinsic": "^1.0.2"
"es-define-property": "^1.0.0",
"es-errors": "^1.3.0",
"function-bind": "^1.1.2",
"get-intrinsic": "^1.2.4",
"set-function-length": "^1.2.1"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -365,6 +361,22 @@
"node": ">=0.10"
}
},
"node_modules/define-data-property": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
"integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
"dependencies": {
"es-define-property": "^1.0.0",
"es-errors": "^1.3.0",
"gopd": "^1.0.1"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
@@ -382,6 +394,25 @@
"is-arrayish": "^0.2.1"
}
},
"node_modules/es-define-property": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
"integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
"dependencies": {
"get-intrinsic": "^1.2.4"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/filter-obj": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz",
@@ -402,9 +433,12 @@
}
},
"node_modules/function-bind": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/generate-function": {
"version": "2.3.1",
@@ -415,13 +449,29 @@
}
},
"node_modules/get-intrinsic": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
"integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==",
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
"integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
"dependencies": {
"function-bind": "^1.1.1",
"has": "^1.0.3",
"has-symbols": "^1.0.3"
"es-errors": "^1.3.0",
"function-bind": "^1.1.2",
"has-proto": "^1.0.1",
"has-symbols": "^1.0.3",
"hasown": "^2.0.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/gopd": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
"integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
"dependencies": {
"get-intrinsic": "^1.1.3"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -432,15 +482,26 @@
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="
},
"node_modules/has": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
"integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
"node_modules/has-property-descriptors": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
"integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
"dependencies": {
"function-bind": "^1.1.1"
"es-define-property": "^1.0.0"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-proto": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
"integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
"engines": {
"node": ">= 0.4.0"
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-symbols": {
@@ -454,6 +515,17 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"dependencies": {
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/hosted-git-info": {
"version": "2.8.9",
"resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz",
@@ -468,9 +540,9 @@
}
},
"node_modules/ip": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz",
"integrity": "sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg=="
"version": "1.1.9",
"resolved": "https://registry.npmjs.org/ip/-/ip-1.1.9.tgz",
"integrity": "sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ=="
},
"node_modules/ip2buf": {
"version": "2.0.0",
@@ -486,11 +558,11 @@
"integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg=="
},
"node_modules/is-core-module": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz",
"integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==",
"version": "2.13.1",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz",
"integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==",
"dependencies": {
"has": "^1.0.3"
"hasown": "^2.0.0"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -671,9 +743,9 @@
}
},
"node_modules/object-inspect": {
"version": "1.12.3",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
"integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==",
"version": "1.13.1",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz",
"integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@@ -789,11 +861,11 @@
"integrity": "sha512-IgjKyaUSjsROSO8/D49Ab7hP8mJgTYcqApOqdPhLoPxAplXmkp+zRvsrSQjFn5by0rhm4VH0GAUELIPpx7B1yg=="
},
"node_modules/qs": {
"version": "6.11.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.1.tgz",
"integrity": "sha512-0wsrzgTz/kAVIeuxSjnpGC56rzYtr6JT/2BwEvMaPhFIoYa1aGO8LbzuU1R0uUYQkLpWBTOj0l/CLAJB64J6nQ==",
"version": "6.12.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.12.1.tgz",
"integrity": "sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ==",
"dependencies": {
"side-channel": "^1.0.4"
"side-channel": "^1.0.6"
},
"engines": {
"node": ">=0.6"
@@ -865,11 +937,11 @@
}
},
"node_modules/resolve": {
"version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
"integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==",
"version": "1.22.8",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz",
"integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==",
"dependencies": {
"is-core-module": "^2.9.0",
"is-core-module": "^2.13.0",
"path-parse": "^1.0.7",
"supports-preserve-symlinks-flag": "^1.0.0"
},
@@ -912,6 +984,22 @@
"semver": "bin/semver"
}
},
"node_modules/set-function-length": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
"integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
"dependencies": {
"define-data-property": "^1.1.4",
"es-errors": "^1.3.0",
"function-bind": "^1.1.2",
"get-intrinsic": "^1.2.4",
"gopd": "^1.0.1",
"has-property-descriptors": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
@@ -934,13 +1022,17 @@
}
},
"node_modules/side-channel": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz",
"integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==",
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
"integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
"dependencies": {
"call-bind": "^1.0.0",
"get-intrinsic": "^1.0.2",
"object-inspect": "^1.9.0"
"call-bind": "^1.0.7",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.4",
"object-inspect": "^1.13.1"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -961,9 +1053,9 @@
}
},
"node_modules/spdx-exceptions": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz",
"integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A=="
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz",
"integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w=="
},
"node_modules/spdx-expression-parse": {
"version": "3.0.1",
@@ -975,9 +1067,9 @@
}
},
"node_modules/spdx-license-ids": {
"version": "3.0.13",
"resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.13.tgz",
"integrity": "sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w=="
"version": "3.0.17",
"resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz",
"integrity": "sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg=="
},
"node_modules/split-on-first": {
"version": "1.1.0",
@@ -1054,9 +1146,9 @@
}
},
"node_modules/ts-node": {
"version": "10.9.1",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz",
"integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==",
"version": "10.9.2",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
"dev": true,
"dependencies": {
"@cspotcode/source-map-support": "^0.8.0",
@@ -1105,9 +1197,9 @@
}
},
"node_modules/typescript": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.2.tgz",
"integrity": "sha512-wVORMBGO/FAs/++blGNeAVdbNKtIh1rbBL2EyQ1+J9lClJ93KiiKe8PmFIVdXhHcyv44SL9oglmfeSsndo0jRw==",
"version": "5.4.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
"dev": true,
"peer": true,
"bin": {
@@ -1115,7 +1207,7 @@
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=12.20"
"node": ">=14.17"
}
},
"node_modules/universalify": {
@@ -1126,14 +1218,6 @@
"node": ">= 4.0.0"
}
},
"node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/v8-compile-cache-lib": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
@@ -1193,15 +1277,15 @@
}
},
"@jridgewell/resolve-uri": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz",
"integrity": "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
"dev": true
},
"@jridgewell/sourcemap-codec": {
"version": "1.4.14",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz",
"integrity": "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==",
"version": "1.4.15",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz",
"integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==",
"dev": true
},
"@jridgewell/trace-mapping": {
@@ -1255,9 +1339,9 @@
"integrity": "sha512-PJBIAKS3aMsFTHeQLfAtVpZOduAqGNZZAEH6Kb15htGUcSJWHZ9r2LAjxm3fD4yWT9plYlO0CthcEVnlrrwQLA=="
},
"@tsconfig/node10": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
"integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==",
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz",
"integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==",
"dev": true
},
"@tsconfig/node12": {
@@ -1273,33 +1357,27 @@
"dev": true
},
"@tsconfig/node16": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.3.tgz",
"integrity": "sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==",
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
"integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
"dev": true
},
"@types/node": {
"version": "16.18.16",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.16.tgz",
"integrity": "sha512-ZOzvDRWp8dCVBmgnkIqYCArgdFOO9YzocZp8Ra25N/RStKiWvMOXHMz+GjSeVNe5TstaTmTWPucGJkDw0XXJWA==",
"dev": true
},
"@types/uuid": {
"version": "8.3.4",
"resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-8.3.4.tgz",
"integrity": "sha512-c/I8ZRb51j+pYGAu5CrFMRxqZ2ke4y2grEBO5AUjgSkSk+qT2Ea+OdWElz/OiMf5MNpn2b17kuVBwZLQJXzihw==",
"version": "16.18.96",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.96.tgz",
"integrity": "sha512-84iSqGXoO+Ha16j8pRZ/L90vDMKX04QTYMTfYeE1WrjWaZXuchBehGUZEpNgx7JnmlrIHdnABmpjrQjhCnNldQ==",
"dev": true
},
"acorn": {
"version": "8.8.2",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz",
"integrity": "sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw==",
"version": "8.11.3",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
"integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==",
"dev": true
},
"acorn-walk": {
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz",
"integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==",
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz",
"integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==",
"dev": true
},
"arg": {
@@ -1336,12 +1414,15 @@
}
},
"call-bind": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz",
"integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==",
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz",
"integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==",
"requires": {
"function-bind": "^1.1.1",
"get-intrinsic": "^1.0.2"
"es-define-property": "^1.0.0",
"es-errors": "^1.3.0",
"function-bind": "^1.1.2",
"get-intrinsic": "^1.2.4",
"set-function-length": "^1.2.1"
}
},
"camelcase": {
@@ -1427,6 +1508,16 @@
"resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz",
"integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ=="
},
"define-data-property": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
"integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
"requires": {
"es-define-property": "^1.0.0",
"es-errors": "^1.3.0",
"gopd": "^1.0.1"
}
},
"diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
@@ -1441,6 +1532,19 @@
"is-arrayish": "^0.2.1"
}
},
"es-define-property": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz",
"integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==",
"requires": {
"get-intrinsic": "^1.2.4"
}
},
"es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="
},
"filter-obj": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/filter-obj/-/filter-obj-1.1.0.tgz",
@@ -1455,9 +1559,9 @@
}
},
"function-bind": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="
},
"generate-function": {
"version": "2.3.1",
@@ -1468,13 +1572,23 @@
}
},
"get-intrinsic": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
"integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==",
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz",
"integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==",
"requires": {
"function-bind": "^1.1.1",
"has": "^1.0.3",
"has-symbols": "^1.0.3"
"es-errors": "^1.3.0",
"function-bind": "^1.1.2",
"has-proto": "^1.0.1",
"has-symbols": "^1.0.3",
"hasown": "^2.0.0"
}
},
"gopd": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz",
"integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==",
"requires": {
"get-intrinsic": "^1.1.3"
}
},
"graceful-fs": {
@@ -1482,19 +1596,32 @@
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="
},
"has": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
"integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
"has-property-descriptors": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
"integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
"requires": {
"function-bind": "^1.1.1"
"es-define-property": "^1.0.0"
}
},
"has-proto": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
"integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q=="
},
"has-symbols": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
"integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A=="
},
"hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"requires": {
"function-bind": "^1.1.2"
}
},
"hosted-git-info": {
"version": "2.8.9",
"resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz",
@@ -1506,9 +1633,9 @@
"integrity": "sha512-BYqTHXTGUIvg7t1r4sJNKcbDZkL92nkXA8YtRpbjFHRHGDL/NtUeiBJMeE60kIFN/Mg8ESaWQvftaYMGJzQZCQ=="
},
"ip": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/ip/-/ip-1.1.8.tgz",
"integrity": "sha512-PuExPYUiu6qMBQb4l06ecm6T6ujzhmh+MeJcW9wa89PoAz5pvd4zPgN5WJV104mb6S2T1AwNIAaB70JNrLQWhg=="
"version": "1.1.9",
"resolved": "https://registry.npmjs.org/ip/-/ip-1.1.9.tgz",
"integrity": "sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ=="
},
"ip2buf": {
"version": "2.0.0",
@@ -1521,11 +1648,11 @@
"integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg=="
},
"is-core-module": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz",
"integrity": "sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==",
"version": "2.13.1",
"resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz",
"integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==",
"requires": {
"has": "^1.0.3"
"hasown": "^2.0.0"
}
},
"is-plain-obj": {
@@ -1669,9 +1796,9 @@
"integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A=="
},
"object-inspect": {
"version": "1.12.3",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
"integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g=="
"version": "1.13.1",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz",
"integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ=="
},
"p-limit": {
"version": "1.3.0",
@@ -1760,11 +1887,11 @@
"integrity": "sha512-IgjKyaUSjsROSO8/D49Ab7hP8mJgTYcqApOqdPhLoPxAplXmkp+zRvsrSQjFn5by0rhm4VH0GAUELIPpx7B1yg=="
},
"qs": {
"version": "6.11.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.1.tgz",
"integrity": "sha512-0wsrzgTz/kAVIeuxSjnpGC56rzYtr6JT/2BwEvMaPhFIoYa1aGO8LbzuU1R0uUYQkLpWBTOj0l/CLAJB64J6nQ==",
"version": "6.12.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.12.1.tgz",
"integrity": "sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ==",
"requires": {
"side-channel": "^1.0.4"
"side-channel": "^1.0.6"
}
},
"query-string": {
@@ -1812,11 +1939,11 @@
}
},
"resolve": {
"version": "1.22.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz",
"integrity": "sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw==",
"version": "1.22.8",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz",
"integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==",
"requires": {
"is-core-module": "^2.9.0",
"is-core-module": "^2.13.0",
"path-parse": "^1.0.7",
"supports-preserve-symlinks-flag": "^1.0.0"
}
@@ -1836,6 +1963,19 @@
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
"integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g=="
},
"set-function-length": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
"integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
"requires": {
"define-data-property": "^1.1.4",
"es-errors": "^1.3.0",
"function-bind": "^1.1.2",
"get-intrinsic": "^1.2.4",
"gopd": "^1.0.1",
"has-property-descriptors": "^1.0.2"
}
},
"shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
@@ -1852,13 +1992,14 @@
"dev": true
},
"side-channel": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz",
"integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==",
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz",
"integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==",
"requires": {
"call-bind": "^1.0.0",
"get-intrinsic": "^1.0.2",
"object-inspect": "^1.9.0"
"call-bind": "^1.0.7",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.4",
"object-inspect": "^1.13.1"
}
},
"signal-exit": {
@@ -1876,9 +2017,9 @@
}
},
"spdx-exceptions": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz",
"integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A=="
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz",
"integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w=="
},
"spdx-expression-parse": {
"version": "3.0.1",
@@ -1890,9 +2031,9 @@
}
},
"spdx-license-ids": {
"version": "3.0.13",
"resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.13.tgz",
"integrity": "sha512-XkD+zwiqXHikFZm4AX/7JSCXA98U5Db4AFd5XUg/+9UNtnH75+Z9KxtpYiJZx36mUDVOwH83pl7yvCer6ewM3w=="
"version": "3.0.17",
"resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.17.tgz",
"integrity": "sha512-sh8PWc/ftMqAAdFiBu6Fy6JUOYjqDJBJvIhpfDMyHrr0Rbp5liZqd4TjtQ/RgfLjKFZb+LMx5hpml5qOWy0qvg=="
},
"split-on-first": {
"version": "1.1.0",
@@ -1942,9 +2083,9 @@
"integrity": "sha512-MTBWv3jhVjTU7XR3IQHllbiJs8sc75a80OEhB6or/q7pLTWgQ0bMGQXXYQSrSuXe6WiKWDZ5txXY5P59a/coVA=="
},
"ts-node": {
"version": "10.9.1",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.1.tgz",
"integrity": "sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==",
"version": "10.9.2",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
"dev": true,
"requires": {
"@cspotcode/source-map-support": "^0.8.0",
@@ -1968,9 +2109,9 @@
"integrity": "sha512-8yyRd1ZdNp+AQLGqi3lTaA2k81JjlIZOyFQEsi7GQWBgirnQOxjqVtDEbYHM2Z4yFdJ5AQw0fxBLLnDCl6RXoQ=="
},
"typescript": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.2.tgz",
"integrity": "sha512-wVORMBGO/FAs/++blGNeAVdbNKtIh1rbBL2EyQ1+J9lClJ93KiiKe8PmFIVdXhHcyv44SL9oglmfeSsndo0jRw==",
"version": "5.4.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
"dev": true,
"peer": true
},
@@ -1979,11 +2120,6 @@
"resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz",
"integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg=="
},
"uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
},
"v8-compile-cache-lib": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/bticino",
"version": "0.0.15",
"version": "0.0.16",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",
@@ -34,14 +34,12 @@
"dependencies": {
"@slyoldfox/sip": "^0.0.6-1",
"sdp": "^3.0.3",
"stun": "^2.1.0",
"uuid": "^8.3.2"
"stun": "^2.1.0"
},
"devDependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^16.9.6",
"@types/uuid": "^8.3.4",
"cross-env": "^7.0.3",
"ts-node": "^10.9.1"
}

View File

@@ -1,22 +1,22 @@
import { closeQuiet, createBindUdp, createBindZero, listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
import { createBindUdp, listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
import { sleep } from '@scrypted/common/src/sleep';
import { RtspServer } from '@scrypted/common/src/rtsp-server';
import { addTrackControls, parseSdp } from '@scrypted/common/src/sdp-utils';
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, MotionSensor, PictureOptions, Reboot, ResponseMediaStreamOptions, ScryptedDeviceBase, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, MotionSensor, PictureOptions, Reboot, ResponseMediaStreamOptions, ScryptedDeviceBase, ScryptedInterface, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
import { SipCallSession } from '../../sip/src/sip-call-session';
import { RtpDescription, getPayloadType, getSequenceNumber, isRtpMessagePayloadType, isStunMessage } from '../../sip/src/rtp-utils';
import { VoicemailHandler } from './bticino-voicemailHandler';
import { CompositeSipMessageHandler } from '../../sip/src/compositeSipMessageHandler';
import { SipHelper } from './sip-helper';
import child_process, { ChildProcess } from 'child_process';
import dgram from 'dgram';
import { BticinoStorageSettings } from './storage-settings';
import { BticinoSipPlugin } from './main';
import { BticinoSipLock } from './bticino-lock';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from '@scrypted/common/src/media-helpers';
import { safePrintFFmpegArguments } from '@scrypted/common/src/media-helpers';
import { PersistentSipManager } from './persistent-sip-manager';
import { InviteHandler } from './bticino-inviteHandler';
import { SipOptions, SipRequest } from '../../sip/src/sip-manager';
import { startRtpForwarderProcess } from '../../webrtc/src/rtp-forwarders';
import fs from "fs"
import url from "url"
import path from 'path';
@@ -37,8 +37,7 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements MotionSensor
private session: SipCallSession
private remoteRtpDescription: Promise<RtpDescription>
private audioOutForwarder: dgram.Socket
private audioOutProcess: ChildProcess
private forwarder
private refreshTimeout: NodeJS.Timeout
public requestHandlers: CompositeSipMessageHandler = new CompositeSipMessageHandler()
public incomingCallRequest : SipRequest
@@ -276,21 +275,27 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements MotionSensor
}
async takePicture(option?: PictureOptions): Promise<MediaObject> {
const thumbnailCacheTime : number = parseInt( this.storage?.getItem('thumbnailCacheTime') ) * 1000 || 300000
const now = new Date().getTime()
if( !this.lastImageRefresh || this.lastImageRefresh + thumbnailCacheTime < now ) {
// get a proxy object to make sure we pass prebuffer when already watching a stream
let cam : VideoCamera = sdk.systemManager.getDeviceById<VideoCamera>(this.id)
let vs : MediaObject = await cam.getVideoStream()
let buf : Buffer = await mediaManager.convertMediaObjectToBuffer(vs, 'image/jpeg');
this.cachedImage = buf
this.lastImageRefresh = new Date().getTime()
this.console.log(`Camera picture updated and cached: ${this.lastImageRefresh} + cache time: ${thumbnailCacheTime} < ${now}`)
let rebroadcastEnabled = this.interfaces?.includes( "mixin:@scrypted/prebuffer-mixin")
if( rebroadcastEnabled ) {
const thumbnailCacheTime : number = parseInt( this.storage?.getItem('thumbnailCacheTime') ) * 1000 || 300000
const now = new Date().getTime()
if( !this.lastImageRefresh || this.lastImageRefresh + thumbnailCacheTime < now ) {
// get a proxy object to make sure we pass prebuffer when already watching a stream
let cam : VideoCamera = sdk.systemManager.getDeviceById<VideoCamera>(this.id)
let vs : MediaObject = await cam.getVideoStream()
let buf : Buffer = await mediaManager.convertMediaObjectToBuffer(vs, 'image/jpeg');
this.cachedImage = buf
this.lastImageRefresh = new Date().getTime()
this.console.log(`Camera picture updated and cached: ${this.lastImageRefresh} + cache time: ${thumbnailCacheTime} < ${now}`)
} else {
this.console.log(`Not refreshing camera picture: ${this.lastImageRefresh} + cache time: ${thumbnailCacheTime} < ${now}`)
}
return mediaManager.createMediaObject(this.cachedImage, 'image/jpeg')
} else {
this.console.log(`Not refreshing camera picture: ${this.lastImageRefresh} + cache time: ${thumbnailCacheTime} < ${now}`)
throw new Error("To enable snapshots, enable rebroadcast plugin or set a Snapshot URL in the Snapshot plugin to an external image.");
}
return mediaManager.createMediaObject(this.cachedImage, 'image/jpeg')
}
async getPictureOptions(): Promise<PictureOptions[]> {
@@ -317,52 +322,31 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements MotionSensor
this.session = await this.callIntercom( cleanup )
}
this.stopIntercom();
const ffmpegInput: FFmpegInput = JSON.parse((await mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput)).toString());
const audioOutForwarder = await createBindZero()
this.audioOutForwarder = audioOutForwarder.server
const ffmpegInput = await sdk.mediaManager.convertMediaObjectToJSON<FFmpegInput>(media, ScryptedMimeTypes.FFmpegInput);
let address = (await this.remoteRtpDescription).address
audioOutForwarder.server.on('message', message => {
if( this.session )
this.session.audioSplitter.send(message, 40004, address)
return null
});
const args = ffmpegInput.inputArguments.slice();
args.push(
'-vn', '-dn', '-sn',
'-acodec', 'speex',
'-flags', '+global_header',
'-ac', '1',
'-ar', '8k',
'-f', 'rtp',
//'-srtp_out_suite', 'AES_CM_128_HMAC_SHA1_80',
//'-srtp_out_params', encodeSrtpOptions(this.decodedSrtpOptions),
`rtp://127.0.0.1:${audioOutForwarder.port}?pkt_size=188`,
);
this.console.log("===========================================")
safePrintFFmpegArguments( this.console, args )
this.console.log("===========================================")
const cp = child_process.spawn(await mediaManager.getFFmpegPath(), args);
ffmpegLogInitialOutput(this.console, cp)
this.audioOutProcess = cp;
cp.on('exit', () => this.console.log('two way audio ended'));
this.session.onCallEnded.subscribe(() => {
closeQuiet(audioOutForwarder.server);
safeKillFFmpeg(cp)
this.forwarder = await startRtpForwarderProcess(this.console, ffmpegInput, {
audio: {
codecCopy: 'speex',
encoderArguments: [
'-vn', '-sn', '-dn',
'-acodec', 'speex',
'-flags', '+global_header',
'-ac', '1',
'-ar', '8k',
'-f', 'rtp',
],
onRtp: rtp => {
this.session?.audioSplitter?.send(rtp, 40004, address)
}
}
});
}
async stopIntercom(): Promise<void> {
closeQuiet(this.audioOutForwarder)
this.audioOutProcess?.kill('SIGKILL')
this.audioOutProcess = undefined
this.audioOutForwarder = undefined
this.forwarder?.kill()
this.forwarder = undefined
}
resetStreamTimeout() {
@@ -572,12 +556,24 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements MotionSensor
// Call the C300X
this.remoteRtpDescription = sip.callOrAcceptInvite(
( audio ) => {
return [
// this SDP is used by the intercom and will send the encrypted packets which we don't care about to the loopback on port 65000 of the intercom
`m=audio 65000 RTP/SAVP 110`,
`a=rtpmap:110 speex/8000`,
`a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:${this.keyAndSalt}`,
]
let audioSection = [
// this SDP is used by the intercom and will send the encrypted packets which we don't care about to the loopback on port 65000 of the intercom
`m=audio 65000 RTP/SAVP 110`,
`a=rtpmap:110 speex/8000`,
`a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:${this.keyAndSalt}`,
]
if( !this.incomingCallRequest ) {
let DEVADDR = this.storage.getItem('DEVADDR');
if( DEVADDR ) {
audioSection.unshift('a=DEVADDR:' + DEVADDR)
} else {
if( sipOptions.to.toLocaleLowerCase().indexOf('c300x') >= 0 || sipOptions.to.toLocaleLowerCase().indexOf('c100x') >= 0 ) {
// Needed for bt_answering_machine (bticino specific), to check for c100X
audioSection.unshift('a=DEVADDR:20')
}
}
}
return audioSection
}, ( video ) => {
return [
// this SDP is used by the intercom and will send the encrypted packets which we don't care about to the loopback on port 65000 of the intercom

File diff suppressed because it is too large Load Diff

View File

@@ -42,7 +42,7 @@
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"bpmux": "^8.2.1",
"cloudflared": "^0.4.0",
"cloudflared": "^0.5.2",
"exponential-backoff": "^3.1.1",
"http-proxy": "^1.18.1",
"nat-upnp": "file:./external/node-nat-upnp"
@@ -51,7 +51,7 @@
"@types/http-proxy": "^1.17.14",
"@types/ip": "^1.1.3",
"@types/nat-upnp": "^1.1.5",
"@types/node": "^20.11.19"
"@types/node": "^20.14.6"
},
"version": "0.2.13"
"version": "0.2.15"
}

View File

@@ -531,8 +531,9 @@ class ScryptedCloud extends ScryptedDeviceBase implements OauthClient, Settings,
throw new Error('@scrypted/cloud is not logged in.');
const q = qsstringify({
scope: local.pathname,
serverId: this.storageSettings.values.serverId,
ttl,
})
});
const scope = await httpFetch({
url: `https://${this.getHostname()}/_punch/scope?${q}`,
headers: {
@@ -951,13 +952,13 @@ class ScryptedCloud extends ScryptedDeviceBase implements OauthClient, Settings,
}
async startCloudflared() {
if (!this.storageSettings.values.cloudflareEnabled) {
this.console.log('cloudflared is disabled.');
return;
}
while (true) {
try {
if (!this.storageSettings.values.cloudflareEnabled) {
this.console.log('cloudflared is disabled.');
return;
}
this.console.log('starting cloudflared');
this.cloudflared = await backOff(async () => {
const pluginVolume = process.env.SCRYPTED_PLUGIN_VOLUME;
@@ -1057,12 +1058,13 @@ class ScryptedCloud extends ScryptedDeviceBase implements OauthClient, Settings,
maxDelay: 300000,
});
await once(this.cloudflared.child, 'exit');
throw new Error('cloudflared exited.');
await once(this.cloudflared.child, 'exit').catch(() => { });
// the successfully started cloudflared process may exit at some point, loop and allow it to restart.
this.console.error('cloudflared exited');
}
catch (e) {
// this error may be reached if the cloudflared backoff fails.
this.console.error('cloudflared error', e);
throw e;
}
finally {
this.cloudflared = undefined;

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/core",
"version": "0.3.24",
"version": "0.3.28",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/core",
"version": "0.3.24",
"version": "0.3.28",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/core",
"version": "0.3.24",
"version": "0.3.28",
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -2,6 +2,7 @@ import fs from 'fs';
import child_process from 'child_process';
import { once } from 'events';
import sdk from '@scrypted/sdk';
import { stdout } from 'process';
export const SCRYPTED_INSTALL_ENVIRONMENT_LXC = 'lxc';
@@ -41,6 +42,37 @@ export async function checkLxcDependencies() {
sdk.log.a('Failed to daemon-reload systemd.');
}
try {
// intel opencl icd is broken from their official apt repos on kernel versions 6.8, which ships with ubuntu 24.04 and proxmox 8.2.
// the intel apt repo has not been updated yet.
// the current workaround is to install the release manually.
// https://github.com/intel/compute-runtime/releases/tag/24.13.29138.7
const output = await new Promise<string>((r,f)=> child_process.exec("sh -c 'apt show versions intel-opencl-icd'", (err, stdout, stderr) => {
if (err)
f(err);
else
r(stdout + '\n' + stderr);
}));
if (
// apt
output.includes('Version: 23')
// was installed via script at some point
|| output.includes('Version: 24.13.29138.7')
// current script version: 24.17.29377.6
) {
const cp = child_process.spawn('sh', ['-c', 'curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-graphics.sh | bash']);
const [exitCode] = await once(cp, 'exit');
if (exitCode !== 0)
sdk.log.a('Failed to install intel-opencl-icd.');
else
needRestart = true;
}
}
catch (e) {
sdk.log.a('Failed to verify/install intel-opencl-icd version.');
}
if (needRestart)
sdk.log.a('A system update is pending. Please restart Scrypted to apply changes.');
}

View File

@@ -26,6 +26,7 @@ export function loginScrypted(username: string, password: string, change_passwor
username,
password,
change_password,
maxAge: 7 * 24 * 60 * 60 * 1000,
});
}

View File

@@ -161,10 +161,10 @@ export default {
let t = ``;
let toffset = 0;
if (detection.score && detection.className !== 'motion') {
t += `<tspan x='${x}' dy='${toffset}em'>${Math.round(detection.score * 100) / 100}</tspan>`
t += `<tspan x='${x}' dy='${toffset}em'>${Math.round((detection.labelScore || detection.score) * 100) / 100}</tspan>`
toffset -= 1.2;
}
const tname = detection.className + (detection.id ? `: ${detection.id}` : '')
const tname = (detection.label || detection.className) + (detection.id ? `: ${detection.id}` : '')
t += `<tspan x='${x}' dy='${toffset}em'>${tname}</tspan>`
const fs = 20;

View File

@@ -1,25 +1,25 @@
{
"name": "@scrypted/coreml",
"version": "0.1.49",
"version": "0.1.65",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/coreml",
"version": "0.1.49",
"version": "0.1.65",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.101",
"version": "0.3.31",
"dev": true,
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"axios": "^1.6.5",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
@@ -65,7 +65,7 @@
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"axios": "^1.6.5",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",

View File

@@ -42,5 +42,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.49"
"version": "0.1.65"
}

View File

@@ -26,14 +26,16 @@ predictExecutor = concurrent.futures.ThreadPoolExecutor(1, "CoreML-Predict")
availableModels = [
"Default",
"scrypted_yolov10m_320",
"scrypted_yolov10n_320",
"scrypted_yolo_nas_s_320",
"scrypted_yolov9e_320",
"scrypted_yolov9c_320",
"scrypted_yolov9c",
"scrypted_yolov9s_320",
"scrypted_yolov9t_320",
"scrypted_yolov6n_320",
"scrypted_yolov6n",
"scrypted_yolov6s_320",
"scrypted_yolov6s",
"scrypted_yolov8n_320",
"scrypted_yolov8n",
"ssdlite_mobilenet_v2",
"yolov4-tiny",
]
@@ -77,9 +79,11 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
self.storage.setItem("model", "Default")
model = "scrypted_yolov9c_320"
self.yolo = "yolo" in model
self.scrypted_yolov10n = "scrypted_yolov10" in model
self.scrypted_yolo_nas = "scrypted_yolo_nas" in model
self.scrypted_yolo = "scrypted_yolo" in model
self.scrypted_model = "scrypted" in model
model_version = "v7"
model_version = "v8"
mlmodel = "model" if self.scrypted_yolo else model
print(f"model: {model}")
@@ -132,6 +136,8 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
self.loop = asyncio.get_event_loop()
self.minThreshold = 0.2
self.faceDevice = None
self.textDevice = None
asyncio.ensure_future(self.prepareRecognitionModels(), loop=self.loop)
async def prepareRecognitionModels(self):
@@ -169,9 +175,11 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
async def getDevice(self, nativeId: str) -> Any:
if nativeId == "facerecognition":
return CoreMLFaceRecognition(nativeId)
self.faceDevice = self.faceDevice or CoreMLFaceRecognition(nativeId)
return self.faceDevice
if nativeId == "textrecognition":
return CoreMLTextRecognition(nativeId)
self.textDevice = self.textDevice or CoreMLTextRecognition(nativeId)
return self.textDevice
raise Exception("unknown device")
async def getSettings(self) -> list[Setting]:
@@ -211,6 +219,18 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
if self.yolo:
out_dict = await self.queue_batch({self.input_name: input})
if self.scrypted_yolov10n:
results = list(out_dict.values())[0][0]
objs = yolo.parse_yolov10(results)
ret = self.create_detection_result(objs, src_size, cvss)
return ret
if self.scrypted_yolo_nas:
predictions = list(out_dict.values())
objs = yolo.parse_yolo_nas(predictions)
ret = self.create_detection_result(objs, src_size, cvss)
return ret
if self.scrypted_yolo:
results = list(out_dict.values())[0][0]
objs = yolo.parse_yolov9(results)

View File

@@ -3,6 +3,7 @@ from __future__ import annotations
import concurrent.futures
import os
import asyncio
import coremltools as ct
import numpy as np
# import Quartz
@@ -10,6 +11,7 @@ import numpy as np
# import Vision
from predict.face_recognize import FaceRecognizeDetection
from PIL import Image
def euclidean_distance(arr1, arr2):
@@ -29,6 +31,8 @@ predictExecutor = concurrent.futures.ThreadPoolExecutor(8, "Vision-Predict")
class CoreMLFaceRecognition(FaceRecognizeDetection):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
self.detectExecutor = concurrent.futures.ThreadPoolExecutor(1, "detect-face")
self.recogExecutor = concurrent.futures.ThreadPoolExecutor(1, "recog-face")
def downloadModel(self, model: str):
model_version = "v7"
@@ -51,23 +55,29 @@ class CoreMLFaceRecognition(FaceRecognizeDetection):
inputName = model.get_spec().description.input[0].name
return model, inputName
def predictDetectModel(self, input):
model, inputName = self.detectModel
out_dict = model.predict({inputName: input})
results = list(out_dict.values())[0][0]
async def predictDetectModel(self, input: Image.Image):
def predict():
model, inputName = self.detectModel
out_dict = model.predict({inputName: input})
results = list(out_dict.values())[0][0]
return results
results = await asyncio.get_event_loop().run_in_executor(
self.detectExecutor, lambda: predict()
)
return results
def predictFaceModel(self, input):
model, inputName = self.faceModel
out_dict = model.predict({inputName: input})
return out_dict["var_2167"][0]
async def predictFaceModel(self, input: np.ndarray):
def predict():
model, inputName = self.faceModel
out_dict = model.predict({inputName: input})
results = list(out_dict.values())[0][0]
return results
results = await asyncio.get_event_loop().run_in_executor(
self.recogExecutor, lambda: predict()
)
return results
def predictTextModel(self, input):
model, inputName = self.textModel
out_dict = model.predict({inputName: input})
preds = out_dict["linear_2"]
return preds
# def predictVision(self, input: Image.Image) -> asyncio.Future[list[Prediction]]:
# buffer = input.tobytes()
# myData = NSData.alloc().initWithBytes_length_(buffer, len(buffer))

View File

@@ -1,8 +1,13 @@
from __future__ import annotations
import concurrent.futures
import os
import asyncio
import coremltools as ct
import numpy as np
from PIL import Image
from predict.text_recognize import TextRecognition
@@ -11,8 +16,11 @@ class CoreMLTextRecognition(TextRecognition):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
self.detectExecutor = concurrent.futures.ThreadPoolExecutor(1, "detect-text")
self.recogExecutor = concurrent.futures.ThreadPoolExecutor(1, "recog-text")
def downloadModel(self, model: str):
model_version = "v7"
model_version = "v8"
mlmodel = "model"
files = [
@@ -32,14 +40,24 @@ class CoreMLTextRecognition(TextRecognition):
inputName = model.get_spec().description.input[0].name
return model, inputName
def predictDetectModel(self, input):
model, inputName = self.detectModel
out_dict = model.predict({inputName: input})
results = list(out_dict.values())[0]
async def predictDetectModel(self, input: Image.Image):
def predict():
model, inputName = self.detectModel
out_dict = model.predict({inputName: input})
results = list(out_dict.values())[0]
return results
results = await asyncio.get_event_loop().run_in_executor(
self.detectExecutor, lambda: predict()
)
return results
def predictTextModel(self, input):
model, inputName = self.textModel
out_dict = model.predict({inputName: input})
preds = out_dict["linear_2"]
async def predictTextModel(self, input: np.ndarray):
def predict():
model, inputName = self.textModel
out_dict = model.predict({inputName: input})
preds = out_dict["linear_2"]
return preds
preds = await asyncio.get_event_loop().run_in_executor(
self.recogExecutor, lambda: predict()
)
return preds

View File

@@ -1 +1 @@
../../tensorflow-lite/src/detect
../../openvino/src/detect/

View File

@@ -1 +1 @@
../../tensorflow-lite/src/predict
../../openvino/src/predict

View File

@@ -1 +1 @@
opencv-python
opencv-python==4.10.0.82

View File

@@ -1,3 +1,4 @@
# 2024-04-23 - modify timestamp to force pip reinstall
# must ensure numpy is pinned to prevent dependencies with an unpinned numpy from pulling numpy>=2.0.
numpy==1.26.4
coremltools==7.1
Pillow>=5.4.1
Pillow==10.3.0

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.147",
"version": "0.0.149",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/hikvision",
"version": "0.0.147",
"version": "0.0.149",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.147",
"version": "0.0.149",
"description": "Hikvision Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",

View File

@@ -189,7 +189,9 @@ export class HikvisionCameraAPI {
continue;
if (ignore === boundaryEnd)
continue;
if (ignore !== boundary) {
if (ignore !== boundary
// older hikvision nvrs send a boundary in the headers, but then use a totally different constant boundary value
&& ignore != "--boundary") {
this.console.error('expected boundary but found', ignore);
throw new Error('expected boundary');
}

View File

@@ -134,7 +134,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
const xml = await xml2js.parseStringPromise(data);
const [channelId] = xml.EventNotificationAlert.channelID;
const [channelId] = xml.EventNotificationAlert.channelID || xml.EventNotificationAlert.dynChannelID;
if (!await checkCameraNumber(channelId)) {
this.console.warn('chann fail')
return;

View File

If recordings don't work, it's generally because of a few reasons, **follow the steps
### HomeKit Discovery and Pairing Issues
* Ensure all your Home hubs are online and updated. Power cycling them is recommended in case one is stuck.
* Ensure all your Apple TV and Home Pods are online and updated. Power cycling them is recommended in case one is stuck.
* Ensure your Apple TV and Home Pods are on the same subnet as the Scrypted server.
* Ensure LAN/WLAN multicast is enabled on your router.
* Ensure the iOS device you are using for pairing is on the same network (pairing will fail on cellular).
* Ensure the Docker installation (if applicable) is using host networking. This configuration is the default if the official Scrypted Docker compose install script was used.
* Try switching the mDNS advertiser used in the HomeKit plugin settings.
* Try disabling IGMP Snooping on your router.
### HomeKit Live Streaming Timeout (Recordings may be working)

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/homekit",
"version": "1.2.54",
"version": "1.2.57",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/homekit",
"version": "1.2.54",
"version": "1.2.57",
"dependencies": {
"@koush/werift-src": "file:../../external/werift",
"check-disk-space": "^3.4.0",
@@ -47,26 +47,20 @@
"examples/*"
],
"devDependencies": {
"@biomejs/biome": "^1.4.1",
"@types/jest": "^29.5.11",
"@types/node": "^20.10.4",
"@typescript-eslint/eslint-plugin": "^6.14.0",
"@typescript-eslint/parser": "^6.14.0",
"eslint": "^8.55.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.0.1",
"eslint-plugin-simple-import-sort": "^10.0.0",
"@types/node": "^20.10.6",
"jest": "^29.7.0",
"knip": "^3.7.0",
"knip": "^3.9.0",
"node-actionlint": "^1.2.2",
"organize-imports-cli": "^0.10.0",
"prettier": "^3.1.1",
"process": "^0.11.10",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.2",
"ts-node-dev": "^2.0.0",
"typedoc": "0.25.4",
"typedoc": "0.25.5",
"typedoc-plugin-markdown": "3.17.1",
"typescript": "5.0.4"
"typescript": "5.3.3"
},
"engines": {
"node": ">=16"
@@ -127,7 +121,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.18",
"version": "0.3.29",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -1306,26 +1300,20 @@
"@koush/werift-src": {
"version": "file:../../external/werift",
"requires": {
"@biomejs/biome": "^1.4.1",
"@types/jest": "^29.5.11",
"@types/node": "^20.10.4",
"@typescript-eslint/eslint-plugin": "^6.14.0",
"@typescript-eslint/parser": "^6.14.0",
"eslint": "^8.55.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-prettier": "^5.0.1",
"eslint-plugin-simple-import-sort": "^10.0.0",
"@types/node": "^20.10.6",
"jest": "^29.7.0",
"knip": "^3.7.0",
"knip": "^3.9.0",
"node-actionlint": "^1.2.2",
"organize-imports-cli": "^0.10.0",
"prettier": "^3.1.1",
"process": "^0.11.10",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.2",
"ts-node-dev": "^2.0.0",
"typedoc": "0.25.4",
"typedoc": "0.25.5",
"typedoc-plugin-markdown": "3.17.1",
"typescript": "5.0.4"
"typescript": "5.3.3"
}
},
"@leichtgewicht/ip-codec": {

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/homekit",
"version": "1.2.54",
"version": "1.2.57",
"description": "HomeKit Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",

View File

@@ -166,10 +166,12 @@ export class HomeKitPlugin extends ScryptedDeviceBase implements MixinProvider,
case MDNSAdvertiser.CIAO:
break;
default:
if (fs.existsSync('/var/run/avahi-daemon/'))
advertiser = MDNSAdvertiser.AVAHI;
else
advertiser = MDNSAdvertiser.CIAO;
advertiser = MDNSAdvertiser.CIAO;
// this avahi detection doesn't work sometimes? fails silently.
// if (fs.existsSync('/var/run/avahi-daemon/'))
// advertiser = MDNSAdvertiser.AVAHI;
// else
// advertiser = MDNSAdvertiser.CIAO;
break;
}
return advertiser;
@@ -267,8 +269,6 @@ export class HomeKitPlugin extends ScryptedDeviceBase implements MixinProvider,
},
undefined, 'Pairing'));
storageSettings.settings.pincode.persistedDefaultValue = randomPinCode();
// TODO: change this value after this current default has been persisted to existing clients.
// changing it now will cause existing accessories be renamed.
storageSettings.settings.addIdentifyingMaterial.persistedDefaultValue = false;
const mixinConsole = deviceManager.getMixinConsole(device.id, this.nativeId);

View File

@@ -117,7 +117,7 @@ addSupportedType({
},
closeRecordingStream(streamId, reason) {
const r = openRecordingStreams.get(streamId);
r?.throw(new Error(reason?.toString()));
console.log(`motion recording closed ${reason > 0 ? `(error code: ${reason})` : ''}`);
openRecordingStreams.delete(streamId);
},
updateRecordingActive(active) {

View File

@@ -321,6 +321,9 @@ export async function* handleFragmentsRequests(streamId: number, device: Scrypte
let moov: Buffer[];
for await (const box of generator) {
if (!isOpen())
return;
const { header, type, data } = box;
// console.log('motion fragment box', type);
@@ -352,6 +355,8 @@ export async function* handleFragmentsRequests(streamId: number, device: Scrypte
needSkip = false;
continue;
}
if (!isOpen())
return;
const fragment = Buffer.concat(pending);
saveFragment(i, fragment);
pending = [];
@@ -361,8 +366,6 @@ export async function* handleFragmentsRequests(streamId: number, device: Scrypte
data: fragment,
isLast,
}
if (!isOpen())
return;
yield recordingPacket;
if (wasLast)
break;
@@ -370,7 +373,7 @@ export async function* handleFragmentsRequests(streamId: number, device: Scrypte
}
}
catch (e) {
console.log(`motion recording completed ${e}`);
console.log(`motion recording error ${e}`);
}
finally {
console.log(`motion recording finished`);

View File

@@ -24,8 +24,6 @@ export function createSnapshotHandler(device: ScryptedDevice & VideoCamera & Cam
width: request.width,
height: request.height,
},
// wait up to 2 seconds for the snapshot image, fallback to cached image
timeout: 2000,
})
return await mediaManager.convertMediaObjectToBuffer(media, 'image/jpeg');
}

View File

@@ -354,15 +354,11 @@ export function createCameraStreamingDelegate(device: ScryptedDevice & VideoCame
if (twoWayAudio) {
let rtspServer: RtspServer;
let track: string;
let playing = false;
session.audioReturn.once('message', async buffer => {
let twoWayAudioState: 'stopped' | 'starting' | 'started' = 'stopped';
const start = async () => {
try {
const decrypted = srtpSession.decrypt(buffer);
const rtp = RtpPacket.deSerialize(decrypted);
if (rtp.header.payloadType !== session.startRequest.audio.pt)
return;
twoWayAudioState = 'starting';
const { clientPromise, url } = await listenZeroSingleClient();
const rtspUrl = url.replace('tcp', 'rtsp');
let sdp = createReturnAudioSdp(session.startRequest.audio);
@@ -393,7 +389,7 @@ export function createCameraStreamingDelegate(device: ScryptedDevice & VideoCame
device.stopIntercom();
client.destroy();
rtspServer = undefined;
playing = false;
twoWayAudioState = 'stopped';
}
// stop the intercom if the client dies for any reason.
// allow the streaming session to continue however.
@@ -402,16 +398,17 @@ export function createCameraStreamingDelegate(device: ScryptedDevice & VideoCame
rtspServer = new RtspServer(client, sdp);
await rtspServer.handlePlayback();
playing = true;
twoWayAudioState = 'started';
}
catch (e) {
console.error('two way audio failed', e);
twoWayAudioState = 'stopped';
}
});
};
const srtpSession = new SrtpSession(session.aconfig);
session.audioReturn.on('message', buffer => {
if (!playing)
if (twoWayAudioState === 'starting')
return;
const decrypted = srtpSession.decrypt(buffer);
@@ -420,6 +417,9 @@ export function createCameraStreamingDelegate(device: ScryptedDevice & VideoCame
if (rtp.header.payloadType !== session.startRequest.audio.pt)
return;
if (twoWayAudioState !== 'started')
return start();
rtspServer.sendTrack(track, decrypted, false);
});
}

View File

@@ -1,4 +1,4 @@
import { Entry, EntrySensor, ScryptedDevice, ScryptedDeviceType, ScryptedInterface } from '@scrypted/sdk';
import { Brightness, Entry, EntrySensor, ScryptedDevice, ScryptedDeviceType, ScryptedInterface } from '@scrypted/sdk';
import { addSupportedType, bindCharacteristic, DummyDevice, } from '../common';
import { Characteristic, CharacteristicEventTypes, CharacteristicSetCallback, CharacteristicValue, NodeCallback, Service } from '../hap';
import { makeAccessory } from './common';
@@ -7,38 +7,62 @@ import type { HomeKitPlugin } from "../main";
addSupportedType({
type: ScryptedDeviceType.WindowCovering,
probe(device: DummyDevice): boolean {
return device.interfaces.includes(ScryptedInterface.Entry) && device.interfaces.includes(ScryptedInterface.EntrySensor);
return (device.interfaces.includes(ScryptedInterface.Entry) && device.interfaces.includes(ScryptedInterface.EntrySensor))
|| device.interfaces.includes(ScryptedInterface.Brightness);
},
getAccessory: async (device: ScryptedDevice & Entry & EntrySensor, homekitPlugin: HomeKitPlugin) => {
getAccessory: async (device: ScryptedDevice & Entry & EntrySensor & Brightness, homekitPlugin: HomeKitPlugin) => {
const accessory = makeAccessory(device, homekitPlugin);
const service = accessory.addService(Service.WindowCovering, device.name);
bindCharacteristic(device, ScryptedInterface.EntrySensor, service, Characteristic.CurrentPosition,
() => !!device.entryOpen ? 100 : 0);
if (device.interfaces.includes(ScryptedInterface.Entry)) {
bindCharacteristic(device, ScryptedInterface.EntrySensor, service, Characteristic.CurrentPosition,
() => !!device.entryOpen ? 100 : 0);
bindCharacteristic(device, ScryptedInterface.EntrySensor, service, Characteristic.TargetPosition,
() => !!device.entryOpen ? 100 : 0);
let targetPosition = !!device.entryOpen ? 100 : 0;
bindCharacteristic(device, ScryptedInterface.EntrySensor, service, Characteristic.TargetPosition,
() => targetPosition);
let props = {
minValue: 0,
maxValue: 100,
minStep: 100,
};
let targetState = !!device.entryOpen ? 100 : 0;
service.getCharacteristic(Characteristic.TargetPosition)
.setProps(props)
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
callback();
if (value === 100) {
targetState = 100;
device.openEntry();
}
else {
targetState = 0;
device.closeEntry();
}
})
const props = {
minValue: 0,
maxValue: 100,
minStep: 100,
};
service.getCharacteristic(Characteristic.TargetPosition)
.setProps(props)
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
callback();
if (value === 100) {
targetPosition = 100;
device.openEntry();
}
else {
targetPosition = 0;
device.closeEntry();
}
});
}
else if (device.interfaces.includes(ScryptedInterface.Brightness)) {
bindCharacteristic(device, ScryptedInterface.Brightness, service, Characteristic.CurrentPosition,
() => device.brightness || 0);
let targetPosition = device.brightness || 0;
bindCharacteristic(device, ScryptedInterface.Brightness, service, Characteristic.TargetPosition,
() => targetPosition);
const props = {
minValue: 0,
maxValue: 100,
minStep: 1,
};
service.getCharacteristic(Characteristic.TargetPosition)
.setProps(props)
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
callback();
targetPosition = value as number;
device.setBrightness(targetPosition);
});
}
return accessory;
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/objectdetector",
"version": "0.1.39",
"version": "0.1.42",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/objectdetector",
"version": "0.1.39",
"version": "0.1.42",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/objectdetector",
"version": "0.1.39",
"version": "0.1.42",
"description": "Scrypted Video Analysis Plugin. Installed alongside a detection service like OpenCV or TensorFlow.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -1159,7 +1159,7 @@ export class ObjectDetectionPlugin extends AutoenableMixinProvider implements Se
async releaseDevice(id: string, nativeId: string): Promise<void> {
if (nativeId?.startsWith(SMART_MOTIONSENSOR_PREFIX)) {
const smart = this.devices.get(nativeId) as SmartMotionSensor;
smart?.listener?.removeListener();
smart?.detectionListener?.removeListener();
}
}

View File

@@ -1,9 +1,10 @@
import sdk, { Camera, EventListenerRegister, MediaObject, MotionSensor, ObjectDetector, ObjectsDetected, Readme, RequestPictureOptions, ResponsePictureOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedNativeId, Setting, SettingValue, Settings } from "@scrypted/sdk";
import { StorageSetting, StorageSettings } from "@scrypted/sdk/storage-settings";
import type { ObjectDetectionPlugin } from "./main";
import { levenshteinDistance } from "./edit-distance";
import type { ObjectDetectionPlugin } from "./main";
export const SMART_MOTIONSENSOR_PREFIX = 'smart-motionsensor-';
export const SMART_OCCUPANCYSENSOR_PREFIX = 'smart-occupancysensor-';
export function createObjectDetectorStorageSetting(): StorageSetting {
return {
@@ -26,7 +27,7 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
},
detectionTimeout: {
title: 'Object Detection Timeout',
description: 'Duration in seconds the sensor will report motion, before resetting.',
description: 'Duration in seconds the sensor will report motion, before resetting. Setting this to 0 will reset the sensor when motion stops.',
type: 'number',
defaultValue: 60,
},
@@ -71,9 +72,17 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
type: 'number',
defaultValue: 2,
},
labelScore: {
group: 'Recognition',
title: 'Label Score',
description: 'The minimum score required for a label to trigger the motion sensor.',
type: 'number',
defaultValue: 0,
}
});
listener: EventListenerRegister;
detectionListener: EventListenerRegister;
motionListener: EventListenerRegister;
timeout: NodeJS.Timeout;
lastPicture: Promise<MediaObject>;
@@ -143,8 +152,10 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
trigger() {
this.resetTrigger();
const duration: number = this.storageSettings.values.detectionTimeout;
this.motionDetected = true;
const duration: number = this.storageSettings.values.detectionTimeout;
if (!duration)
return;
this.timeout = setTimeout(() => {
this.motionDetected = false;
}, duration * 1000);
@@ -152,12 +163,14 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
rebind() {
this.motionDetected = false;
this.listener?.removeListener();
this.listener = undefined;
this.detectionListener?.removeListener();
this.detectionListener = undefined;
this.motionListener?.removeListener();
this.motionListener = undefined;
this.resetTrigger();
const objectDetector: ObjectDetector & ScryptedDevice = this.storageSettings.values.objectDetector;
const objectDetector: ObjectDetector & MotionSensor & ScryptedDevice = this.storageSettings.values.objectDetector;
if (!objectDetector)
return;
@@ -167,13 +180,25 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
const console = sdk.deviceManager.getMixinConsole(objectDetector.id, this.nativeId);
this.listener = objectDetector.listen(ScryptedInterface.ObjectDetector, (source, details, data) => {
this.motionListener = objectDetector.listen({
event: ScryptedInterface.MotionSensor,
watch: true,
}, (source, details, data) => {
const duration: number = this.storageSettings.values.detectionTimeout;
if (duration)
return;
if (!objectDetector.motionDetected)
this.motionDetected = false;
});
this.detectionListener = objectDetector.listen(ScryptedInterface.ObjectDetector, (source, details, data) => {
const detected: ObjectsDetected = data;
if (this.storageSettings.values.requireDetectionThumbnail && !detected.detectionId)
return false;
const { labels, labelDistance } = this.storageSettings.values;
const { labels, labelDistance, labelScore } = this.storageSettings.values;
const match = detected.detections?.find(d => {
if (this.storageSettings.values.requireScryptedNvrDetections && !d.boundingBox)
@@ -208,13 +233,24 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
return false;
for (const label of labels) {
if (label === d.label)
return true;
if (label === d.label) {
if (!labelScore || d.labelScore >= labelScore)
return true;
this.console.log('Label score too low.', d.labelScore);
continue;
}
if (!labelDistance)
continue;
if (levenshteinDistance(label, d.label) <= labelDistance)
if (levenshteinDistance(label, d.label) > labelDistance) {
this.console.log('Label does not match.', label, d.label, d.labelScore);
continue;
}
if (!labelScore || d.labelScore >= labelScore)
return true;
this.console.log('Label does not match.', label, d.label);
this.console.log('Label score too low.', d.labelScore);
}
return false;

View File

@@ -1,10 +1,12 @@
{
// docker installation
// "scrypted.debugHost": "koushik-ubuntuvm",
// "scrypted.serverRoot": "/server",
"scrypted.debugHost": "koushik-ubuntuvm",
"scrypted.serverRoot": "/home/koush/.scrypted",
"scrypted.serverRoot": "/server",
// lxc
// "scrypted.debugHost": "scrypted-server",
// "scrypted.serverRoot": "/root/.scrypted",
// pi local installation
// "scrypted.debugHost": "192.168.2.119",

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/openvino",
"version": "0.1.81",
"version": "0.1.103",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/openvino",
"version": "0.1.81",
"version": "0.1.103",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -33,6 +33,7 @@
"runtime": "python",
"type": "API",
"interfaces": [
"DeviceProvider",
"Settings",
"ObjectDetection",
"ObjectDetectionPreview"
@@ -41,5 +42,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.81"
"version": "0.1.103"
}

View File

@@ -1 +1 @@
../../tensorflow-lite/src/detect
../../openvino/src/detect/

View File

@@ -1,37 +1,45 @@
from __future__ import annotations
import ast
import asyncio
import concurrent.futures
import json
import platform
import sys
import threading
import traceback
from typing import Any, Tuple
import sys
import platform
import numpy as np
import onnxruntime
import scrypted_sdk
from PIL import Image
import ast
from scrypted_sdk.other import SettingValue
from scrypted_sdk.types import Setting
import concurrent.futures
import common.yolo as yolo
from predict import PredictPlugin
predictExecutor = concurrent.futures.ThreadPoolExecutor(1, "ONNX-Predict")
from .face_recognition import ONNXFaceRecognition
try:
from .text_recognition import ONNXTextRecognition
except:
ONNXTextRecognition = None
availableModels = [
"Default",
"scrypted_yolov10m_320",
"scrypted_yolov10n_320",
"scrypted_yolo_nas_s_320",
"scrypted_yolov6n_320",
"scrypted_yolov6n",
"scrypted_yolov6s_320",
"scrypted_yolov6s",
"scrypted_yolov9c_320",
"scrypted_yolov9c",
"scrypted_yolov9s_320",
"scrypted_yolov9t_320",
"scrypted_yolov8n_320",
"scrypted_yolov8n",
]
def parse_labels(names):
j = ast.literal_eval(names)
ret = {}
@@ -49,51 +57,133 @@ class ONNXPlugin(
if model == "Default" or model not in availableModels:
if model != "Default":
self.storage.setItem("model", "Default")
model = "scrypted_yolov8n_320"
model = "scrypted_yolov9c_320"
self.yolo = "yolo" in model
self.scrypted_yolov10 = "scrypted_yolov10" in model
self.scrypted_yolo_nas = "scrypted_yolo_nas" in model
self.scrypted_yolo = "scrypted_yolo" in model
self.scrypted_model = "scrypted" in model
print(f"model {model}")
onnxmodel = "best" if self.scrypted_model else model
onnxmodel = model if self.scrypted_yolo_nas else "best" if self.scrypted_model else model
model_version = "v2"
model_version = "v3"
onnxfile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/onnx-models/main/{model}/{onnxmodel}.onnx",
f"https://github.com/koush/onnx-models/raw/main/{model}/{onnxmodel}.onnx",
f"{model_version}/{model}/{onnxmodel}.onnx",
)
print(onnxfile)
deviceIds = self.storage.getItem("deviceIds") or '["0"]'
deviceIds = json.loads(deviceIds)
if not len(deviceIds):
deviceIds = ["0"]
self.deviceIds = deviceIds
compiled_models = []
self.compiled_models = {}
try:
sess_options = onnxruntime.SessionOptions()
for deviceId in deviceIds:
sess_options = onnxruntime.SessionOptions()
providers: list[str] = []
if sys.platform == 'darwin':
providers.append("CoreMLExecutionProvider")
if 'linux' in sys.platform and platform.machine() == 'x86_64':
providers.append("CUDAExecutionProvider")
providers: list[str] = []
if sys.platform == 'darwin':
providers.append("CoreMLExecutionProvider")
providers.append('CPUExecutionProvider')
if ('linux' in sys.platform or 'win' in sys.platform) and platform.machine() == 'x86_64':
deviceId = int(deviceId)
providers.append(("CUDAExecutionProvider", { "device_id": deviceId }))
providers.append('CPUExecutionProvider')
compiled_model = onnxruntime.InferenceSession(onnxfile, sess_options=sess_options, providers=providers)
compiled_models.append(compiled_model)
input = compiled_model.get_inputs()[0]
self.model_dim = input.shape[2]
self.input_name = input.name
self.labels = parse_labels(compiled_model.get_modelmeta().custom_metadata_map['names'])
self.compiled_model = onnxruntime.InferenceSession(onnxfile, sess_options=sess_options, providers=providers)
except:
import traceback
traceback.print_exc()
print("Reverting all settings.")
self.storage.removeItem("model")
self.storage.removeItem("deviceIds")
self.requestRestart()
input = self.compiled_model.get_inputs()[0]
self.model_dim = input.shape[2]
self.input_name = input.name
self.labels = parse_labels(self.compiled_model.get_modelmeta().custom_metadata_map['names'])
def executor_initializer():
thread_name = threading.current_thread().name
interpreter = compiled_models.pop()
self.compiled_models[thread_name] = interpreter
print('Runtime initialized on thread {}'.format(thread_name))
self.executor = concurrent.futures.ThreadPoolExecutor(
initializer=executor_initializer,
max_workers=len(compiled_models),
thread_name_prefix="onnx",
)
self.prepareExecutor = concurrent.futures.ThreadPoolExecutor(
max_workers=len(compiled_models),
thread_name_prefix="onnx-prepare",
)
self.faceDevice = None
self.textDevice = None
asyncio.ensure_future(self.prepareRecognitionModels(), loop=self.loop)
async def prepareRecognitionModels(self):
try:
devices = [
{
"nativeId": "facerecognition",
"type": scrypted_sdk.ScryptedDeviceType.Builtin.value,
"interfaces": [
scrypted_sdk.ScryptedInterface.ObjectDetection.value,
],
"name": "ONNX Face Recognition",
},
]
if ONNXTextRecognition:
devices.append(
{
"nativeId": "textrecognition",
"type": scrypted_sdk.ScryptedDeviceType.Builtin.value,
"interfaces": [
scrypted_sdk.ScryptedInterface.ObjectDetection.value,
],
"name": "ONNX Text Recognition",
},
)
await scrypted_sdk.deviceManager.onDevicesChanged(
{
"devices": devices,
}
)
except:
pass
async def getDevice(self, nativeId: str) -> Any:
    """Return the recognition child device for nativeId, creating it lazily."""
    if nativeId == "facerecognition":
        if not self.faceDevice:
            self.faceDevice = ONNXFaceRecognition(self, nativeId)
        return self.faceDevice
    if nativeId == "textrecognition":
        if not self.textDevice:
            self.textDevice = ONNXTextRecognition(self, nativeId)
        return self.textDevice
    raise Exception("unknown device")
async def getSettings(self) -> list[Setting]:
model = self.storage.getItem("model") or "Default"
deviceIds = self.storage.getItem("deviceIds") or '["0"]'
deviceIds = json.loads(deviceIds)
return [
{
"key": "model",
@@ -102,9 +192,26 @@ class ONNXPlugin(
"choices": availableModels,
"value": model,
},
{
"key": "deviceIds",
"title": "Device IDs",
"description": "Optional: Assign multiple CUDA Device IDs to use for detection.",
"choices": deviceIds,
"combobox": True,
"multiple": True,
"value": deviceIds,
},
{
"key": "execution_device",
"title": "Execution Device",
"readonly": True,
"value": onnxruntime.get_device(),
}
]
async def putSetting(self, key: str, value: SettingValue):
    """Persist one setting, notify listeners, and schedule a plugin restart."""
    # Device id selections arrive as lists; store them as JSON text.
    if key == 'deviceIds':
        value = json.dumps(value)
    self.storage.setItem(key, value)
    await self.onDeviceEvent(scrypted_sdk.ScryptedInterface.Settings.value, None)
    self.requestRestart()
@@ -117,25 +224,32 @@ class ONNXPlugin(
return [self.model_dim, self.model_dim]
async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
def predict(input_tensor):
output_tensors = self.compiled_model.run(None, { self.input_name: input_tensor })
objs = yolo.parse_yolov9(output_tensors[0][0])
return objs
def prepare():
im = np.array(input)
im = np.expand_dims(input, axis=0)
im = im.transpose((0, 3, 1, 2)) # BHWC to BCHW, (n, 3, h, w)
im = im.astype(np.float32) / 255.0
im = np.ascontiguousarray(im) # contiguous
return im
im = np.array(input)
im = np.stack([input])
im = im.transpose((0, 3, 1, 2)) # BHWC to BCHW, (n, 3, h, w)
im = im.astype(np.float32) / 255.0
im = np.ascontiguousarray(im) # contiguous
input_tensor = im
def predict(input_tensor):
compiled_model = self.compiled_models[threading.current_thread().name]
output_tensors = compiled_model.run(None, { self.input_name: input_tensor })
if self.scrypted_yolov10:
return yolo.parse_yolov10(output_tensors[0][0])
if self.scrypted_yolo_nas:
return yolo.parse_yolo_nas([output_tensors[1], output_tensors[0]])
return yolo.parse_yolov9(output_tensors[0][0])
try:
input_tensor = await asyncio.get_event_loop().run_in_executor(
self.prepareExecutor, lambda: prepare()
)
objs = await asyncio.get_event_loop().run_in_executor(
predictExecutor, lambda: predict(input_tensor)
self.executor, lambda: predict(input_tensor)
)
except:
import traceback
traceback.print_exc()
raise

View File

@@ -0,0 +1,112 @@
from __future__ import annotations
import asyncio
import concurrent.futures
import platform
import sys
import threading
import numpy as np
import onnxruntime
from PIL import Image
from predict.face_recognize import FaceRecognizeDetection
class ONNXFaceRecognition(FaceRecognizeDetection):
    """Face recognition built on ONNX Runtime, one InferenceSession per device id."""

    def __init__(self, plugin, nativeId: str | None = None):
        self.plugin = plugin
        super().__init__(nativeId=nativeId)

    def downloadModel(self, model: str):
        """Download the ONNX model and compile one session per configured device.

        Returns (compiled_models, input_name, prepareExecutor, executor), where
        compiled_models maps an executor thread name to its InferenceSession.
        """
        onnxmodel = "best" if "scrypted" in model else model
        model_version = "v1"
        onnxfile = self.downloadFile(
            f"https://github.com/koush/onnx-models/raw/main/{model}/{onnxmodel}.onnx",
            f"{model_version}/{model}/{onnxmodel}.onnx",
        )
        print(onnxfile)
        compiled_models_array = []
        compiled_models = {}
        deviceIds = self.plugin.deviceIds
        for deviceId in deviceIds:
            sess_options = onnxruntime.SessionOptions()
            providers: list[str] = []
            if sys.platform == "darwin":
                providers.append("CoreMLExecutionProvider")
            if "linux" in sys.platform and platform.machine() == "x86_64":
                deviceId = int(deviceId)
                providers.append(("CUDAExecutionProvider", {"device_id": deviceId}))
            # CPU is always appended as the fallback provider.
            providers.append("CPUExecutionProvider")
            compiled_model = onnxruntime.InferenceSession(
                onnxfile, sess_options=sess_options, providers=providers
            )
            compiled_models_array.append(compiled_model)
            input = compiled_model.get_inputs()[0]
            input_name = input.name

        def executor_initializer():
            # Each executor thread claims one pooled session, keyed by thread name.
            thread_name = threading.current_thread().name
            interpreter = compiled_models_array.pop()
            compiled_models[thread_name] = interpreter
            print("Runtime initialized on thread {}".format(thread_name))

        executor = concurrent.futures.ThreadPoolExecutor(
            initializer=executor_initializer,
            max_workers=len(compiled_models_array),
            thread_name_prefix="face",
        )
        prepareExecutor = concurrent.futures.ThreadPoolExecutor(
            max_workers=len(compiled_models_array),
            thread_name_prefix="face-prepare",
        )
        return compiled_models, input_name, prepareExecutor, executor

    async def predictDetectModel(self, input: Image.Image):
        """Run the face detector; returns the first output tensor's first batch entry."""
        compiled_models, input_name, prepareExecutor, executor = self.detectModel

        def prepare():
            # PIL image -> BCHW float32 in [0, 1], contiguous for onnxruntime.
            # (The previous `im = np.array(input)` here was dead code: it was
            # immediately overwritten by np.expand_dims.)
            im = np.expand_dims(input, axis=0)
            im = im.transpose((0, 3, 1, 2))  # BHWC to BCHW, (n, 3, h, w)
            im = im.astype(np.float32) / 255.0
            im = np.ascontiguousarray(im)  # contiguous
            return im

        def predict(input_tensor):
            compiled_model = compiled_models[threading.current_thread().name]
            output_tensors = compiled_model.run(None, {input_name: input_tensor})
            return output_tensors

        input_tensor = await asyncio.get_event_loop().run_in_executor(
            prepareExecutor, lambda: prepare()
        )
        objs = await asyncio.get_event_loop().run_in_executor(
            executor, lambda: predict(input_tensor)
        )
        return objs[0][0]

    async def predictFaceModel(self, input: np.ndarray):
        """Run the face embedding model on an already-prepared tensor."""
        compiled_models, input_name, prepareExecutor, executor = self.faceModel

        def predict():
            compiled_model = compiled_models[threading.current_thread().name]
            output_tensors = compiled_model.run(None, {input_name: input})
            return output_tensors

        objs = await asyncio.get_event_loop().run_in_executor(
            executor, lambda: predict()
        )
        return objs[0]

View File

@@ -0,0 +1,102 @@
from __future__ import annotations
import asyncio
import concurrent.futures
import platform
import sys
import threading
import numpy as np
import onnxruntime
from PIL import Image
from predict.text_recognize import TextRecognition
class ONNXTextRecognition(TextRecognition):
    """Text recognition backed by ONNX Runtime, one session per configured device."""

    def __init__(self, plugin, nativeId: str | None = None):
        self.plugin = plugin
        super().__init__(nativeId=nativeId)

    def downloadModel(self, model: str):
        """Fetch the ONNX model and build per-device inference sessions.

        Returns (sessions_by_thread, input_name, prepareExecutor, executor),
        where sessions_by_thread maps executor thread name -> InferenceSession.
        """
        onnxmodel = model
        model_version = "v4"
        onnxfile = self.downloadFile(
            f"https://github.com/koush/onnx-models/raw/main/{model}/{onnxmodel}.onnx",
            f"{model_version}/{model}/{onnxmodel}.onnx",
        )
        print(onnxfile)
        session_pool = []
        sessions_by_thread = {}
        for deviceId in self.plugin.deviceIds:
            sess_options = onnxruntime.SessionOptions()
            providers: list[str] = []
            if sys.platform == "darwin":
                providers.append("CoreMLExecutionProvider")
            if "linux" in sys.platform and platform.machine() == "x86_64":
                providers.append(("CUDAExecutionProvider", {"device_id": int(deviceId)}))
            # CPU fallback is always last in the provider list.
            providers.append("CPUExecutionProvider")
            session = onnxruntime.InferenceSession(
                onnxfile, sess_options=sess_options, providers=providers
            )
            session_pool.append(session)
            input_name = session.get_inputs()[0].name

        def claim_session():
            # Runs once per executor thread: bind one pooled session to the thread.
            thread_name = threading.current_thread().name
            sessions_by_thread[thread_name] = session_pool.pop()
            print("Runtime initialized on thread {}".format(thread_name))

        executor = concurrent.futures.ThreadPoolExecutor(
            initializer=claim_session,
            max_workers=len(session_pool),
            thread_name_prefix="text",
        )
        prepareExecutor = concurrent.futures.ThreadPoolExecutor(
            max_workers=len(session_pool),
            thread_name_prefix="text-prepare",
        )
        return sessions_by_thread, input_name, prepareExecutor, executor

    async def predictDetectModel(self, input: Image.Image):
        """Run the text detector and return its first output tensor."""
        sessions, input_name, prepareExecutor, executor = self.detectModel

        def run_model():
            session = sessions[threading.current_thread().name]
            return session.run(None, {input_name: input})

        outputs = await asyncio.get_event_loop().run_in_executor(
            executor, run_model
        )
        return outputs[0]

    async def predictTextModel(self, input: np.ndarray):
        """Run the text recognizer; the input tensor is cast to float32 first."""
        input = input.astype(np.float32)
        sessions, input_name, prepareExecutor, executor = self.textModel

        def run_model():
            session = sessions[threading.current_thread().name]
            return session.run(None, {input_name: input})

        outputs = await asyncio.get_event_loop().run_in_executor(
            executor, run_model
        )
        return outputs[0]

View File

@@ -1 +1 @@
../../tensorflow-lite/src/predict
../../openvino/src/predict

View File

@@ -0,0 +1 @@
opencv-python==4.10.0.82

View File

@@ -1,12 +1,13 @@
# must ensure numpy is pinned to prevent dependencies with an unpinned numpy from pulling numpy>=2.0.
numpy==1.26.4
# uncomment to require cuda 12, but most stuff is still targeting cuda 11.
# however, stuff targeted for cuda 11 can still run on cuda 12.
# --extra-index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-12/pypi/simple/
onnxruntime-gpu; 'linux' in sys_platform and platform_machine == 'x86_64'
# cpu and coreml execution provider
onnxruntime; 'linux' not in sys_platform or platform_machine != 'x86_64'
# nightly?
# ort-nightly-gpu==1.17.3.dev20240409002
# pillow-simd is available on x64 linux
# pillow-simd confirmed not building with arm64 linux or apple silicon
Pillow>=5.4.1; 'linux' not in sys_platform or platform_machine != 'x86_64'
pillow-simd; 'linux' in sys_platform and platform_machine == 'x86_64'
Pillow==10.3.0

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/opencv",
"version": "0.0.90",
"version": "0.0.91",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/opencv",
"version": "0.0.90",
"version": "0.0.91",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -37,5 +37,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.90"
"version": "0.0.91"
}

View File

@@ -1,13 +1,5 @@
# plugin
numpy>=1.16.2
# must ensure numpy is pinned to prevent dependencies with an unpinned numpy from pulling numpy>=2.0.
numpy==1.26.4
imutils>=0.5.0
# locked to version because 4.8.0.76 is broken.
# todo: check newer versions.
opencv-python==4.8.0.74
# pillow-simd is available on x64 linux
# pillow-simd confirmed not building with arm64 linux or apple silicon
Pillow>=5.4.1; 'linux' not in sys_platform or platform_machine != 'x86_64'
pillow-simd; 'linux' in sys_platform and platform_machine == 'x86_64'
opencv-python==4.10.0.82
Pillow==10.3.0

View File

@@ -4,13 +4,17 @@
// "scrypted.debugHost": "koushik-ubuntu",
// "scrypted.serverRoot": "/server",
// proxmox installation
"scrypted.debugHost": "scrypted-server",
"scrypted.serverRoot": "/root/.scrypted",
// pi local installation
// "scrypted.debugHost": "192.168.2.119",
// "scrypted.serverRoot": "/home/pi/.scrypted",
// local checkout
"scrypted.debugHost": "127.0.0.1",
"scrypted.serverRoot": "/Users/koush/.scrypted",
// "scrypted.debugHost": "127.0.0.1",
// "scrypted.serverRoot": "/Users/koush/.scrypted",
// "scrypted.debugHost": "koushik-winvm",
// "scrypted.serverRoot": "C:\\Users\\koush\\.scrypted",

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/openvino",
"version": "0.1.80",
"version": "0.1.104",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/openvino",
"version": "0.1.80",
"version": "0.1.104",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -42,5 +42,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.80"
"version": "0.1.104"
}

View File

@@ -49,7 +49,7 @@ def calculate_y_change(original_height, skew_angle_radians):
return y_change
async def prepare_text_result(d: ObjectDetectionResult, image: scrypted_sdk.Image, skew_angle: float):
async def prepare_text_result(d: ObjectDetectionResult, image: scrypted_sdk.Image, skew_angle: float, deskew_height: float):
textImage = await crop_text(d, image)
skew_height_change = calculate_y_change(d["boundingBox"][3], skew_angle)
@@ -57,19 +57,29 @@ async def prepare_text_result(d: ObjectDetectionResult, image: scrypted_sdk.Imag
textImage = skew_image(textImage, skew_angle)
# crop skew_height_change from top
if skew_height_change > 0:
textImage = textImage.crop((0, 0, textImage.width, textImage.height - skew_height_change))
textImage = textImage.crop((0, 0, textImage.width, deskew_height))
elif skew_height_change < 0:
textImage = textImage.crop((0, -skew_height_change, textImage.width, textImage.height))
textImage = textImage.crop((0, textImage.height - deskew_height, textImage.width, textImage.height))
new_height = 64
target_height = 64
height_padding = 3
new_height = target_height - height_padding * 2
new_width = int(textImage.width * new_height / textImage.height)
textImage = textImage.resize((new_width, new_height), resample=Image.LANCZOS).convert("L")
new_width = 256
new_width = 384
# average the top pixels
edge_color = textImage.getpixel((0, textImage.height // 2))
# average the bottom pixels
edge_color += textImage.getpixel((textImage.width - 1, textImage.height // 2))
# average the right pixels
edge_color += textImage.getpixel((textImage.width // 2, 0))
# average the left pixels
edge_color += textImage.getpixel((textImage.width // 2, textImage.height - 1))
edge_color = edge_color // 4
# calculate padding dimensions
padding = (0, 0, new_width - textImage.width, 0)
# todo: clamp entire edge rather than just center
edge_color = textImage.getpixel((textImage.width - 1, textImage.height // 2))
padding = (0, height_padding, new_width - textImage.width, height_padding)
# pad image
textImage = ImageOps.expand(textImage, padding, fill=edge_color)
# pil to numpy

View File

@@ -6,8 +6,54 @@ from predict.rectangle import Rectangle
defaultThreshold = .2

def parse_yolov10(results, threshold = defaultThreshold, scale = None, confidence_scale = None):
    """Decode a yolov10 output grid into Prediction objects.

    results: array whose rows 0-3 hold l/t/r/b box coordinates and rows 4+
    hold per-class scores, one column per candidate box. Optional scale /
    confidence_scale callables remap coordinates and confidences.
    """
    objs: list[Prediction] = []
    # argwhere yields (class, box-index) pairs whose score clears the threshold.
    for class_id, index in np.argwhere(results[4:] > threshold):
        confidence = results[class_id + 4, index].astype(float)
        coords = [results[axis][index].astype(float) for axis in range(4)]
        if scale:
            coords = [scale(c) for c in coords]
        if confidence_scale:
            confidence = confidence_scale(confidence)
        l, t, r, b = coords
        objs.append(Prediction(int(class_id), confidence, Rectangle(l, t, r, b)))
    return objs
def parse_yolo_nas(predictions, threshold = .5):
    """Decode YOLO-NAS outputs into Prediction objects.

    predictions: [batch_scores, batch_bboxes]; per image, scores has shape
    (num_boxes, num_classes) and bboxes has shape (num_boxes, 4).
    threshold: minimum class score to keep a box. Defaults to .5, the value
    previously hard-coded, so existing callers are unaffected.
    """
    objs = []
    for pred_scores, pred_bboxes in zip(*predictions):
        # (box, class) index pairs above the score threshold
        i, j = np.nonzero(pred_scores > threshold)
        pred_bboxes = pred_bboxes[i]
        pred_cls_conf = pred_scores[i, j]
        pred_cls_label = j[:]
        for box, conf, label in zip(pred_bboxes, pred_cls_conf, pred_cls_label):
            obj = Prediction(
                int(label), conf.astype(float), Rectangle(box[0].astype(float), box[1].astype(float), box[2].astype(float), box[3].astype(float))
            )
            objs.append(obj)
    return objs
def parse_yolov9(results, threshold = defaultThreshold, scale = None, confidence_scale = None):
objs: list[Prediction] = []
keep = np.argwhere(results[4:] > threshold)
for indices in keep:
class_id = indices[0]

View File

@@ -1 +0,0 @@
../../tensorflow-lite/src/detect

View File

@@ -73,7 +73,6 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
if mediaObject.mimeType == ScryptedMimeTypes.Image.value:
image = await scrypted_sdk.sdk.connectRPCObject(mediaObject)
else:
print('non image provided')
image = await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(mediaObject, ScryptedMimeTypes.Image.value)
return await self.run_detection_image(image, session)

View File

@@ -1,8 +1,10 @@
from __future__ import annotations
import asyncio
import concurrent.futures
import json
import re
import traceback
from typing import Any, Tuple
import numpy as np
@@ -11,30 +13,33 @@ import scrypted_sdk
from PIL import Image
from scrypted_sdk.other import SettingValue
from scrypted_sdk.types import Setting
import concurrent.futures
import common.yolo as yolo
from predict import Prediction, PredictPlugin
from predict.rectangle import Rectangle
from .face_recognition import OpenVINOFaceRecognition
try:
from .text_recognition import OpenVINOTextRecognition
except:
OpenVINOTextRecognition = None
predictExecutor = concurrent.futures.ThreadPoolExecutor(1, "OpenVINO-Predict")
prepareExecutor = concurrent.futures.ThreadPoolExecutor(1, "OpenVINO-Prepare")
availableModels = [
"Default",
"scrypted_yolov10m_320",
"scrypted_yolov10s_320",
"scrypted_yolov10n_320",
"scrypted_yolo_nas_s_320",
"scrypted_yolov6n_320",
"scrypted_yolov6n",
"scrypted_yolov6s_320",
"scrypted_yolov6s",
"scrypted_yolov9c_320",
"scrypted_yolov9c",
"scrypted_yolov9s_320",
"scrypted_yolov9t_320",
"scrypted_yolov8n_320",
"scrypted_yolov8n",
"ssd_mobilenet_v1_coco",
"ssdlite_mobilenet_v2",
"yolo-v3-tiny-tf",
@@ -121,10 +126,10 @@ class OpenVINOPlugin(
if using_mode == "AUTO":
if "GPU" in available_devices:
using_mode = "GPU"
if using_mode == "GPU":
precision = "FP16"
else:
precision = "FP32"
# FP16 is smaller and the default export. no tangible performance difference.
# https://docs.openvino.ai/2023.3/openvino_docs_OV_Converter_UG_Conversion_Options.html
precision = "FP16"
self.precision = precision
@@ -132,8 +137,10 @@ class OpenVINOPlugin(
if model == "Default" or model not in availableModels:
if model != "Default":
self.storage.setItem("model", "Default")
model = "scrypted_yolov8n_320"
model = "scrypted_yolov9t_320"
self.yolo = "yolo" in model
self.scrypted_yolov10 = "scrypted_yolov10" in model
self.scrypted_yolo_nas = "scrypted_yolo_nas" in model
self.scrypted_yolo = "scrypted_yolo" in model
self.scrypted_model = "scrypted" in model
self.sigmoid = model == "yolo-v4-tiny-tf"
@@ -144,26 +151,31 @@ class OpenVINOPlugin(
model_version = "v5"
xmlFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.xml",
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.xml",
f"{model_version}/{model}/{precision}/{ovmodel}.xml",
)
binFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.bin",
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.bin",
f"{model_version}/{model}/{precision}/{ovmodel}.bin",
)
if self.scrypted_model:
if self.scrypted_yolo_nas:
labelsFile = self.downloadFile(
"https://raw.githubusercontent.com/koush/openvino-models/main/scrypted_labels.txt",
"https://github.com/koush/openvino-models/raw/main/scrypted_nas_labels.txt",
"scrypted_nas_labels.txt",
)
elif self.scrypted_model:
labelsFile = self.downloadFile(
"https://github.com/koush/openvino-models/raw/main/scrypted_labels.txt",
"scrypted_labels.txt",
)
elif self.yolo:
labelsFile = self.downloadFile(
"https://raw.githubusercontent.com/koush/openvino-models/main/coco_80cl.txt",
"https://github.com/koush/openvino-models/raw/main/coco_80cl.txt",
"coco_80cl.txt",
)
else:
labelsFile = self.downloadFile(
"https://raw.githubusercontent.com/koush/openvino-models/main/coco_labels.txt",
"https://github.com/koush/openvino-models/raw/main/coco_labels.txt",
"coco_labels.txt",
)
@@ -194,6 +206,8 @@ class OpenVINOPlugin(
labels_contents = open(labelsFile, "r").read()
self.labels = parse_label_contents(labels_contents)
self.faceDevice = None
self.textDevice = None
asyncio.ensure_future(self.prepareRecognitionModels(), loop=self.loop)
async def getSettings(self) -> list[Setting]:
@@ -262,8 +276,11 @@ class OpenVINOPlugin(
objs = []
if self.scrypted_yolo:
objs = yolo.parse_yolov9(output_tensors[0][0])
return objs
if self.scrypted_yolov10:
return yolo.parse_yolov10(output_tensors[0][0])
if self.scrypted_yolo_nas:
return yolo.parse_yolo_nas([output_tensors[1], output_tensors[0]])
return yolo.parse_yolov9(output_tensors[0][0])
if self.yolo:
# index 2 will always either be 13 or 26
@@ -314,30 +331,34 @@ class OpenVINOPlugin(
return objs
# the input_tensor can be created with the shared_memory=True parameter,
# but that seems to cause issues on some platforms.
if self.scrypted_yolo:
im = np.stack([input])
im = im.transpose((0, 3, 1, 2)) # BHWC to BCHW, (n, 3, h, w)
im = im.astype(np.float32) / 255.0
im = np.ascontiguousarray(im) # contiguous
im = ov.Tensor(array=im)
input_tensor = im
elif self.yolo:
input_tensor = ov.Tensor(
array=np.expand_dims(np.array(input), axis=0).astype(np.float32)
)
else:
input_tensor = ov.Tensor(array=np.expand_dims(np.array(input), axis=0))
def prepare():
# the input_tensor can be created with the shared_memory=True parameter,
# but that seems to cause issues on some platforms.
if self.scrypted_yolo:
im = np.array(input)
im = np.expand_dims(input, axis=0)
im = im.transpose((0, 3, 1, 2)) # BHWC to BCHW, (n, 3, h, w)
im = im.astype(np.float32) / 255.0
im = np.ascontiguousarray(im) # contiguous
input_tensor = ov.Tensor(array=im)
elif self.yolo:
input_tensor = ov.Tensor(
array=np.expand_dims(np.array(input), axis=0).astype(np.float32)
)
else:
input_tensor = ov.Tensor(array=np.expand_dims(np.array(input), axis=0))
return input_tensor
try:
input_tensor = await asyncio.get_event_loop().run_in_executor(
prepareExecutor, lambda: prepare()
)
objs = await asyncio.get_event_loop().run_in_executor(
predictExecutor, lambda: predict(input_tensor)
)
except:
import traceback
traceback.print_exc()
raise
@@ -379,7 +400,9 @@ class OpenVINOPlugin(
async def getDevice(self, nativeId: str) -> Any:
if nativeId == "facerecognition":
return OpenVINOFaceRecognition(self, nativeId)
self.faceDevice = self.faceDevice or OpenVINOFaceRecognition(self, nativeId)
return self.faceDevice
elif nativeId == "textrecognition":
return OpenVINOTextRecognition(self, nativeId)
self.textDevice = self.textDevice or OpenVINOTextRecognition(self, nativeId)
return self.textDevice
raise Exception("unknown device")

View File

@@ -0,0 +1,7 @@
import concurrent.futures
def create_executors(name: str):
    """Create a (prepare, predict) pair of single-thread executors named after `name`.

    Bug fix: the originals were plain strings missing the f-prefix (and had an
    unbalanced brace), so every executor was literally named "OpenVINO-{f}Prepare"
    / "OpenVINO-{f}}Predict" regardless of `name`.
    """
    prepare = concurrent.futures.ThreadPoolExecutor(1, f"OpenVINO-{name}Prepare")
    predict = concurrent.futures.ThreadPoolExecutor(1, f"OpenVINO-{name}Predict")
    return prepare, predict

View File

@@ -1,23 +1,19 @@
from __future__ import annotations
import concurrent.futures
import openvino.runtime as ov
import asyncio
import numpy as np
import openvino.runtime as ov
from PIL import Image
from ov import async_infer
from predict.face_recognize import FaceRecognizeDetection
faceDetectPrepare, faceDetectPredict = async_infer.create_executors("FaceDetect")
faceRecognizePrepare, faceRecognizePredict = async_infer.create_executors(
"FaceRecognize"
)
def euclidean_distance(arr1, arr2):
return np.linalg.norm(arr1 - arr2)
def cosine_similarity(vector_a, vector_b):
    """Cosine of the angle between two vectors (1.0 = same direction)."""
    dot = np.dot(vector_a, vector_b)
    magnitude_product = np.linalg.norm(vector_a) * np.linalg.norm(vector_b)
    return dot / magnitude_product
class OpenVINOFaceRecognition(FaceRecognizeDetection):
def __init__(self, plugin, nativeId: str | None = None):
@@ -30,42 +26,45 @@ class OpenVINOFaceRecognition(FaceRecognizeDetection):
precision = self.plugin.precision
model_version = "v5"
xmlFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.xml",
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.xml",
f"{model_version}/{model}/{precision}/{ovmodel}.xml",
)
binFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.bin",
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.bin",
f"{model_version}/{model}/{precision}/{ovmodel}.bin",
)
print(xmlFile, binFile)
return self.plugin.core.compile_model(xmlFile, self.plugin.mode)
def predictDetectModel(self, input):
infer_request = self.detectModel.create_infer_request()
im = np.stack([input])
im = im.transpose((0, 3, 1, 2)) # BHWC to BCHW, (n, 3, h, w)
im = im.astype(np.float32) / 255.0
im = np.ascontiguousarray(im) # contiguous
im = ov.Tensor(array=im)
input_tensor = im
infer_request.set_input_tensor(input_tensor)
infer_request.start_async()
infer_request.wait()
return infer_request.output_tensors[0].data[0]
async def predictDetectModel(self, input: Image.Image):
def predict():
im = np.expand_dims(input, axis=0)
im = im.transpose((0, 3, 1, 2)) # BHWC to BCHW, (n, 3, h, w)
im = im.astype(np.float32) / 255.0
im = np.ascontiguousarray(im) # contiguous
def predictFaceModel(self, input):
im = ov.Tensor(array=input)
infer_request = self.faceModel.create_infer_request()
infer_request.set_input_tensor(im)
infer_request.start_async()
infer_request.wait()
return infer_request.output_tensors[0].data[0]
infer_request = self.detectModel.create_infer_request()
tensor = ov.Tensor(array=im)
infer_request.set_input_tensor(tensor)
output_tensors = infer_request.infer()
ret = output_tensors[0][0]
return ret
def predictTextModel(self, input):
input = input.astype(np.float32)
im = ov.Tensor(array=input)
infer_request = self.textModel.create_infer_request()
infer_request.set_input_tensor(im)
infer_request.start_async()
infer_request.wait()
return infer_request.output_tensors[0].data
ret = await asyncio.get_event_loop().run_in_executor(
faceDetectPredict, lambda: predict()
)
return ret
async def predictFaceModel(self, input: np.ndarray):
def predict():
im = ov.Tensor(array=input)
infer_request = self.faceModel.create_infer_request()
infer_request.set_input_tensor(im)
output_tensors = infer_request.infer()
ret = output_tensors[0]
return ret
ret = await asyncio.get_event_loop().run_in_executor(
faceRecognizePredict, lambda: predict()
)
return ret

View File

@@ -1,10 +1,18 @@
from __future__ import annotations
import openvino.runtime as ov
import numpy as np
import asyncio
import numpy as np
import openvino.runtime as ov
from ov import async_infer
from predict.text_recognize import TextRecognition
textDetectPrepare, textDetectPredict = async_infer.create_executors("TextDetect")
textRecognizePrepare, textRecognizePredict = async_infer.create_executors(
"TextRecognize"
)
class OpenVINOTextRecognition(TextRecognition):
def __init__(self, plugin, nativeId: str | None = None):
@@ -15,32 +23,43 @@ class OpenVINOTextRecognition(TextRecognition):
def downloadModel(self, model: str):
ovmodel = "best"
precision = self.plugin.precision
model_version = "v5"
model_version = "v6"
xmlFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.xml",
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.xml",
f"{model_version}/{model}/{precision}/{ovmodel}.xml",
)
binFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.bin",
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.bin",
f"{model_version}/{model}/{precision}/{ovmodel}.bin",
)
print(xmlFile, binFile)
return self.plugin.core.compile_model(xmlFile, self.plugin.mode)
def predictDetectModel(self, input):
infer_request = self.detectModel.create_infer_request()
im = ov.Tensor(array=input)
input_tensor = im
infer_request.set_input_tensor(input_tensor)
infer_request.start_async()
infer_request.wait()
return infer_request.output_tensors[0].data
async def predictDetectModel(self, input: np.ndarray):
def predict():
infer_request = self.detectModel.create_infer_request()
im = ov.Tensor(array=input)
input_tensor = im
infer_request.set_input_tensor(input_tensor)
output_tensors = infer_request.infer()
ret = output_tensors[0]
return ret
def predictTextModel(self, input):
input = input.astype(np.float32)
im = ov.Tensor(array=input)
infer_request = self.textModel.create_infer_request()
infer_request.set_input_tensor(im)
infer_request.start_async()
infer_request.wait()
return infer_request.output_tensors[0].data
ret = await asyncio.get_event_loop().run_in_executor(
textDetectPredict, lambda: predict()
)
return ret
async def predictTextModel(self, input: np.ndarray):
def predict():
im = ov.Tensor(array=input.astype(np.float32))
infer_request = self.textModel.create_infer_request()
infer_request.set_input_tensor(im)
output_tensors = infer_request.infer()
ret = output_tensors[0]
return ret
ret = await asyncio.get_event_loop().run_in_executor(
textDetectPredict, lambda: predict()
)
return ret

View File

@@ -1 +0,0 @@
../../tensorflow-lite/src/predict

View File

@@ -145,8 +145,8 @@ class PredictPlugin(DetectPlugin):
for i, result in enumerate(results):
batch[i][1].set_result(result)
except Exception as e:
for i, result in enumerate(results):
batch[i][1].set_exception(e)
for input in batch:
input[1].set_exception(e)
async def flush_batch(self):
self.batch_flush = None

View File

@@ -38,6 +38,7 @@ def getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text,
nLabels, labels, stats, centroids = cv2.connectedComponentsWithStats(text_score_comb.astype(np.uint8), connectivity=4)
det = []
scores = []
mapper = []
for k in range(1,nLabels):
# size filtering
@@ -45,7 +46,8 @@ def getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text,
if size < 10: continue
# thresholding
if np.max(textmap[labels==k]) < text_threshold: continue
score = np.max(textmap[labels==k])
if score < text_threshold: continue
# make segmentation map
segmap = np.zeros(textmap.shape, dtype=np.uint8)
@@ -89,8 +91,9 @@ def getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text,
box = np.array(box)
det.append(box)
scores.append(score)
return det, labels, mapper
return det, labels, mapper, scores
def getPoly_core(boxes, labels, mapper, linkmap):
# configs
@@ -241,14 +244,14 @@ def getPoly_core(boxes, labels, mapper, linkmap):
def getDetBoxes(textmap, linkmap, text_threshold, link_threshold, low_text, poly=False, estimate_num_chars=False):
if poly and estimate_num_chars:
raise Exception("Estimating the number of characters not currently supported with poly.")
boxes, labels, mapper = getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text, estimate_num_chars)
boxes, labels, mapper, scores = getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text, estimate_num_chars)
if poly:
polys = getPoly_core(boxes, labels, mapper, linkmap)
else:
polys = [None] * len(boxes)
return boxes, polys, mapper
return boxes, polys, mapper, scores
def adjustResultCoordinates(polys, ratio_w, ratio_h, ratio_net = 2):
if len(polys) > 0:

View File

@@ -1,34 +1,19 @@
from __future__ import annotations
import asyncio
from asyncio import Future
import base64
import concurrent.futures
import os
from typing import Any, Tuple, List
import traceback
from asyncio import Future
from typing import Any, List, Tuple
import numpy as np
# import Quartz
import scrypted_sdk
# from Foundation import NSData, NSMakeSize
from PIL import Image
from scrypted_sdk import (
Setting,
SettingValue,
ObjectDetectionSession,
ObjectsDetected,
ObjectDetectionResult,
)
import traceback
from scrypted_sdk import (ObjectDetectionResult, ObjectDetectionSession,
ObjectsDetected)
# import Vision
from predict import PredictPlugin
from common import yolo
from common.text import prepare_text_result, process_text_result
def euclidean_distance(arr1, arr2):
    """Euclidean (L2) distance: norm of the element-wise difference."""
    delta = arr1 - arr2
    return np.linalg.norm(delta)
from predict import PredictPlugin
def cosine_similarity(vector_a, vector_b):
dot_product = np.dot(vector_a, vector_b)
@@ -37,39 +22,25 @@ def cosine_similarity(vector_a, vector_b):
similarity = dot_product / (norm_a * norm_b)
return similarity
predictExecutor = concurrent.futures.ThreadPoolExecutor(1, "Recognize")
class FaceRecognizeDetection(PredictPlugin):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
self.inputheight = 640
self.inputwidth = 640
self.inputheight = 320
self.inputwidth = 320
self.labels = {
0: "face",
1: "plate",
2: "text",
}
self.loop = asyncio.get_event_loop()
self.minThreshold = 0.7
self.minThreshold = 0.5
self.detectModel = self.downloadModel("scrypted_yolov9c_flt")
self.textModel = self.downloadModel("vgg_english_g2")
self.detectModel = self.downloadModel("scrypted_yolov9t_face_320")
self.faceModel = self.downloadModel("inception_resnet_v1")
def downloadModel(self, model: str):
pass
async def getSettings(self) -> list[Setting]:
pass
async def putSetting(self, key: str, value: SettingValue):
self.storage.setItem(key, value)
await self.onDeviceEvent(scrypted_sdk.ScryptedInterface.Settings.value, None)
await scrypted_sdk.deviceManager.requestRestart()
# width, height, channels
def get_input_details(self) -> Tuple[int, int, int]:
return (self.inputwidth, self.inputheight, 3)
@@ -81,9 +52,7 @@ class FaceRecognizeDetection(PredictPlugin):
return "rgb"
async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
results = await asyncio.get_event_loop().run_in_executor(
predictExecutor, lambda: self.predictDetectModel(input)
)
results = await self.predictDetectModel(input)
objs = yolo.parse_yolov9(results)
ret = self.create_detection_result(objs, src_size, cvss)
return ret
@@ -112,10 +81,7 @@ class FaceRecognizeDetection(PredictPlugin):
processed_tensor = (image_tensor - 127.5) / 128.0
processed_tensor = np.expand_dims(processed_tensor, axis=0)
output = await asyncio.get_event_loop().run_in_executor(
predictExecutor,
lambda: self.predictFaceModel(processed_tensor)
)
output = await self.predictFaceModel(processed_tensor)
b = output.tobytes()
embedding = base64.b64encode(b).decode("utf-8")
@@ -124,30 +90,13 @@ class FaceRecognizeDetection(PredictPlugin):
traceback.print_exc()
pass
def predictTextModel(self, input):
async def predictDetectModel(self, input: Image.Image):
pass
def predictDetectModel(self, input):
async def predictFaceModel(self, prepareTensor):
pass
def predictFaceModel(self, input):
pass
async def setLabel(self, d: ObjectDetectionResult, image: scrypted_sdk.Image):
try:
image_tensor = await prepare_text_result(d, image)
preds = await asyncio.get_event_loop().run_in_executor(
predictExecutor,
lambda: self.predictTextModel(image_tensor),
)
d['label'] = process_text_result(preds)
except Exception as e:
traceback.print_exc()
pass
async def run_detection_image(
self, image: scrypted_sdk.Image, detection_session: ObjectDetectionSession
) -> ObjectsDetected:
@@ -206,31 +155,26 @@ class FaceRecognizeDetection(PredictPlugin):
for d in ret["detections"]:
if d["className"] == "face":
futures.append(asyncio.ensure_future(self.setEmbedding(d, image)))
# elif d["className"] == "plate":
# futures.append(asyncio.ensure_future(self.setLabel(d, image)))
# elif d['className'] == 'text':
# futures.append(asyncio.ensure_future(self.setLabel(d, image)))
if len(futures):
await asyncio.wait(futures)
last = None
for d in ret['detections']:
if d["className"] != "face":
continue
check = d.get("embedding")
if check is None:
continue
# decode base64 string check
embedding = base64.b64decode(check)
embedding = np.frombuffer(embedding, dtype=np.float32)
if last is None:
last = embedding
continue
# convert to numpy float32 arrays
similarity = cosine_similarity(last, embedding)
print('similarity', similarity)
last = embedding
# last = None
# for d in ret['detections']:
# if d["className"] != "face":
# continue
# check = d.get("embedding")
# if check is None:
# continue
# # decode base64 string check
# embedding = base64.b64decode(check)
# embedding = np.frombuffer(embedding, dtype=np.float32)
# if last is None:
# last = embedding
# continue
# # convert to numpy float32 arrays
# similarity = cosine_similarity(last, embedding)
# print('similarity', similarity)
# last = embedding
return ret

View File

@@ -41,10 +41,10 @@ class TextRecognition(PredictPlugin):
def downloadModel(self, model: str):
pass
def predictDetectModel(self, input):
async def predictDetectModel(self, input: np.ndarray):
pass
def predictTextModel(self, input):
async def predictTextModel(self, input: np.ndarray):
pass
async def detect_once(
@@ -56,25 +56,23 @@ class TextRecognition(PredictPlugin):
# add extra dimension to tensor
image_tensor = np.expand_dims(image_tensor, axis=0)
y = await asyncio.get_event_loop().run_in_executor(
predictExecutor, lambda: self.predictDetectModel(image_tensor)
)
y = await self.predictDetectModel(image_tensor)
estimate_num_chars = False
ratio_h = ratio_w = 1
text_threshold = 0.7
link_threshold = 0.9
low_text = 0.4
low_text = 0.5
poly = False
boxes_list, polys_list = [], []
boxes_list, polys_list, scores_list = [], [], []
for out in y:
# make score and link map
score_text = out[:, :, 0]
score_link = out[:, :, 1]
# Post-processing
boxes, polys, mapper = getDetBoxes(
boxes, polys, mapper, scores = getDetBoxes(
score_text,
score_link,
text_threshold,
@@ -98,18 +96,19 @@ class TextRecognition(PredictPlugin):
if polys[k] is None:
polys[k] = boxes[k]
boxes_list.append(boxes)
scores_list.append(scores)
polys_list.append(polys)
preds: List[Prediction] = []
for boxes in boxes_list:
for box in boxes:
for boxes, scores in zip(boxes_list, scores_list):
for box, score in zip(boxes, scores):
tl, tr, br, bl = box
l = min(tl[0], bl[0])
t = min(tl[1], tr[1])
r = max(tr[0], br[0])
b = max(bl[1], br[1])
pred = Prediction(0, 1, Rectangle(l, t, r, b))
pred = Prediction(0, float(score), Rectangle(l, t, r, b))
preds.append(pred)
return self.create_detection_result(preds, src_size, cvss)
@@ -123,22 +122,23 @@ class TextRecognition(PredictPlugin):
futures: List[Future] = []
boundingBoxes = [d["boundingBox"] for d in detections]
boundingBoxes, scores = [d["boundingBox"] for d in detections], [d["score"] for d in detections]
if not len(boundingBoxes):
return ret
text_groups = find_adjacent_groups(boundingBoxes)
text_groups = find_adjacent_groups(boundingBoxes, scores)
detections = []
for group in text_groups:
boundingBox = group["union"]
score = group["score"]
d: ObjectDetectionResult = {
"boundingBox": boundingBox,
"score": 1,
"score": score,
"className": "text",
}
futures.append(
asyncio.ensure_future(self.setLabel(d, image, group["skew_angle"]))
asyncio.ensure_future(self.setLabel(d, image, group["skew_angle"], group['deskew_height']))
)
detections.append(d)
@@ -153,15 +153,11 @@ class TextRecognition(PredictPlugin):
return ret
async def setLabel(
self, d: ObjectDetectionResult, image: scrypted_sdk.Image, skew_angle: float
self, d: ObjectDetectionResult, image: scrypted_sdk.Image, skew_angle: float, deskew_height: float
):
try:
image_tensor = await prepare_text_result(d, image, skew_angle)
preds = await asyncio.get_event_loop().run_in_executor(
predictExecutor,
lambda: self.predictTextModel(image_tensor),
)
image_tensor = await prepare_text_result(d, image, skew_angle, deskew_height)
preds = await self.predictTextModel(image_tensor)
d["label"] = process_text_result(preds)
except Exception as e:

View File

@@ -43,42 +43,58 @@ def are_boxes_adjacent(box1: BoundingBox, box2: BoundingBox):
return False
def find_adjacent_groups(boxes: List[BoundingBox]) -> List[dict]:
def find_adjacent_groups(boxes: List[BoundingBox], scores: List[float]) -> List[dict]:
groups = []
# sort boxes left to right
boxes = sorted(boxes, key=lambda box: box[0])
for box in boxes:
for index, box in enumerate(boxes):
added_to_group = False
for group in groups:
for other_box in group["boxes"]:
if are_boxes_adjacent(box, other_box):
group["boxes"].append(box)
group["scores"].append(scores[index])
added_to_group = True
break
if added_to_group:
break
if not added_to_group:
groups.append({"boxes": [box], "skew_angle": 0})
groups.append({"boxes": [box], "scores": [scores[index]]})
# Calculate the skew angle of each group
for group in groups:
boxes = group["boxes"]
group["union"] = union_boxes(boxes)
if len(boxes) -1 :
lm = (boxes[0][1] + boxes[0][3]) / 2
rm = (boxes[-1][1] + boxes[-1][3]) / 2
dx = (boxes[-1][0]) - (boxes[0][0] + boxes[0][2])
if len(boxes) - 1:
lm = boxes[0][1] + boxes[0][3] / 2
rm = boxes[-1][1] + boxes[-1][3] / 2
dx = (boxes[-1][0]) - (boxes[0][0])
minx = min([box[0] for box in boxes])
maxx = max([box[0] + box[2] for box in boxes])
# denoise by filtering the box height
minh = min([box[3] for box in boxes])
median_height = sorted([box[3] for box in boxes])[len(boxes) // 2]
maxh = max([box[3] for box in boxes])
pad_height = maxh * 0.05
filter_height = median_height
pad_height = filter_height * 0.05
dx = maxx - minx
group['skew_angle'] = math.atan2(rm - lm, dx) * 2
group['skew_angle'] = math.atan((rm - lm) / dx)
group['deskew_height'] = filter_height + pad_height * 2
# pad this box by a few pixels
group['union'] = (group['union'][0] - pad_height, group['union'][1] - pad_height, group['union'][2] + pad_height * 2, group['union'][3] + pad_height * 2)
group['union'] = (
group['union'][0] - pad_height,
group['union'][1] - pad_height,
group['union'][2] + pad_height * 2,
group['union'][3] + pad_height * 2)
# average the scores
group['score'] = sum(group['scores']) / len(group['scores'])
else:
group['skew_angle'] = 0
group['deskew_height'] = boxes[0][3]
group['score'] = group['scores'][0]
return groups

View File

@@ -1 +1 @@
opencv-python
opencv-python==4.10.0.82

View File

@@ -1,7 +1,4 @@
# 2024-04-23 - modify timestamp to force pip reinstall
openvino==2024.0.0
# pillow-simd is available on x64 linux
# pillow-simd confirmed not building with arm64 linux or apple silicon
Pillow>=5.4.1; 'linux' not in sys_platform or platform_machine != 'x86_64'
pillow-simd; 'linux' in sys_platform and platform_machine == 'x86_64'
# must ensure numpy is pinned to prevent dependencies with an unpinned numpy from pulling numpy>=2.0.
numpy==1.26.4
openvino==2024.1.0
Pillow==10.3.0

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/prebuffer-mixin",
"version": "0.10.18",
"version": "0.10.27",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/prebuffer-mixin",
"version": "0.10.18",
"version": "0.10.27",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/prebuffer-mixin",
"version": "0.10.18",
"version": "0.10.27",
"description": "Video Stream Rebroadcast, Prebuffer, and Management Plugin for Scrypted.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -1,29 +1,22 @@
import { cloneDeep } from '@scrypted/common/src/clone-deep';
import { Deferred } from "@scrypted/common/src/deferred";
import { listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from '@scrypted/common/src/media-helpers';
import { createActivityTimeout } from '@scrypted/common/src/activity-timeout';
import { createRtspParser } from "@scrypted/common/src/rtsp-server";
import { parseSdp } from "@scrypted/common/src/sdp-utils";
import { StreamChunk, StreamParser } from '@scrypted/common/src/stream-parser';
import sdk, { FFmpegInput, RequestMediaStreamOptions, ResponseMediaStreamOptions } from "@scrypted/sdk";
import child_process, { ChildProcess, StdioOptions } from 'child_process';
import { EventEmitter } from 'events';
import { Server } from 'net';
import { Duplex } from 'stream';
import { cloneDeep } from './clone-deep';
import { Deferred } from "./deferred";
import { listenZeroSingleClient } from './listen-cluster';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from './media-helpers';
import { createRtspParser } from "./rtsp-server";
import { parseSdp } from "./sdp-utils";
import { StreamChunk, StreamParser } from './stream-parser';
const { mediaManager } = sdk;
export interface ParserSession<T extends string> {
parserSpecific?: any;
sdp: Promise<Buffer[]>;
sdp: Promise<string>;
resetActivityTimer?: () => void,
negotiateMediaStream(requestMediaStream: RequestMediaStreamOptions): ResponseMediaStreamOptions;
inputAudioCodec?: string;
inputVideoCodec?: string;
inputVideoResolution?: {
width: number,
height: number,
},
negotiateMediaStream(requestMediaStream: RequestMediaStreamOptions, inputVideoCodec: string, inputAudioCodec: string): ResponseMediaStreamOptions;
start(): void;
kill(error?: Error): void;
killed: Promise<void>;
@@ -31,6 +24,7 @@ export interface ParserSession<T extends string> {
emit(container: T, chunk: StreamChunk): this;
on(container: T, callback: (chunk: StreamChunk) => void): this;
on(error: 'error', callback: (e: Error) => void): this;
removeListener(event: T | 'killed', callback: any): this;
once(event: T | 'killed', listener: (...args: any[]) => void): this;
}
@@ -102,65 +96,37 @@ export async function parseAudioCodec(cp: ChildProcess) {
export function setupActivityTimer(container: string, kill: (error?: Error) => void, events: {
once(event: 'killed', callback: () => void): void,
}, timeout: number) {
let dataTimeout: NodeJS.Timeout;
function dataKill() {
const ret = createActivityTimeout(timeout, () => {
const str = 'timeout waiting for data, killing parser session';
console.error(str, container);
kill(new Error(str));
}
let lastTime = Date.now();
function resetActivityTimer() {
lastTime = Date.now();
}
function clearActivityTimer() {
clearInterval(dataTimeout);
}
if (timeout) {
dataTimeout = setInterval(() => {
if (Date.now() > lastTime + timeout) {
clearInterval(dataTimeout);
dataTimeout = undefined;
dataKill();
}
}, timeout);
}
events.once('killed', () => clearInterval(dataTimeout));
resetActivityTimer();
return {
resetActivityTimer,
clearActivityTimer,
}
});
events.once('killed', () => ret.clearActivityTimer());
return ret;
}
export async function startParserSession<T extends string>(ffmpegInput: FFmpegInput, options: ParserOptions<T>): Promise<ParserSession<T>> {
const { console } = options;
let isActive = true;
const events = new EventEmitter();
// need this to prevent kill from throwing due to uncaught Error during cleanup
events.on('error', e => console.error('rebroadcast error', e));
let inputAudioCodec: string;
let inputVideoCodec: string;
let inputVideoResolution: string[];
events.on('error', () => {});
let sessionKilled: any;
const killed = new Promise<void>(resolve => {
sessionKilled = resolve;
});
const sdpDeferred = new Deferred<string>();
function kill(error?: Error) {
error ||= new Error('killed');
if (isActive) {
events.emit('killed');
events.emit('error', error || new Error('killed'));
events.emit('error', error);
}
if (!sdpDeferred.finished)
sdpDeferred.reject(error);
isActive = false;
sessionKilled();
safeKillFFmpeg(cp);
@@ -200,7 +166,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
try {
ensureActive(() => socket.destroy());
for await (const chunk of parser.parse(socket, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
for await (const chunk of parser.parse(socket, undefined, undefined)) {
events.emit(container, chunk);
resetActivityTimer();
}
@@ -247,7 +213,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
try {
const { resetActivityTimer } = setupActivityTimer(container, kill, events, options?.timeout);
for await (const chunk of parser.parse(pipe as any, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
for await (const chunk of parser.parse(pipe as any, undefined, undefined)) {
await deferredStart.promise;
events.emit(container, chunk);
resetActivityTimer();
@@ -261,17 +227,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
};
const rtsp = (options.parsers as any).rtsp as ReturnType<typeof createRtspParser>;
rtsp.sdp.then(sdp => {
const parsed = parseSdp(sdp);
const audio = parsed.msections.find(msection => msection.type === 'audio');
const video = parsed.msections.find(msection => msection.type === 'video');
inputVideoCodec = video?.codec;
inputAudioCodec = audio?.codec;
});
const sdp = new Deferred<Buffer[]>();
rtsp.sdp.then(r => sdp.resolve([Buffer.from(r)]));
killed.then(() => sdp.reject(new Error("ffmpeg killed before sdp could be parsed")));
rtsp.sdp.then(sdp => sdpDeferred.resolve(sdp));
start();
@@ -279,25 +235,13 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
start() {
deferredStart.resolve();
},
sdp: sdp.promise,
get inputAudioCodec() {
return inputAudioCodec;
},
get inputVideoCodec() {
return inputVideoCodec;
},
get inputVideoResolution() {
return {
width: parseInt(inputVideoResolution?.[2]),
height: parseInt(inputVideoResolution?.[3]),
}
},
sdp: sdpDeferred.promise,
get isActive() { return isActive },
kill(error?: Error) {
kill(error);
},
killed,
negotiateMediaStream: () => {
negotiateMediaStream: (requestMediaStream: RequestMediaStreamOptions, inputVideoCodec, inputAudioCodec) => {
const ret: ResponseMediaStreamOptions = cloneDeep(ffmpegInput.mediaStreamOptions) || {
id: undefined,
name: undefined,
@@ -339,64 +283,3 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
}
};
}
export interface Rebroadcaster {
server: Server;
port: number;
url: string;
clients: number;
}
export interface RebroadcastSessionCleanup {
(): void;
}
export interface RebroadcasterConnection {
writeData: (data: StreamChunk) => number;
destroy: () => void;
}
export interface RebroadcasterOptions {
connect?: (connection: RebroadcasterConnection) => RebroadcastSessionCleanup | undefined;
console?: Console;
idle?: {
timeout: number,
callback: () => void,
},
}
export function handleRebroadcasterClient(socket: Duplex, options?: RebroadcasterOptions) {
const firstWriteData = (data: StreamChunk) => {
if (data.startStream) {
socket.write(data.startStream)
}
connection.writeData = writeData;
return writeData(data);
}
const writeData = (data: StreamChunk) => {
for (const chunk of data.chunks) {
socket.write(chunk);
}
return socket.writableLength;
};
const destroy = () => {
const cb = cleanupCallback;
cleanupCallback = undefined;
socket.destroy();
cb?.();
}
const connection: RebroadcasterConnection = {
writeData: firstWriteData,
destroy,
};
let cleanupCallback = options?.connect(connection);
socket.once('close', () => {
destroy();
});
socket.on('error', e => options?.console?.log('client stream ended'));
}

View File

@@ -1,28 +1,30 @@
import path from 'path'
import { AutoenableMixinProvider } from '@scrypted/common/src/autoenable-mixin-provider';
import { getDebugModeH264EncoderArgs, getH264EncoderArgs } from '@scrypted/common/src/ffmpeg-hardware-acceleration';
import { addVideoFilterArguments } from '@scrypted/common/src/ffmpeg-helpers';
import { ParserOptions, ParserSession, handleRebroadcasterClient, startParserSession } from '@scrypted/common/src/ffmpeg-rebroadcast';
import { ListenZeroSingleClientTimeoutError, closeQuiet, listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
import { readLength } from '@scrypted/common/src/read-stream';
import { H264_NAL_TYPE_FU_B, H264_NAL_TYPE_IDR, H264_NAL_TYPE_MTAP16, H264_NAL_TYPE_MTAP32, H264_NAL_TYPE_RESERVED0, H264_NAL_TYPE_RESERVED30, H264_NAL_TYPE_RESERVED31, H264_NAL_TYPE_SEI, H264_NAL_TYPE_STAP_B, RtspServer, RtspTrack, createRtspParser, findH264NaluType, getNaluTypes, listenSingleRtspClient } from '@scrypted/common/src/rtsp-server';
import { addTrackControls, parseSdp } from '@scrypted/common/src/sdp-utils';
import { H264_NAL_TYPE_FU_B, H264_NAL_TYPE_IDR, H264_NAL_TYPE_MTAP16, H264_NAL_TYPE_MTAP32, H264_NAL_TYPE_RESERVED0, H264_NAL_TYPE_RESERVED30, H264_NAL_TYPE_RESERVED31, H264_NAL_TYPE_SEI, H264_NAL_TYPE_SPS, H264_NAL_TYPE_STAP_B, H265_NAL_TYPE_SPS, RtspServer, RtspTrack, createRtspParser, findH264NaluType, findH265NaluType, getNaluTypes, listenSingleRtspClient } from '@scrypted/common/src/rtsp-server';
import { addTrackControls, getSpsPps, parseSdp } from '@scrypted/common/src/sdp-utils';
import { SettingsMixinDeviceBase, SettingsMixinDeviceOptions } from "@scrypted/common/src/settings-mixin";
import { sleep } from '@scrypted/common/src/sleep';
import { StreamChunk, StreamParser } from '@scrypted/common/src/stream-parser';
import sdk, { BufferConverter, ChargeState, DeviceProvider, DeviceState, EventListenerRegister, FFmpegInput, H264Info, MediaObject, MediaStreamDestination, MediaStreamOptions, MixinProvider, RequestMediaStreamOptions, ResponseMediaStreamOptions, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, SettingValue, Settings, VideoCamera, VideoCameraConfiguration, WritableDeviceState } from '@scrypted/sdk';
import sdk, { BufferConverter, ChargeState, DeviceProvider, EventListenerRegister, FFmpegInput, H264Info, MediaObject, MediaStreamDestination, MediaStreamOptions, MixinProvider, RequestMediaStreamOptions, ResponseMediaStreamOptions, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, SettingValue, Settings, VideoCamera, VideoCameraConfiguration, WritableDeviceState } from '@scrypted/sdk';
import { StorageSettings } from '@scrypted/sdk/storage-settings';
import crypto from 'crypto';
import { once } from 'events';
import { parse as h264SpsParse } from "h264-sps-parser";
import net, { AddressInfo } from 'net';
import path from 'path';
import semver from 'semver';
import { Duplex } from 'stream';
import { Worker } from 'worker_threads';
import { ParserOptions, ParserSession, startParserSession } from './ffmpeg-rebroadcast';
import { FileRtspServer } from './file-rtsp-server';
import { getUrlLocalAdresses } from './local-addresses';
import { REBROADCAST_MIXIN_INTERFACE_TOKEN } from './rebroadcast-mixin-token';
import { connectRFC4571Parser, startRFC4571Parser } from './rfc4571';
import { RtspSessionParserSpecific, startRtspSession } from './rtsp-session';
import { getSpsResolution } from './sps-resolution';
import { createStreamSettings } from './stream-settings';
import { TRANSCODE_MIXIN_PROVIDER_NATIVE_ID, TranscodeMixinProvider, getTranscodeMixinProviderId } from './transcode-settings';
@@ -41,13 +43,6 @@ interface PrebufferStreamChunk extends StreamChunk {
time?: number;
}
type Prebuffers<T extends string> = {
[key in T]: PrebufferStreamChunk[];
}
type PrebufferParsers = 'rtsp';
const PrebufferParserValues: PrebufferParsers[] = ['rtsp'];
function hasOddities(h264Info: H264Info) {
const h264Oddities = h264Info.fuab
|| h264Info.mtap16
@@ -60,13 +55,13 @@ function hasOddities(h264Info: H264Info) {
return h264Oddities;
}
type PrebufferParsers = 'rtsp';
class PrebufferSession {
parserSessionPromise: Promise<ParserSession<PrebufferParsers>>;
parserSession: ParserSession<PrebufferParsers>;
prebuffers: Prebuffers<PrebufferParsers> = {
rtsp: [],
};
rtspPrebuffer: PrebufferStreamChunk[] = []
parsers: { [container: string]: StreamParser };
sdp: Promise<string>;
usingScryptedParser = false;
@@ -148,10 +143,10 @@ class PrebufferSession {
getDetectedIdrInterval() {
const durations: number[] = [];
if (this.prebuffers.rtsp.length) {
if (this.rtspPrebuffer.length) {
let last: number;
for (const chunk of this.prebuffers.rtsp) {
for (const chunk of this.rtspPrebuffer) {
if (findH264NaluType(chunk, H264_NAL_TYPE_IDR)) {
if (last)
durations.push(chunk.time - last);
@@ -176,9 +171,7 @@ class PrebufferSession {
}
clearPrebuffers() {
for (const prebuffer of PrebufferParserValues) {
this.prebuffers[prebuffer] = [];
}
this.rtspPrebuffer = [];
}
release() {
@@ -244,6 +237,58 @@ class PrebufferSession {
}
}
async parseCodecs(skipResolution?: boolean) {
const sdp = await this.parserSession.sdp;
const parsedSdp = parseSdp(sdp);
const videoSection = parsedSdp.msections.find(msection => msection.type === 'video');
const audioSection = parsedSdp.msections.find(msection => msection.type === 'audio');
const inputAudioCodec = audioSection?.codec;
const inputVideoCodec = videoSection.codec;
let inputVideoResolution: ReturnType<typeof getSpsResolution>;
if (!skipResolution) {
// scan the prebuffer for sps
for (const chunk of this.rtspPrebuffer) {
try {
let sps = findH264NaluType(chunk, H264_NAL_TYPE_SPS);
if (sps) {
const parsedSps = h264SpsParse(sps);
inputVideoResolution = getSpsResolution(parsedSps);
}
else if (!sps) {
// sps = findH265NaluType(chunk, H265_NAL_TYPE_SPS);
}
}
catch (e) {
}
}
if (!inputVideoResolution) {
try {
const spspps = getSpsPps(videoSection);
let { sps } = spspps;
if (sps) {
if (videoSection.codec === 'h264') {
const parsedSps = h264SpsParse(sps);
inputVideoResolution = getSpsResolution(parsedSps);
}
else if (videoSection.codec === 'h265') {
}
}
}
catch (e) {
}
}
}
return {
inputVideoCodec,
inputAudioCodec,
inputVideoResolution,
}
}
async getMixinSettings(): Promise<Setting[]> {
const settings: Setting[] = [];
@@ -251,7 +296,7 @@ class PrebufferSession {
let total = 0;
let start = 0;
for (const prebuffer of this.prebuffers.rtsp) {
for (const prebuffer of this.rtspPrebuffer) {
start = start || prebuffer.time;
for (const chunk of prebuffer.chunks) {
total += chunk.byteLength;
@@ -281,10 +326,10 @@ class PrebufferSession {
combobox: true,
},
{
title: 'FFmpeg Output Arguments Prefix',
title: 'FFmpeg Output Prefix',
group,
subgroup,
description: 'Optional/Advanced: Additional output arguments to pass to the ffmpeg command. These will be placed before the input arguments.',
description: 'Optional/Advanced: Additional output arguments to pass to the ffmpeg command. These will be placed before the output.',
key: this.ffmpegOutputArgumentsKey,
value: this.storage.getItem(this.ffmpegOutputArgumentsKey),
choices: [
@@ -347,8 +392,9 @@ class PrebufferSession {
};
if (session) {
const resolution = session.inputVideoResolution?.width && session.inputVideoResolution?.height
? `${session.inputVideoResolution?.width}x${session.inputVideoResolution?.height}`
const codecInfo = await this.parseCodecs();
const resolution = codecInfo.inputVideoResolution?.width && codecInfo.inputVideoResolution?.height
? `${codecInfo.inputVideoResolution?.width}x${codecInfo.inputVideoResolution?.height}`
: 'unknown';
const idrInterval = this.getDetectedIdrInterval();
@@ -368,7 +414,7 @@ class PrebufferSession {
subgroup,
title: 'Detected Video/Audio Codecs',
readonly: true,
value: (session?.inputVideoCodec?.toString() || 'unknown') + '/' + (session?.inputAudioCodec?.toString() || 'unknown'),
value: (codecInfo?.inputVideoCodec?.toString() || 'unknown') + '/' + (codecInfo?.inputAudioCodec?.toString() || 'unknown'),
description: 'Configuring your camera to H264 video, and audio to Opus or PCM-mulaw (G.711ulaw) is recommended.'
},
{
@@ -479,7 +525,6 @@ class PrebufferSession {
session = startRFC4571Parser(this.console, connectRFC4571Parser(url), sdp, mediaStreamOptions, {
timeout: 10000,
});
this.sdp = session.sdp.then(buffers => Buffer.concat(buffers).toString());
}
else {
const moBuffer = await mediaManager.convertMediaObjectToBuffer(mo, ScryptedMimeTypes.FFmpegInput);
@@ -510,7 +555,6 @@ class PrebufferSession {
audioSoftMuted,
rtspRequestTimeout: 10000,
});
this.sdp = session.sdp.then(buffers => Buffer.concat(buffers).toString());
}
else {
let acodec: string[];
@@ -558,13 +602,18 @@ class PrebufferSession {
// the rtsp parser should always stream copy unless audio is soft muted.
acodec,
});
this.sdp = rtspParser.sdp;
rbo.parsers.rtsp = rtspParser;
session = await startParserSession(ffmpegInput, rbo);
}
}
this.sdp = session.sdp;
session.on('error', e => {
if (!e.message?.startsWith('killed:'))
console.error('rebroadcast error', e)
});
if (this.usingScryptedParser && !isRfc4571) {
// watch the stream for 10 seconds to see if an weird nalu is encountered.
// if one is found and using scrypted parser as default, will need to restart rebroadcast to prevent
@@ -624,31 +673,6 @@ class PrebufferSession {
}
await session.sdp;
// complain to the user about the codec if necessary. upstream may send a audio
// stream but report none exists (to request muting).
if (!audioSoftMuted && advertisedAudioCodec && session.inputAudioCodec !== undefined
&& session.inputAudioCodec !== advertisedAudioCodec) {
this.console.warn('Audio codec plugin reported vs detected mismatch', advertisedAudioCodec, detectedAudioCodec);
}
const advertisedVideoCodec = mso?.video?.codec;
if (advertisedVideoCodec && session.inputVideoCodec !== undefined
&& session.inputVideoCodec !== advertisedVideoCodec) {
this.console.warn('Video codec plugin reported vs detected mismatch', advertisedVideoCodec, session.inputVideoCodec);
}
if (!session.inputAudioCodec) {
this.console.log('No audio stream detected.');
}
// set/update the detected codec, set it to null if no audio was found.
this.storage.setItem(this.lastDetectedAudioCodecKey, session.inputAudioCodec || 'null');
if (session.inputVideoCodec !== 'h264') {
this.console.error(`Video codec is not h264. If there are errors, try changing your camera's encoder output.`);
}
this.parserSession = session;
session.killed.finally(() => {
if (this.parserSession === session)
@@ -656,6 +680,32 @@ class PrebufferSession {
});
session.killed.finally(() => clearTimeout(this.inactivityTimeout));
const codecInfo = await this.parseCodecs();
// complain to the user about the codec if necessary. upstream may send a audio
// stream but report none exists (to request muting).
if (!audioSoftMuted && advertisedAudioCodec && codecInfo.inputAudioCodec !== undefined
&& codecInfo.inputAudioCodec !== advertisedAudioCodec) {
this.console.warn('Audio codec plugin reported vs detected mismatch', advertisedAudioCodec, detectedAudioCodec);
}
const advertisedVideoCodec = mso?.video?.codec;
if (advertisedVideoCodec && codecInfo.inputVideoCodec !== undefined
&& codecInfo.inputVideoCodec !== advertisedVideoCodec) {
this.console.warn('Video codec plugin reported vs detected mismatch', advertisedVideoCodec, codecInfo.inputVideoCodec);
}
if (!codecInfo.inputAudioCodec) {
this.console.log('No audio stream detected.');
}
// set/update the detected codec, set it to null if no audio was found.
this.storage.setItem(this.lastDetectedAudioCodecKey, codecInfo.inputAudioCodec || 'null');
if (codecInfo.inputVideoCodec !== 'h264') {
this.console.error(`Video codec is not h264. If there are errors, try changing your camera's encoder output.`);
}
// settings ui refresh
deviceManager.onMixinEvent(this.mixin.id, this.mixin, ScryptedInterface.Settings, undefined);
@@ -685,28 +735,26 @@ class PrebufferSession {
session.killed.finally(() => clearTimeout(refreshTimeout));
}
for (const container of PrebufferParserValues) {
let shifts = 0;
let prebufferContainer: PrebufferStreamChunk[] = this.prebuffers[container];
let shifts = 0;
let prebufferContainer: PrebufferStreamChunk[] = this.rtspPrebuffer;
session.on(container, (chunk: PrebufferStreamChunk) => {
const now = Date.now();
session.on('rtsp', (chunk: PrebufferStreamChunk) => {
const now = Date.now();
chunk.time = now;
prebufferContainer.push(chunk);
chunk.time = now;
prebufferContainer.push(chunk);
while (prebufferContainer.length && prebufferContainer[0].time < now - prebufferDurationMs) {
prebufferContainer.shift();
shifts++;
}
while (prebufferContainer.length && prebufferContainer[0].time < now - prebufferDurationMs) {
prebufferContainer.shift();
shifts++;
}
if (shifts > 100000) {
prebufferContainer = prebufferContainer.slice();
this.prebuffers[container] = prebufferContainer;
shifts = 0;
}
});
}
if (shifts > 100000) {
prebufferContainer = prebufferContainer.slice();
this.rtspPrebuffer = prebufferContainer;
shifts = 0;
}
});
session.start();
return session;
@@ -736,7 +784,7 @@ class PrebufferSession {
return;
}
this.console.log(this.streamName, 'terminating rebroadcast due to inactivity');
session.kill(new Error('stream inactivity'));
session.kill(new Error('killed: stream inactivity'));
}, 10000);
}
@@ -752,7 +800,7 @@ class PrebufferSession {
if (!this.activeClients && this.parserSessionPromise) {
this.console.log(this.streamName, 'terminating rebroadcast due to low battery or not charging')
const session = await this.parserSessionPromise;
session.kill(new Error('low battery or not charging'));
session.kill(new Error('killed: low battery or not charging'));
}
} else {
this.ensurePrebufferSession();
@@ -783,19 +831,24 @@ class PrebufferSession {
async handleRebroadcasterClient(options: {
findSyncFrame: boolean,
isActiveClient: boolean,
container: PrebufferParsers,
session: ParserSession<PrebufferParsers>,
socketPromise: Promise<Duplex>,
requestedPrebuffer: number,
filter?: (chunk: StreamChunk, prebuffer: boolean) => StreamChunk,
}) {
const { isActiveClient, container, session, socketPromise, requestedPrebuffer } = options;
const { isActiveClient, session, socketPromise, requestedPrebuffer } = options;
this.console.log('sending prebuffer', requestedPrebuffer);
let socket: Duplex;
try {
socket = await socketPromise;
if (!session.isActive) {
// session may be killed while waiting for socket.
socket.destroy();
throw new Error('session terminated before socket connected');
}
}
catch (e) {
// in case the client never connects, do an inactivity check.
@@ -820,70 +873,81 @@ class PrebufferSession {
this.inactivityCheck(session, isActiveClient);
});
handleRebroadcasterClient(socket, {
// console: this.console,
connect: (connection) => {
const now = Date.now();
const safeWriteData = (chunk: StreamChunk, prebuffer?: boolean) => {
if (options.filter) {
chunk = options.filter(chunk, prebuffer);
if (!chunk)
return;
}
const buffered = connection.writeData(chunk);
if (buffered > 100000000) {
this.console.log('more than 100MB has been buffered, did downstream die? killing connection.', this.streamName);
cleanup();
}
}
const cleanup = () => {
session.removeListener(container, safeWriteData);
session.removeListener('killed', cleanup);
connection.destroy();
}
session.on(container, safeWriteData);
session.once('killed', cleanup);
const prebufferContainer: PrebufferStreamChunk[] = this.prebuffers[container];
// if the requested container or the source container is not rtsp, use an exact seek.
// this works better when the requested container is mp4, and rtsp is the source.
// if starting on a sync frame, ffmpeg will skip the first segment while initializing
// on live sources like rtsp. the buffer before the sync frame stream will be enough
// for ffmpeg to analyze and start up in time for the sync frame.
// may be worth considering playing with a few other things to avoid this:
// mpeg-ts as a container (would need to write a muxer)
// specifying the buffer before the sync frame with probesize.
// If h264 oddities are detected, assume ffmpeg will be used.
if (container !== 'rtsp' || !options.findSyncFrame || this.getLastH264Oddities()) {
for (const chunk of prebufferContainer) {
if (chunk.time < now - requestedPrebuffer)
continue;
safeWriteData(chunk, true);
}
}
else {
const parser = this.parsers[container];
const filtered = prebufferContainer.filter(pb => pb.time >= now - requestedPrebuffer);
let availablePrebuffers = parser.findSyncFrame(filtered);
if (!availablePrebuffers) {
this.console.warn('Unable to find sync frame in rtsp prebuffer.');
availablePrebuffers = [];
}
else {
this.console.log('Found sync frame in rtsp prebuffer.');
}
for (const prebuffer of availablePrebuffers) {
safeWriteData(prebuffer, true);
}
}
return cleanup;
let writeData = (data: StreamChunk): number => {
if (data.startStream) {
socket.write(data.startStream)
}
})
const writeDataWithoutStartStream = (data: StreamChunk) => {
for (const chunk of data.chunks) {
socket.write(chunk);
}
return socket.writableLength;
};
writeData = writeDataWithoutStartStream;
return writeDataWithoutStartStream(data);
}
const safeWriteData = (chunk: StreamChunk, prebuffer?: boolean) => {
if (options.filter) {
chunk = options.filter(chunk, prebuffer);
if (!chunk)
return;
}
const buffered = writeData(chunk);
if (buffered > 100000000) {
this.console.log('more than 100MB has been buffered, did downstream die? killing connection.', this.streamName);
cleanup();
}
}
const cleanup = () => {
socket.destroy();
session.removeListener('rtsp', safeWriteData);
session.removeListener('killed', cleanup);
};
session.on('rtsp', safeWriteData);
session.once('killed', cleanup);
socket.once('close', () => {
cleanup();
});
// socket.on('error', e => this.console.log('client stream ended'));
const now = Date.now();
const prebufferContainer: PrebufferStreamChunk[] = this.rtspPrebuffer;
// if starting on a sync frame, ffmpeg will skip the first segment while initializing
// on live sources like rtsp. the buffer before the sync frame stream will be enough
// for ffmpeg to analyze and start up in time for the sync frame.
// If h264 oddities are detected, assume ffmpeg will be used.
if (!options.findSyncFrame || this.getLastH264Oddities()) {
for (const chunk of prebufferContainer) {
if (chunk.time < now - requestedPrebuffer)
continue;
safeWriteData(chunk, true);
}
}
else {
const parser = this.parsers['rtsp'];
const filtered = prebufferContainer.filter(pb => pb.time >= now - requestedPrebuffer);
let availablePrebuffers = parser.findSyncFrame(filtered);
if (!availablePrebuffers) {
this.console.warn('Unable to find sync frame in rtsp prebuffer.');
availablePrebuffers = [];
}
else {
this.console.log('Found sync frame in rtsp prebuffer.');
}
for (const prebuffer of availablePrebuffers) {
safeWriteData(prebuffer, true);
}
}
}
async getVideoStream(findSyncFrame: boolean, options?: RequestMediaStreamOptions) {
@@ -908,7 +972,8 @@ class PrebufferSession {
requestedPrebuffer = Math.min(defaultPrebuffer, this.getDetectedIdrInterval() || defaultPrebuffer);;
}
const mediaStreamOptions: ResponseMediaStreamOptions = session.negotiateMediaStream(options);
const codecInfo = await this.parseCodecs(true);
const mediaStreamOptions: ResponseMediaStreamOptions = session.negotiateMediaStream(options, codecInfo.inputVideoCodec, codecInfo.inputAudioCodec);
let sdp = await this.sdp;
if (!mediaStreamOptions.video?.h264Info && this.usingScryptedParser) {
mediaStreamOptions.video ||= {};
@@ -1010,8 +1075,6 @@ class PrebufferSession {
urls = await getUrlLocalAdresses(this.console, url);
}
const container = 'rtsp';
mediaStreamOptions.sdp = sdp;
const isActiveClient = options?.refresh !== false;
@@ -1019,7 +1082,6 @@ class PrebufferSession {
this.handleRebroadcasterClient({
findSyncFrame,
isActiveClient,
container,
requestedPrebuffer,
socketPromise,
session,
@@ -1037,15 +1099,15 @@ class PrebufferSession {
mediaStreamOptions.audio.sampleRate ||= audioSection.rtpmap.clock;
}
if (session.inputVideoResolution?.width && session.inputVideoResolution?.height) {
if (codecInfo.inputVideoResolution?.width && codecInfo.inputVideoResolution?.height) {
// this may be an audio only request.
if (mediaStreamOptions.video)
Object.assign(mediaStreamOptions.video, session.inputVideoResolution);
Object.assign(mediaStreamOptions.video, codecInfo.inputVideoResolution);
}
const now = Date.now();
let available = 0;
const prebufferContainer: PrebufferStreamChunk[] = this.prebuffers[container];
const prebufferContainer: PrebufferStreamChunk[] = this.rtspPrebuffer;
for (const prebuffer of prebufferContainer) {
if (prebuffer.time < now - requestedPrebuffer)
continue;
@@ -1066,11 +1128,11 @@ class PrebufferSession {
const ffmpegInput: FFmpegInput = {
url,
urls,
container,
container: 'rtsp',
inputArguments: [
...inputArguments,
...(this.parsers[container].inputArguments || []),
'-f', this.parsers[container].container,
...(this.parsers['rtsp'].inputArguments || []),
'-f', this.parsers['rtsp'].container,
'-i', url,
],
mediaStreamOptions,
@@ -1165,7 +1227,6 @@ class PrebufferMixin extends SettingsMixinDeviceBase<VideoCamera> implements Vid
prebufferSession.handleRebroadcasterClient({
findSyncFrame: true,
isActiveClient: true,
container: 'rtsp',
session,
socketPromise: Promise.resolve(client),
requestedPrebuffer,
@@ -1704,45 +1765,16 @@ export class RebroadcastPlugin extends AutoenableMixinProvider implements MixinP
async getMixin(mixinDevice: any, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState: WritableDeviceState) {
this.setHasEnabledMixin(mixinDeviceState.id);
// 8-11-2022
// old scrypted had a bug where mixin device state was not exposing properties like id correctly
// across rpc boundaries.
let fork = false;
try {
const info = await systemManager.getComponent('info');
const version = await info.getVersion();
fork = semver.gte(version, '0.2.5');
}
catch (e) {
}
const { id } = mixinDeviceState;
if (fork && sdk.fork && typeof mixinDeviceState.id === 'string') {
const forked = sdk.fork<RebroadcastPluginFork>();
const { worker } = forked;
try {
const result = await forked.result;
const mixin = await result.newPrebufferMixin(async () => this.transcodeStorageSettings.values, mixinDevice, mixinDeviceInterfaces, mixinDeviceState);
this.currentMixins.set(mixin, {
worker,
id,
});
return mixin;
}
catch (e) {
throw e;
}
}
else {
const ret = await newPrebufferMixin(async () => this.transcodeStorageSettings.values, mixinDevice, mixinDeviceInterfaces, mixinDeviceState);
this.currentMixins.set(ret, {
worker: undefined,
id,
});
return ret;
}
const forked = sdk.fork<RebroadcastPluginFork>();
const { worker } = forked;
const result = await forked.result;
const mixin = await result.newPrebufferMixin(async () => this.transcodeStorageSettings.values, mixinDevice, mixinDeviceInterfaces, mixinDeviceState);
this.currentMixins.set(mixin, {
worker,
id,
});
return mixin;
}
async releaseMixin(id: string, mixinDevice: PrebufferMixin) {

Some files were not shown because too many files have changed in this diff Show More