Compare commits

..

99 Commits

Author SHA1 Message Date
Koushik Dutta
35b4028a47 rpc: how did this ever work? 2023-12-21 21:37:11 -08:00
Koushik Dutta
bf6038a5d3 postbeta 2023-12-21 21:37:01 -08:00
Koushik Dutta
e1b2216543 docker: correct gh action 2023-12-21 21:25:10 -08:00
Koushik Dutta
89c1682421 docker: fixup 2023-12-21 21:24:41 -08:00
Koushik Dutta
5a4c527c59 docker: fixup gh action 2023-12-21 21:22:29 -08:00
Koushik Dutta
9d9c10aa1e docker: remove armv7 2023-12-21 21:21:53 -08:00
Koushik Dutta
ccf20a5fca docker: remove arm7 2023-12-21 21:20:52 -08:00
Koushik Dutta
692e7964a7 rebroadcast: fixes for wyze 2023-12-21 21:10:17 -08:00
Koushik Dutta
57e38072b1 server: fix static vs instance properties 2023-12-21 21:10:09 -08:00
Koushik Dutta
4e8e862482 postbeta 2023-12-21 21:09:41 -08:00
Koushik Dutta
eddcef8e54 rebroadcast: add trigger to reload video streams 2023-12-21 10:39:19 -08:00
Koushik Dutta
09edc6d75e wyze: add bitrate select 2023-12-21 10:28:25 -08:00
Koushik Dutta
72c7c43d79 wyze: fix race condition around teardown 2023-12-21 00:20:49 -08:00
Koushik Dutta
805f471ff9 wyze: add audio support 2023-12-21 00:10:43 -08:00
Koushik Dutta
6f797d53ec rebroadcast/webrtc: fix audio sample rate assumptions 2023-12-21 00:09:20 -08:00
Koushik Dutta
4903a0efcd sdk: update 2023-12-20 23:54:52 -08:00
Koushik Dutta
36e3fcf429 rebroadcast: fix support for rfc4571 2023-12-20 23:50:08 -08:00
Koushik Dutta
78a126fe0a wyze: make plugin visible on npm 2023-12-20 19:09:51 -08:00
Koushik Dutta
5029baf2d4 wyze: add support for substream 2023-12-20 18:05:06 -08:00
Koushik Dutta
769bc014a8 wyze: disable substream 2023-12-20 14:51:37 -08:00
Koushik Dutta
096700486a wyze: initial commit 2023-12-20 14:51:20 -08:00
Koushik Dutta
b3a7d6be9c videoanalysis: smart motion sensor now retains the last thumbnail 2023-12-20 09:36:02 -08:00
Koushik Dutta
05751bce44 Merge branch 'main' of github.com:koush/scrypted 2023-12-19 09:19:51 -08:00
Koushik Dutta
dced62a527 videoanalysis: publish 2023-12-19 09:19:46 -08:00
1vivy
359f1cfc2f docker: fixup nvidia example (#1228) 2023-12-18 15:19:56 -08:00
Koushik Dutta
d4cae8abbb common: add packet loss util to browser signaling session 2023-12-18 12:36:57 -08:00
Koushik Dutta
0e6b3346ed common: add packet loss util to browser signaling session 2023-12-18 12:34:36 -08:00
Koushik Dutta
2409cc457c sdk: remove startId to simplify implementations 2023-12-18 09:41:51 -08:00
Koushik Dutta
0b794aa381 sdk: remove reverseOrder to simplify implementations 2023-12-18 09:34:55 -08:00
Koushik Dutta
98017a5aa6 snapshot: debounce pics for 2 seconds 2023-12-17 22:58:35 -08:00
Koushik Dutta
f2e7cc4017 rebroadcast: validate device is cam or doorbell 2023-12-16 10:16:10 -08:00
Koushik Dutta
d7201a16a7 snapshot/videoanalysis: publish 2023-12-15 10:56:20 -08:00
Koushik Dutta
99d1dc7282 mqtt: support default external brokers 2023-12-13 23:51:44 -08:00
Koushik Dutta
18ae09e41c snapshot: fixup internal invocation 2023-12-13 12:22:20 -08:00
Koushik Dutta
2ebe774e59 snapshot: beta plugin that bypasses media manager for local urls 2023-12-13 12:11:49 -08:00
Koushik Dutta
b887b8a47c core: add weight to core snapshot 2023-12-13 10:51:23 -08:00
Koushik Dutta
8e391dee2f Merge branch 'main' of github.com:koush/scrypted 2023-12-13 09:07:06 -08:00
Koushik Dutta
469f693d58 snapshot: use internal prebuffer converter rather than media manager 2023-12-13 09:07:02 -08:00
slyoldfox
1c96a7d492 bticino: Implement HKSV recording for the bticino and switch the stream to rtsp (#1220)
* Implement HKSV recording for the bticino and switch the stream to rtsp

* Implement HKSV recording for the bticino and switch the stream to rtsp

---------

Co-authored-by: Marc Vanbrabant <marc@foreach.be>
2023-12-12 09:53:53 -08:00
Koushik Dutta
3f1b45c435 rebroadcast: fix unhandled rejection on stream startup failure 2023-12-12 09:44:59 -08:00
Koushik Dutta
4b715e55d2 server: beta 2023-12-11 13:01:01 -08:00
Koushik Dutta
75dc63acc3 postbeta 2023-12-11 13:00:45 -08:00
Koushik Dutta
6c79f42bb7 videoanalysis: add min object detection time 2023-12-11 10:22:32 -08:00
Koushik Dutta
9d4f006caa snapshot: fix vips rgba/gray ops 2023-12-11 09:28:24 -08:00
Koushik Dutta
05b206f897 cloud: dont attempt to register with server if not logged in 2023-12-10 10:12:39 -08:00
Koushik Dutta
1f22218b23 server: recreate cert with valid date range on startup 2023-12-10 09:21:40 -08:00
Koushik Dutta
c9568df165 mqtt: add online stat 2023-12-08 18:47:45 -08:00
Koushik Dutta
c98e91cd39 videoanalysis: add zone choices to smart sensor 2023-12-07 22:35:15 -08:00
Koushik Dutta
e3ecff04ce videoanalysis: publish 2023-12-07 13:04:51 -08:00
Koushik Dutta
f9f50f34c3 common: update digest auth 2023-12-07 12:41:07 -08:00
Koushik Dutta
cd298f7d76 common: shuffle some polygon code 2023-12-07 12:33:58 -08:00
Koushik Dutta
c95248fce0 videoanalysis: use lib that supports convex polys 2023-12-07 12:27:44 -08:00
Koushik Dutta
e50f3fa793 sdk/videoanalysis: remove filterMode from sdk. internal to video analysis only. 2023-12-07 09:43:33 -08:00
Koushik Dutta
c74be7e90f objectdetector: add zone option to smart sensor 2023-12-07 09:37:02 -08:00
Koushik Dutta
4d288727ce sdk: use a filterMode rather than a new zone type 2023-12-07 09:02:11 -08:00
Koushik Dutta
1f19dc191d sdk: add zone that doesnt filter, only used for observation 2023-12-07 08:57:14 -08:00
Koushik Dutta
37d4e5be73 homekit: send client size hints to prevent apple tv crash 2023-12-06 10:10:13 -08:00
Koushik Dutta
e64ec98211 mqtt: add temp, humidity, flood 2023-12-05 22:29:36 -08:00
Koushik Dutta
8b6c0c4f7b homekit: add hint for adaptive bitrate 2023-12-05 22:09:23 -08:00
Koushik Dutta
3b16c68c75 sdk: add fingerprint to getVideoStream 2023-12-05 22:07:36 -08:00
Koushik Dutta
67be05880c core: handle node pty failure 2023-12-04 19:28:46 -08:00
Koushik Dutta
414a9403c2 mqtt: implement basic autodiscovery 2023-12-04 13:58:59 -08:00
Koushik Dutta
053106415c tapo: publish 2023-12-02 09:25:23 -08:00
S.Feng
f3690af92a tapo: fix new firmware using sha256 hash (#1208)
ref:
5d3953a948
2023-12-02 09:04:11 -08:00
Koushik Dutta
c4cc12fdff snapshot: publish 2023-12-01 16:57:04 -08:00
Koushik Dutta
58e8772f7c sdk: publish 2023-12-01 14:17:25 -08:00
Koushik Dutta
4ae9b72471 sdk: add stream resize feedback 2023-12-01 08:51:39 -08:00
Koushik Dutta
a8c64aa2d4 sdk: improve adaptive flags 2023-12-01 08:40:08 -08:00
Koushik Dutta
8ccbba485a snapshot: publish 2023-11-30 09:59:25 -08:00
Koushik Dutta
2ec192e0fd snapshot: fix thumbnail generation near leading bounds 2023-11-30 09:59:09 -08:00
Koushik Dutta
e257953338 sdk: add landmarks and clip paths to detections 2023-11-30 08:26:14 -08:00
Koushik Dutta
9e80eca8e1 Merge branch 'main' of github.com:koush/scrypted 2023-11-29 13:17:27 -08:00
Koushik Dutta
172b32fd47 sdk: update detection result fields 2023-11-29 13:17:22 -08:00
slyoldfox
a6bf055b85 Avoid 'No audio stream detected' in prebuffer when speex is the inputAudioCodec (#1203)
Co-authored-by: Marc Vanbrabant <marc@foreach.be>
2023-11-28 08:51:19 -08:00
Koushik Dutta
dab5be1103 alexa: fix potential response race 2023-11-27 19:42:11 -08:00
Koushik Dutta
126c489934 external: update axios digest auth 2023-11-27 19:06:36 -08:00
Koushik Dutta
7f714b3d6a Merge branch 'main' of github.com:koush/scrypted 2023-11-27 19:06:16 -08:00
Koushik Dutta
fde3c47d8c common: improve ffmpeg kill func, add queue end promise 2023-11-27 19:04:09 -08:00
Koushik Dutta
4b1623dfce Update bug_report.md 2023-11-27 08:55:31 -08:00
Koushik Dutta
1e62f7a418 Update bug_report.md 2023-11-27 08:52:04 -08:00
Koushik Dutta
83c9d9a4a6 external: update axios digest auth 2023-11-26 20:13:03 -08:00
Koushik Dutta
b42afe0ca0 external: update axios digest auth 2023-11-26 20:11:27 -08:00
Koushik Dutta
e8e5f9b33e snapshot: add imageOp util function 2023-11-26 18:53:44 -08:00
Koushik Dutta
15916d83b8 rebroadcast: rollback wallclocks change, it is preventing frame updates in webassembly decoder 2023-11-26 18:31:16 -08:00
Koushik Dutta
c1327974b2 ring: update ring-client-api 2023-11-26 18:31:04 -08:00
Koushik Dutta
33e2291912 webrtc: reduce preference of turn 2023-11-26 16:10:53 -08:00
Koushik Dutta
2d2c5c436f ring: publish 2023-11-25 08:18:50 -08:00
Koushik Dutta
8088ae20b1 reolink/rebroadcast: enable wallclock timestamps on rtmp 2023-11-24 19:16:28 -08:00
Koushik Dutta
4c658b8d99 mqtt: default to empty args 2023-11-24 09:04:42 -08:00
Koushik Dutta
aab78ec797 mqtt: support invoking methods 2023-11-24 09:02:26 -08:00
Koushik Dutta
11ecff985d snapshot: fix file pathing on windows 2023-11-23 19:38:17 -08:00
Koushik Dutta
80a1a78a79 install: fix env 2023-11-23 09:12:43 -08:00
Koushik Dutta
7875c51d62 install: prevent usage of global libvips 2023-11-23 08:50:28 -08:00
Koushik Dutta
b04aa75117 alexa: fix race condition in sendResponse 2023-11-22 21:23:01 -08:00
Koushik Dutta
fc7d1eaf32 snapshot: consolidate image ops 2023-11-22 20:26:53 -08:00
Koushik Dutta
e5a7a55be8 snapshot: refactor 2023-11-22 14:33:33 -08:00
Koushik Dutta
fa9a2eb947 ha: publish 2023-11-22 13:38:39 -08:00
Koushik Dutta
30891e0769 snapshot: lazy load sharp 2023-11-22 13:37:17 -08:00
Koushik Dutta
fb8256709a postrelease 2023-11-22 13:07:51 -08:00
107 changed files with 2665 additions and 749 deletions

View File

@@ -7,6 +7,17 @@ assignees: ''
---
# Github Issues is not a Forum
**This issue tracker is not for hardware support or feature requests**. If you are troubleshooting adding a device for the first time, use Discord, Reddit, or Github Discussions. However, if something **was working**, and is now **no longer working**, you may create a Github issue.
Issues that do not meet these requirements or are improperly filled out will be closed immediately.
# New Issue Instructions
1. Delete this section and everything above it.
2. Fill out the sections below.
**Describe the bug**
A clear and concise description of what the bug is. The issue tracker is only for reporting bugs in Scrypted; for general support, check Discord. Hardware support requests or assistance requests will be immediately closed.

View File

@@ -35,12 +35,10 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/arm64,linux/armhf
platforms: linux/arm64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_ARM64 }}
platforms: linux/arm64
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_ARM7 }}
platforms: linux/armhf
- name: Login to Docker Hub
uses: docker/login-action@v2
@@ -63,7 +61,7 @@ jobs:
BASE=${{ matrix.BASE }}
context: install/docker/
file: install/docker/Dockerfile.${{ matrix.FLAVOR }}
platforms: linux/amd64,linux/armhf,linux/arm64
platforms: linux/amd64,linux/arm64
push: true
tags: |
koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}

View File

@@ -61,12 +61,10 @@ jobs:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/arm64,linux/armhf
platforms: linux/arm64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_ARM64 }}
platforms: linux/arm64
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_ARM7 }}
platforms: linux/armhf
- name: Login to Docker Hub
uses: docker/login-action@v2
@@ -89,7 +87,7 @@ jobs:
SCRYPTED_INSTALL_VERSION=${{ steps.package-version.outputs.NPM_VERSION }}
context: install/docker/
file: install/docker/Dockerfile${{ matrix.SUPERVISOR }}
platforms: linux/amd64,linux/arm64,linux/armhf
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ format('koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}

.gitmodules (3 changes)
View File

@@ -35,3 +35,6 @@
[submodule "plugins/cloud/node-nat-upnp"]
path = plugins/cloud/external/node-nat-upnp
url = ../../koush/node-nat-upnp.git
[submodule "plugins/wyze/docker-wyze-bridge"]
path = plugins/wyze/docker-wyze-bridge
url = ../../koush/docker-wyze-bridge.git

View File

@@ -5,6 +5,7 @@ class EndError extends Error {
export function createAsyncQueue<T>() {
let ended: Error | undefined;
const endDeferred = new Deferred<void>();
const waiting: Deferred<T>[] = [];
const queued: { item: T, dequeued?: Deferred<void> }[] = [];
@@ -75,7 +76,8 @@ export function createAsyncQueue<T>() {
if (ended)
return false;
// catch to prevent unhandled rejection.
ended = e || new EndError()
ended = e || new EndError();
endDeferred.resolve();
while (waiting.length) {
waiting.shift().reject(ended);
}
@@ -124,6 +126,7 @@ export function createAsyncQueue<T>() {
get ended() {
return ended;
},
endPromise: endDeferred.promise,
take,
clear() {
return clear();
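
For context, a minimal consumer sketch built only on the queue surface visible in this hunk (take and the new endPromise); the exact take() signature is an assumption.

// typed structurally after the fields shown above; assumes take() resolves the
// next queued item and rejects once the queue has ended
async function takeOrEnd<T>(queue: { take(): Promise<T>; endPromise: Promise<void> }): Promise<T | undefined> {
    return Promise.race([
        // swallow the rejection take() produces after the queue ends so the
        // losing branch of the race does not become an unhandled rejection
        queue.take().catch(() => undefined),
        // the new endPromise resolves once the queue has been ended
        queue.endPromise.then(() => undefined),
    ]);
}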

View File

@@ -78,11 +78,17 @@ export function createPromiseDebouncer<T>() {
export function createMapPromiseDebouncer<T>() {
const map = new Map<string, Promise<T>>();
return (key: any, func: () => Promise<T>): Promise<T> => {
return (key: any, debounce: number, func: () => Promise<T>): Promise<T> => {
const keyStr = JSON.stringify(key);
let value = map.get(keyStr);
if (!value) {
value = func().finally(() => map.delete(keyStr));
value = func().finally(() => {
if (!debounce) {
map.delete(keyStr);
return;
}
setTimeout(() => map.delete(keyStr), debounce);
});
map.set(keyStr, value);
}
return value;
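
A usage sketch of the updated debouncer signature (key, debounce, func); the import path and the snapshot fetch helper are assumptions.

// module path is an assumption; the debouncer lives in @scrypted/common
import { createMapPromiseDebouncer } from '@scrypted/common/src/promise-utils';

// hypothetical fetch used only for illustration
declare function fetchSnapshot(cameraId: string): Promise<Buffer>;

const debouncedSnapshot = createMapPromiseDebouncer<Buffer>();

// while a fetch is in flight, and for 2000ms after it settles, callers with the
// same key reuse the cached promise instead of invoking the function again
async function takePicture(cameraId: string) {
    return debouncedSnapshot({ cameraId, kind: 'snapshot' }, 2000, () => fetchSnapshot(cameraId));
}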

View File

@@ -59,13 +59,13 @@ export async function read16BELengthLoop(readable: Readable, options: {
export class StreamEndError extends Error {
constructor() {
super()
super('stream ended');
}
}
export async function readLength(readable: Readable, length: number): Promise<Buffer> {
if (readable.readableEnded || readable.destroyed)
throw new Error("stream ended");
throw new StreamEndError();
if (!length) {
return Buffer.alloc(0);
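
A minimal sketch of distinguishing the expected end-of-stream case from other read failures using the StreamEndError above; the import path is an assumption.

import { Readable } from 'stream';
// module path is an assumption; readLength and StreamEndError are in @scrypted/common
import { readLength, StreamEndError } from '@scrypted/common/src/read-stream';

async function tryReadHeader(readable: Readable): Promise<Buffer | undefined> {
    try {
        return await readLength(readable, 16);
    }
    catch (e) {
        // the peer closed the stream; treat as normal teardown, not a failure
        if (e instanceof StreamEndError)
            return undefined;
        throw e;
    }
}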

View File

@@ -73,6 +73,14 @@ function createOptions() {
return options;
}
// can be called on anything with getStats, ie for receiver specific reports or connection reports.
export async function getPacketsLost(t: { getStats(): Promise<RTCStatsReport> }) {
const stats = await t.getStats();
const packetsLost = ([...stats.values()] as { packetsLost: number }[]).filter(stat => 'packetsLost' in stat).map(stat => stat.packetsLost);
const total = packetsLost.reduce((p, c) => p + c, 0);
return total;
}
export class BrowserSignalingSession implements RTCSignalingSession {
private pc: RTCPeerConnection;
pcDeferred = new Deferred<RTCPeerConnection>();
@@ -90,6 +98,10 @@ export class BrowserSignalingSession implements RTCSignalingSession {
return this.options;
}
async getPacketsLost() {
return getPacketsLost(this.pc);
}
async setMicrophone(enabled: boolean) {
if (this.microphone && enabled && !this.micEnabled) {
this.micEnabled = true;
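
A small monitoring sketch against the getPacketsLost() surface added above; the sampling interval and logging are illustrative.

// typed structurally so it accepts a BrowserSignalingSession or anything else
// exposing the getPacketsLost() method shown above
function logPacketLoss(session: { getPacketsLost(): Promise<number> }, intervalMs = 5000) {
    let previous = 0;
    return setInterval(async () => {
        const total = await session.getPacketsLost();
        console.log(`packets lost: ${total} (+${total - previous} since last sample)`);
        previous = total;
    }, intervalMs);
}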

View File

@@ -207,6 +207,10 @@ export function parseRtpMap(mlineType: string, rtpmap: string) {
codec = 'pcm_s16be';
ffmpegEncoder = 'pcm_s16be';
}
else if (rtpmap?.includes('speex')) {
codec = 'speex';
ffmpegEncoder = 'libspeex';
}
else if (rtpmap?.includes('h264')) {
codec = 'h264';
}
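
A hedged example of the mapping this adds; the import path, exact argument format, and return shape are assumptions based on the fields assigned above.

// module path is an assumption
import { parseRtpMap } from '@scrypted/common/src/sdp-utils';

// an audio rtpmap attribute as it might appear in a SIP/SDP offer
const parsed = parseRtpMap('audio', 'a=rtpmap:110 speex/8000');
// with this change, speex is recognized and paired with ffmpeg's libspeex encoder
console.log(parsed.codec, parsed.ffmpegEncoder); // expected: 'speex' 'libspeex'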

View File

@@ -1,6 +1,6 @@
# Home Assistant Addon Configuration
name: Scrypted
version: "18-jammy-full.s6-v0.66.0"
version: "18-jammy-full.s6-v0.68.0"
slug: scrypted
description: Scrypted is a high performance home video integration and automation platform
url: "https://github.com/koush/scrypted"

View File

@@ -34,14 +34,14 @@ services:
- SCRYPTED_WEBHOOK_UPDATE_AUTHORIZATION=Bearer SET_THIS_TO_SOME_RANDOM_TEXT
- SCRYPTED_WEBHOOK_UPDATE=http://localhost:10444/v1/update
# Uncomment next 3 lines for Nvidia GPU support.
# - NVIDIA_VISIBLE_DEVICES=all
# - NVIDIA_DRIVER_CAPABILITIES=all
# Uncomment next line to run avahi-daemon inside the container
# Don't use if dbus and avahi run on the host and are bind-mounted
# (see below under "volumes")
# - SCRYPTED_DOCKER_AVAHI=true
# Uncomment next 3 lines for Nvidia GPU support.
# - NVIDIA_VISIBLE_DEVICES=all
# - NVIDIA_DRIVER_CAPABILITIES=all
# runtime: nvidia
volumes:

View File

@@ -1,12 +1,12 @@
{
"name": "scrypted",
"version": "1.3.0",
"version": "1.3.4",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "scrypted",
"version": "1.3.0",
"version": "1.3.4",
"license": "ISC",
"dependencies": {
"@scrypted/client": "^1.3.2",

View File

@@ -1,6 +1,6 @@
{
"name": "scrypted",
"version": "1.3.3",
"version": "1.3.5",
"description": "",
"main": "./dist/packages/cli/src/main.js",
"bin": {

View File

@@ -18,7 +18,12 @@ async function runCommand(command: string, ...args: string[]) {
command += '.cmd';
console.log('running', command, ...args);
const cp = child_process.spawn(command, args, {
stdio: 'inherit'
stdio: 'inherit',
env: {
...process.env,
// https://github.com/lovell/sharp/blob/eefaa998725cf345227d94b40615e090495c6d09/lib/libvips.js#L115C19-L115C46
SHARP_IGNORE_GLOBAL_LIBVIPS: 'true',
},
});
await once(cp, 'exit');
if (cp.exitCode)

View File

@@ -1,24 +1,40 @@
{
"name": "@scrypted/alexa",
"version": "0.2.7",
"version": "0.2.10",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/alexa",
"version": "0.2.7",
"version": "0.2.10",
"dependencies": {
"axios": "^1.3.4",
"uuid": "^9.0.0"
},
"devDependencies": {
"@scrypted/common": "../../common",
"@scrypted/sdk": "../../sdk",
"@types/node": "^18.4.2"
}
},
"../../common": {
"version": "1.0.1",
"dev": true,
"license": "ISC",
"dependencies": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^3.0.2",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^4.4.3"
},
"devDependencies": {
"@types/node": "^16.9.0"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.104",
"version": "0.2.108",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -54,6 +70,13 @@
"typedoc": "^0.23.21"
}
},
"../common": {
"extraneous": true
},
"node_modules/@scrypted/common": {
"resolved": "../../common",
"link": true
},
"node_modules/@scrypted/sdk": {
"resolved": "../../sdk",
"link": true
@@ -70,9 +93,9 @@
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/axios": {
"version": "1.3.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.3.4.tgz",
"integrity": "sha512-toYm+Bsyl6VC5wSkfkbbNB6ROv7KY93PEBBL6xyDczaIHasAiv4wPqQ/c4RjoQzipxRD2W5g21cOqQulZ7rHwQ==",
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
"integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
"dependencies": {
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/alexa",
"version": "0.2.8",
"version": "0.2.10",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",
@@ -39,6 +39,7 @@
},
"devDependencies": {
"@types/node": "^18.4.2",
"@scrypted/sdk": "../../sdk"
"@scrypted/sdk": "../../sdk",
"@scrypted/common": "../../common"
}
}

View File

@@ -79,7 +79,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
if (status === DeviceMixinStatus.Setup)
await this.syncEndpoints();
if (status === DeviceMixinStatus.Setup || status === DeviceMixinStatus.AlreadySetup) {
if (status === DeviceMixinStatus.Setup || status === DeviceMixinStatus.AlreadySetup) {
if (!this.devices.has(eventSource.id)) {
this.devices.set(eventSource.id, eventSource);
@@ -142,7 +142,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
await this.syncEndpoints();
}
async deviceListen(eventSource: ScryptedDevice | undefined, eventDetails: EventDetails, eventData: any) : Promise<void> {
async deviceListen(eventSource: ScryptedDevice | undefined, eventDetails: EventDetails, eventData: any): Promise<void> {
if (!eventSource)
return;
@@ -194,14 +194,14 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
// nothing to report
if (data.context === undefined && data.event.payload === undefined)
return;
return;
data = await this.addAccessToken(data);
await this.postEvent(data);
}
private async addAccessToken(data: any) : Promise<any> {
private async addAccessToken(data: any): Promise<any> {
const accessToken = await this.getAccessToken();
if (data.event === undefined)
@@ -232,7 +232,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
'api.fe.amazonalexa.com'
];
async getAlexaEndpoint() : Promise<string> {
async getAlexaEndpoint(): Promise<string> {
if (this.storageSettings.values.apiEndpoint)
return this.storageSettings.values.apiEndpoint;
@@ -276,7 +276,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
});
}
async getEndpoints() : Promise<DiscoveryEndpoint[]> {
async getEndpoints(): Promise<DiscoveryEndpoint[]> {
const endpoints: DiscoveryEndpoint[] = [];
for (const id of Object.keys(systemManager.getSystemState())) {
@@ -284,7 +284,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
if (!device.mixins?.includes(this.id))
continue;
const endpoint = await this.getEndpointForDevice(device);
if (endpoint)
endpoints.push(endpoint);
@@ -319,7 +319,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
const endpoints = await this.getEndpoints();
if (!endpoints.length)
return [];
return [];
const accessToken = await this.getAccessToken();
const data = {
@@ -448,7 +448,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
self.console.warn(error?.response?.data);
self.log.a(error?.response?.data?.error_description);
break;
case 'expired_token':
self.console.warn(error?.response?.data);
self.log.a(error?.response?.data?.error_description);
@@ -480,9 +480,14 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
this.storageSettings.values.tokenInfo = grant;
this.storageSettings.values.apiEndpoint = undefined;
this.accessToken = undefined;
const self = this;
let accessToken = await this.getAccessToken().catch(reason => {
let accessToken: any;
try {
accessToken = await this.getAccessToken();
}
catch (reason) {
self.console.error(`Failed to handle the AcceptGrant directive because ${reason}`);
this.storageSettings.values.tokenInfo = undefined;
@@ -491,36 +496,23 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
response.send(authErrorResponse("ACCEPT_GRANT_FAILED", `Failed to handle the AcceptGrant directive because ${reason}`, directive));
return undefined;
});
if (accessToken !== undefined) {
this.log.clearAlerts();
try {
response.send({
"event": {
"header": {
"namespace": "Alexa.Authorization",
"name": "AcceptGrant.Response",
"messageId": createMessageId(),
"payloadVersion": "3"
},
"payload": {}
}
});
} catch (error) {
this.console.error(`AcceptGrant.Response failed because ${error}`);
this.storageSettings.values.tokenInfo = undefined;
this.storageSettings.values.apiEndpoint = undefined;
this.accessToken = undefined;
throw error;
return;
};
this.log.clearAlerts();
response.send({
"event": {
"header": {
"namespace": "Alexa.Authorization",
"name": "AcceptGrant.Response",
"messageId": createMessageId(),
"payloadVersion": "3"
},
"payload": {}
}
}
});
}
async getEndpointForDevice(device: ScryptedDevice) : Promise<DiscoveryEndpoint> {
async getEndpointForDevice(device: ScryptedDevice): Promise<DiscoveryEndpoint> {
if (!device)
return;
@@ -545,7 +537,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
};
let supportedEndpointHealths: any[] = [];
if (device.interfaces.includes(ScryptedInterface.Online)) {
supportedEndpointHealths.push({
"name": "connectivity"
@@ -632,8 +624,10 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
debug("received directive from alexa", mapName, body);
const handler = alexaHandlers.get(mapName);
if (handler)
return handler.apply(this, [request, response, directive]);
if (handler) {
await handler.apply(this, [request, response, directive]);
return;
}
const deviceHandler = alexaDeviceHandlers.get(mapName);
@@ -644,7 +638,8 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
return;
}
return deviceHandler.apply(this, [request, response, directive, device]);
await deviceHandler.apply(this, [request, response, directive, device]);
return;
} else {
this.console.error(`no handler for: ${mapName}`);
}

View File

@@ -4,6 +4,7 @@ import { v4 as createMessageId } from 'uuid';
import { AlexaHttpResponse, sendDeviceResponse } from "../../common";
import { alexaDeviceHandlers } from "../../handlers";
import { Response, WebRTCAnswerGeneratedForSessionEvent, WebRTCSessionConnectedEvent, WebRTCSessionDisconnectedEvent } from '../../alexa'
import { Deferred } from '@scrypted/common/src/deferred';
export class AlexaSignalingSession implements RTCSignalingSession {
constructor(public response: AlexaHttpResponse, public directive: any) {
@@ -13,7 +14,8 @@ export class AlexaSignalingSession implements RTCSignalingSession {
__proxy_props: { options: RTCSignalingOptions; };
options: RTCSignalingOptions;
remoteDescription = new Deferred<void>();
async getOptions(): Promise<RTCSignalingOptions> {
return this.options;
}
@@ -39,11 +41,17 @@ export class AlexaSignalingSession implements RTCSignalingSession {
}
async createLocalDescription(type: "offer" | "answer", setup: RTCAVSignalingSetup, sendIceCandidate: RTCSignalingSendIceCandidate): Promise<RTCSessionDescriptionInit> {
if (type !== 'offer')
throw new Error('Alexa only supports RTC offer');
if (type !== 'offer') {
const e = new Error('Alexa only supports RTC offer');
this.remoteDescription.reject(e);
throw e;
}
if (sendIceCandidate)
throw new Error("Alexa does not support trickle ICE");
if (sendIceCandidate) {
const e = new Error("Alexa does not support trickle ICE");
this.remoteDescription.reject(e);
throw e;
}
return {
type: type,
@@ -67,15 +75,16 @@ export class AlexaSignalingSession implements RTCSignalingSession {
},
context: undefined
};
data.event.header.name = "AnswerGeneratedForSession";
data.event.header.messageId = createMessageId();
data.event.payload.answer = {
format: 'SDP',
value: description.sdp,
};
this.remoteDescription.resolve();
this.response.send(data);
}
}
@@ -85,13 +94,14 @@ const sessionCache = new Map<string, RTCSessionControl>();
alexaDeviceHandlers.set('Alexa.RTCSessionController/InitiateSessionWithOffer', async (request, response, directive: any, device: ScryptedDevice & RTCSignalingChannel) => {
const { header, endpoint, payload } = directive;
const { sessionId } = payload;
const session = new AlexaSignalingSession(response, directive);
const control = await device.startRTCSignalingSession(session);
control.setPlayback({
audio: true,
video: false,
})
});
await session.remoteDescription.promise;
sessionCache.set(sessionId, control);
});
@@ -115,13 +125,13 @@ alexaDeviceHandlers.set('Alexa.RTCSessionController/SessionConnected', async (re
alexaDeviceHandlers.set('Alexa.RTCSessionController/SessionDisconnected', async (request, response, directive: any, device: ScryptedDevice) => {
const { header, endpoint, payload } = directive;
const { sessionId } = payload;
const session = sessionCache.get(sessionId);
if (session) {
sessionCache.delete(sessionId);
await session.endSession();
}
const data: WebRTCSessionDisconnectedEvent = {
"event": {
header,
@@ -130,9 +140,9 @@ alexaDeviceHandlers.set('Alexa.RTCSessionController/SessionDisconnected', async
},
context: undefined
};
data.event.header.messageId = createMessageId();
response.send(data);
});
@@ -152,14 +162,14 @@ alexaDeviceHandlers.set('Alexa.SmartVision.ObjectDetectionSensor/SetObjectDetect
},
"context": {
"properties": [{
"namespace": "Alexa.SmartVision.ObjectDetectionSensor",
"name": "objectDetectionClasses",
"value": detectionTypes.classes.map(type => ({
"imageNetClass": type
})),
timeOfSample: new Date().toISOString(),
uncertaintyInMilliseconds: 0
}]
"namespace": "Alexa.SmartVision.ObjectDetectionSensor",
"name": "objectDetectionClasses",
"value": detectionTypes.classes.map(type => ({
"imageNetClass": type
})),
timeOfSample: new Date().toISOString(),
uncertaintyInMilliseconds: 0
}]
}
};

View File

@@ -20,6 +20,7 @@
}
},
"../../common": {
"name": "@scrypted/common",
"version": "1.0.1",
"license": "ISC",
"dependencies": {
@@ -34,7 +35,8 @@
}
},
"../../sdk": {
"version": "0.2.103",
"name": "@scrypted/sdk",
"version": "0.3.2",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -70,9 +72,9 @@
}
},
"node_modules/@koush/axios-digest-auth": {
"version": "0.8.5",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.5.tgz",
"integrity": "sha512-EZMM0gMJ3hMUD4EuUqSwP6UGt5Vmw2TZtY7Ypec55AnxkExSXM0ySgPtqkAcnL43g1R27yAg/dQL7dRTLMqO3Q==",
"version": "0.8.6",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.6.tgz",
"integrity": "sha512-e/XKs7/BYpPQkces0Cm4dUmhT9hR0rjvnNZAVRyRnNWdQ8cyCMFWS9HIrMWOdzAocKDNBXi1vKjJ8CywrW5xgQ==",
"dependencies": {
"auth-header": "^1.0.0",
"axios": "^0.21.4"

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/bticino",
"version": "0.0.12",
"version": "0.0.13",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/bticino",
"version": "0.0.12",
"version": "0.0.13",
"dependencies": {
"@slyoldfox/sip": "^0.0.6-1",
"sdp": "^3.0.3",
@@ -40,7 +40,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.105",
"version": "0.3.2",
"dev": true,
"license": "ISC",
"dependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/bticino",
"version": "0.0.12",
"version": "0.0.13",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -1,10 +1,10 @@
import { closeQuiet, createBindZero, listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
import { closeQuiet, createBindUdp, createBindZero, listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
import { sleep } from '@scrypted/common/src/sleep';
import { RtspServer } from '@scrypted/common/src/rtsp-server';
import { addTrackControls } from '@scrypted/common/src/sdp-utils';
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, PictureOptions, Reboot, ResponseMediaStreamOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
import { addTrackControls, parseSdp } from '@scrypted/common/src/sdp-utils';
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, MotionSensor, PictureOptions, Reboot, ResponseMediaStreamOptions, ScryptedDeviceBase, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
import { SipCallSession } from '../../sip/src/sip-call-session';
import { RtpDescription } from '../../sip/src/rtp-utils';
import { RtpDescription, getPayloadType, getSequenceNumber, isRtpMessagePayloadType, isStunMessage } from '../../sip/src/rtp-utils';
import { VoicemailHandler } from './bticino-voicemailHandler';
import { CompositeSipMessageHandler } from '../../sip/src/compositeSipMessageHandler';
import { SipHelper } from './sip-helper';
@@ -16,7 +16,7 @@ import { BticinoSipLock } from './bticino-lock';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from '@scrypted/common/src/media-helpers';
import { PersistentSipManager } from './persistent-sip-manager';
import { InviteHandler } from './bticino-inviteHandler';
import { SipRequest } from '../../sip/src/sip-manager';
import { SipOptions, SipRequest } from '../../sip/src/sip-manager';
import { get } from 'http'
import { ControllerApi } from './c300x-controller-api';
@@ -26,14 +26,12 @@ import { BticinoMuteSwitch } from './bticino-mute-switch';
const STREAM_TIMEOUT = 65000;
const { mediaManager } = sdk;
export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvider, Intercom, Camera, VideoCamera, Settings, BinarySensor, HttpRequestHandler, VideoClips, Reboot {
export class BticinoSipCamera extends ScryptedDeviceBase implements MotionSensor, DeviceProvider, Intercom, Camera, VideoCamera, Settings, BinarySensor, HttpRequestHandler, VideoClips, Reboot {
private session: SipCallSession
private remoteRtpDescription: Promise<RtpDescription>
private audioOutForwarder: dgram.Socket
private audioOutProcess: ChildProcess
private currentMedia: FFmpegInput | MediaStreamUrl
private currentMediaMimeType: string
private refreshTimeout: NodeJS.Timeout
public requestHandlers: CompositeSipMessageHandler = new CompositeSipMessageHandler()
public incomingCallRequest : SipRequest
@@ -41,16 +39,21 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
private voicemailHandler : VoicemailHandler = new VoicemailHandler(this)
private inviteHandler : InviteHandler = new InviteHandler(this)
private controllerApi : ControllerApi = new ControllerApi(this)
private muteSwitch : BticinoMuteSwitch
private aswmSwitch : BticinoAswmSwitch
private deferredCleanup
private currentMediaObject : Promise<MediaObject>
private lastImageRefresh : number
//TODO: randomize this
private keyAndSalt : string = "/qE7OPGKp9hVGALG2KcvKWyFEZfSSvm7bYVDjT8X"
//private decodedSrtpOptions : SrtpOptions = decodeSrtpOptions( this.keyAndSalt )
private persistentSipManager : PersistentSipManager
public doorbellWebhookUrl : string
public doorbellLockWebhookUrl : string
private cachedImage : Buffer
constructor(nativeId: string, public provider: BticinoSipPlugin) {
super(nativeId)
this.requestHandlers.add( this.voicemailHandler ).add( this.inviteHandler )
this.persistentSipManager = new PersistentSipManager( this );
(async() => {
@@ -190,7 +193,21 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
}
async takePicture(option?: PictureOptions): Promise<MediaObject> {
throw new Error("The SIP doorbell camera does not provide snapshots. Install the Snapshot Plugin if snapshots are available via an URL.");
const thumbnailCacheTime : number = parseInt( this.storage?.getItem('thumbnailCacheTime') ) * 1000 || 300000
const now = new Date().getTime()
if( !this.lastImageRefresh || this.lastImageRefresh + thumbnailCacheTime < now ) {
// get a proxy object to make sure we pass prebuffer when already watching a stream
let cam : VideoCamera = sdk.systemManager.getDeviceById<VideoCamera>(this.id)
let vs : MediaObject = await cam.getVideoStream()
let buf : Buffer = await mediaManager.convertMediaObjectToBuffer(vs, 'image/jpeg');
this.cachedImage = buf
this.lastImageRefresh = new Date().getTime()
this.console.log(`Camera picture updated and cached: ${this.lastImageRefresh} + cache time: ${thumbnailCacheTime} < ${now}`)
} else {
this.console.log(`Not refreshing camera picture: ${this.lastImageRefresh} + cache time: ${thumbnailCacheTime} < ${now}`)
}
return mediaManager.createMediaObject(this.cachedImage, 'image/jpeg')
}
async getPictureOptions(): Promise<PictureOptions[]> {
@@ -206,8 +223,17 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
}
async startIntercom(media: MediaObject): Promise<void> {
if (!this.session)
throw new Error("not in call");
if (!this.session) {
const cleanup = () => {
this.console.log("STARTINTERCOM CLEANUP CALLED: " + this.session )
this.session?.stop()
this.session = undefined
this.deferredCleanup()
this.console.log("STARTINTERCOM CLEANUP ENDED")
}
this.session = await this.callIntercom( cleanup )
}
this.stopIntercom();
@@ -280,27 +306,24 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
throw new Error('Please configure from/to/domain settings')
}
if (options?.metadata?.refreshAt) {
if (!this.currentMedia?.mediaStreamOptions)
throw new Error("no stream to refresh");
const currentMedia = this.currentMedia
currentMedia.mediaStreamOptions.refreshAt = Date.now() + STREAM_TIMEOUT;
currentMedia.mediaStreamOptions.metadata = {
refreshAt: currentMedia.mediaStreamOptions.refreshAt
};
this.resetStreamTimeout()
return mediaManager.createMediaObject(currentMedia, this.currentMediaMimeType)
}
this.console.log("Before stopping session")
this.stopSession();
const { clientPromise: playbackPromise, port: playbackPort, url: clientUrl } = await listenZeroSingleClient()
this.console.log("After stopping session")
const playbackUrl = clientUrl
let rebroadcastEnabled = this.interfaces?.includes( "mixin:@scrypted/prebuffer-mixin")
const { clientPromise: playbackPromise, port: playbackPort } = await listenZeroSingleClient()
const playbackUrl = `rtsp://127.0.0.1:${playbackPort}`
this.console.log("PLAYBACKURL: " +playbackUrl)
playbackPromise.then(async (client) => {
client.setKeepAlive(true, 10000)
let sip: SipCallSession
let audioSplitter
let videoSplitter
try {
if( !this.incomingCallRequest ) {
// If this is a "view" call, update the stream endpoint to send it only to "us"
@@ -309,61 +332,37 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
}
let rtsp: RtspServer;
const cleanup = () => {
this.console.log("CLEANUP CALLED")
client.destroy();
if (this.session === sip)
this.session = undefined
try {
this.log.d('cleanup(): stopping sip session.')
sip.stop()
sip?.stop()
this.currentMediaObject = undefined
}
catch (e) {
}
audioSplitter?.server?.close()
videoSplitter?.server?.close()
rtsp?.destroy()
this.console.log("CLEANUP ENDED")
this.deferredCleanup = undefined
this.remoteRtpDescription = undefined
}
this.deferredCleanup = cleanup
client.on('close', cleanup)
client.on('error', cleanup)
let sipOptions = SipHelper.sipOptions( this )
sip = await this.persistentSipManager.session( sipOptions );
// Validate this sooner
if( !sip ) return Promise.reject("Cannot create session")
sip.onCallEnded.subscribe(cleanup)
// Call the C300X
this.remoteRtpDescription = sip.callOrAcceptInvite(
( audio ) => {
return [
//TODO: Payload types are hardcoded
`m=audio 65000 RTP/SAVP 110`,
`a=rtpmap:110 speex/8000`,
`a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:${this.keyAndSalt}`,
]
}, ( video ) => {
if( false ) {
//TODO: implement later
return [
`m=video 0 RTP/SAVP 0`
]
} else {
return [
//TODO: Payload types are hardcoded
`m=video 65002 RTP/SAVP 96`,
`a=rtpmap:96 H264/90000`,
`a=fmtp:96 profile-level-id=42801F`,
`a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:${this.keyAndSalt}`,
'a=recvonly'
]
}
}, this.incomingCallRequest );
this.incomingCallRequest = undefined
if( !rebroadcastEnabled || (rebroadcastEnabled && !this.incomingCallRequest ) ) {
sip = await this.callIntercom( cleanup )
}
//let sdp: string = replacePorts(this.remoteRtpDescription.sdp, 0, 0 )
let sdp : string = [
let sdp : string = [
"v=0",
"m=audio 5000 RTP/AVP 110",
"c=IN IP4 127.0.0.1",
@@ -375,42 +374,141 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
//sdp = sdp.replaceAll(/a=crypto\:1.*/g, '')
//sdp = sdp.replaceAll(/RTP\/SAVP/g, 'RTP\/AVP')
//sdp = sdp.replaceAll('\r\n\r\n', '\r\n')
sdp = addTrackControls(sdp)
sdp = sdp.split('\n').filter(line => !line.includes('a=rtcp-mux')).join('\n')
if( sipOptions.debugSip )
this.log.d('SIP: Updated SDP:\n' + sdp);
client.write(sdp)
client.end()
let vseq = 0;
let vseen = 0;
let vlost = 0;
let aseq = 0;
let aseen = 0;
let alost = 0;
sdp = addTrackControls(sdp);
sdp = sdp.split('\n').filter(line => !line.includes('a=rtcp-mux')).join('\n');
this.console.log('proposed sdp', sdp);
this.console.log("================= AUDIOSPLITTER CREATING.... ============" )
audioSplitter = await createBindUdp(5000)
this.console.log("================= AUDIOSPLITTER CREATED ============" )
audioSplitter.server.on('close', () => {
this.console.log("================= CLOSED AUDIOSPLITTER ================")
audioSplitter = undefined
})
this.console.log("================= VIDEOSPLITTER CREATING.... ============" )
videoSplitter = await createBindUdp(5002)
this.console.log("================= VIDEOSPLITTER CREATED.... ============" )
videoSplitter.server.on('close', () => {
this.console.log("================= CLOSED VIDEOSPLITTER ================")
videoSplitter = undefined
})
rtsp = new RtspServer(client, sdp, false);
const parsedSdp = parseSdp(rtsp.sdp);
const videoTrack = parsedSdp.msections.find(msection => msection.type === 'video').control;
const audioTrack = parsedSdp.msections.find(msection => msection.type === 'audio').control;
rtsp.console = this.console;
await rtsp.handlePlayback();
this.session = sip
videoSplitter.server.on('message', (message, rinfo) => {
if ( !isStunMessage(message)) {
const isRtpMessage = isRtpMessagePayloadType(getPayloadType(message));
if (!isRtpMessage)
return;
vseen++;
try {
rtsp.sendTrack(videoTrack, message, !isRtpMessage);
} catch(e ) {
this.console.log(e)
}
const seq = getSequenceNumber(message);
if (seq !== (vseq + 1) % 0x0FFFF)
vlost++;
vseq = seq;
}
});
audioSplitter.server.on('message', (message, rinfo ) => {
if ( !isStunMessage(message)) {
const isRtpMessage = isRtpMessagePayloadType(getPayloadType(message));
if (!isRtpMessage)
return;
aseen++;
try {
rtsp.sendTrack(audioTrack, message, !isRtpMessage);
} catch(e) {
this.console.log(e)
}
const seq = getSequenceNumber(message);
if (seq !== (aseq + 1) % 0x0FFFF)
alost++;
aseq = seq;
}
});
try {
await rtsp.handleTeardown();
this.console.log('rtsp client ended');
} catch (e) {
this.console.log('rtsp client ended ungracefully', e);
} finally {
cleanup();
}
}
catch (e) {
this.console.error(e)
sip?.stop()
throw e;
}
});
this.resetStreamTimeout();
const mediaStreamOptions = Object.assign(this.getSipMediaStreamOptions(), {
refreshAt: Date.now() + STREAM_TIMEOUT,
});
const ffmpegInput: FFmpegInput = {
url: undefined,
container: 'sdp',
mediaStreamOptions,
inputArguments: [
'-f', 'sdp',
'-i', playbackUrl,
],
const mediaStreamUrl: MediaStreamUrl = {
url: playbackUrl,
mediaStreamOptions: this.getSipMediaStreamOptions(),
};
this.currentMedia = ffmpegInput;
this.currentMediaMimeType = ScryptedMimeTypes.FFmpegInput;
return mediaManager.createFFmpegMediaObject(ffmpegInput);
sleep(2500).then( () => this.takePicture() )
this.currentMediaObject = mediaManager.createMediaObject(mediaStreamUrl, ScryptedMimeTypes.MediaStreamUrl);
// Invalidate any cached image and take a picture after some seconds to take into account the opening of the lens
this.lastImageRefresh = undefined
return this.currentMediaObject
}
async callIntercom( cleanup ) : Promise<SipCallSession> {
let sipOptions : SipOptions = SipHelper.sipOptions( this )
let sip : SipCallSession = await this.persistentSipManager.session( sipOptions );
// Validate this sooner
if( !sip ) return Promise.reject("Cannot create session")
sip.onCallEnded.subscribe(cleanup)
// Call the C300X
this.remoteRtpDescription = sip.callOrAcceptInvite(
( audio ) => {
return [
// this SDP is used by the intercom and will send the encrypted packets which we don't care about to the loopback on port 65000 of the intercom
`m=audio 65000 RTP/SAVP 110`,
`a=rtpmap:110 speex/8000`,
`a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:${this.keyAndSalt}`,
]
}, ( video ) => {
return [
// this SDP is used by the intercom and will send the encrypted packets which we don't care about to the loopback on port 65000 of the intercom
`m=video 65002 RTP/SAVP 96`,
`a=rtpmap:96 H264/90000`,
`a=fmtp:96 profile-level-id=42801F`,
`a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:${this.keyAndSalt}`,
'a=recvonly'
]
}, this.incomingCallRequest );
this.incomingCallRequest = undefined
return sip
}
getSipMediaStreamOptions(): ResponseMediaStreamOptions {
@@ -419,13 +517,16 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
name: 'SIP',
// this stream is NOT scrypted blessed due to wackiness in the h264 stream.
// tool: "scrypted",
container: 'sdp',
container: 'rtsp',
video: {
codec: 'h264'
},
audio: {
// this is a hint to let homekit, et al, know that it's speex audio and needs transcoding.
codec: 'speex',
},
source: 'cloud', // to disable prebuffering
userConfigurable: false,
userConfigurable: true,
};
}
@@ -437,18 +538,26 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
async getDevice(nativeId: string) : Promise<any> {
if( nativeId && nativeId.endsWith('-aswm-switch')) {
return new BticinoAswmSwitch(this, this.voicemailHandler)
this.aswmSwitch = new BticinoAswmSwitch(this, this.voicemailHandler)
return this.aswmSwitch
} else if( nativeId && nativeId.endsWith('-mute-switch') ) {
return new BticinoMuteSwitch(this)
this.muteSwitch = new BticinoMuteSwitch(this)
return this.muteSwitch
}
return new BticinoSipLock(this)
}
async releaseDevice(id: string, nativeId: string): Promise<void> {
this.stopIntercom()
this.voicemailHandler.cancelTimer()
this.persistentSipManager.cancelTimer()
this.controllerApi.cancelTimer()
if( nativeId?.endsWith('-aswm-switch') ) {
this.aswmSwitch.cancelTimer()
} else if( nativeId?.endsWith('mute-switch') ) {
this.muteSwitch.cancelTimer()
} else {
this.stopIntercom()
this.voicemailHandler.cancelTimer()
this.persistentSipManager.cancelTimer()
this.controllerApi.cancelTimer()
}
}
reset() {

View File

@@ -0,0 +1,64 @@
import { ScryptedDeviceBase, HttpRequest, HttpResponse, HttpRequestHandler, OnOff } from "@scrypted/sdk";
import { BticinoSipCamera } from "./bticino-camera";
export class BticinoMuteSwitch extends ScryptedDeviceBase implements OnOff, HttpRequestHandler {
private timeout : NodeJS.Timeout
constructor(private camera: BticinoSipCamera) {
super( camera.nativeId + "-mute-switch");
this.on = false;
this.timeout = setTimeout( () => this.syncStatus() , 5000 )
}
turnOff(): Promise<void> {
this.on = false
return this.camera.muteRinger(false)
}
turnOn(): Promise<void> {
this.on = true
return this.camera.muteRinger(true)
}
syncStatus() {
this.camera.muteStatus().then( (value) => {
this.on = value["status"]
} ).catch( (e) => { this.camera.console.error(e) } ).finally( () => {
this.timeout = setTimeout( () => this.syncStatus() , 60000 )
} )
}
cancelTimer() {
if( this.timeout ) {
clearTimeout(this.timeout)
}
}
public async onRequest(request: HttpRequest, response: HttpResponse): Promise<void> {
if (request.url.endsWith('/disabled')) {
this.on = false
response.send('Success', {
code: 200,
});
} else if( request.url.endsWith('/enabled') ) {
this.on = true
response.send('Success', {
code: 200,
});
} else if( request.url.endsWith('/enable') ) {
this.turnOn()
response.send('Success', {
code: 200,
});
} else if( request.url.endsWith('/disable') ) {
this.turnOff()
response.send('Success', {
code: 200,
});
} else {
response.send('Unsupported operation', {
code: 400,
});
}
}
}

View File

@@ -115,6 +115,7 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
ScryptedInterface.Settings,
ScryptedInterface.Intercom,
ScryptedInterface.BinarySensor,
ScryptedInterface.MotionSensor,
ScryptedDeviceType.DeviceProvider,
ScryptedInterface.HttpRequestHandler,
ScryptedInterface.VideoClips,

View File

@@ -61,7 +61,7 @@ export class SipHelper {
if( !md5 ) {
md5 = crypto.createHash('md5').update( camera.nativeId ).digest("hex")
md5 = md5.substring(0, 8) + '-' + md5.substring(8, 12) + '-' + md5.substring(12,16) + '-' + md5.substring(16, 32)
camera.storage.setItem('md5has', md5)
camera.storage.setItem('md5hash', md5)
}
return md5
}

View File

@@ -35,6 +35,14 @@ export class BticinoStorageSettings {
defaultValue: 600,
placeholder: '600',
},
thumbnailCacheTime: {
title: 'Thumbnail cache time',
type: 'number',
range: [60, 86400],
description: 'How long the snapshot is cached before taking a new one. (in seconds)',
defaultValue: 300,
placeholder: '300',
},
sipdebug: {
title: 'SIP debug logging',
type: 'boolean',

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/cloud",
"version": "0.2.3",
"version": "0.2.4",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/cloud",
"version": "0.2.3",
"version": "0.2.4",
"dependencies": {
"@eneris/push-receiver": "^3.1.4",
"@scrypted/common": "file:../../common",

View File

@@ -54,5 +54,5 @@
"@types/nat-upnp": "^1.1.2",
"@types/node": "^20.4.5"
},
"version": "0.2.3"
"version": "0.2.4"
}

View File

@@ -574,6 +574,8 @@ class ScryptedCloud extends ScryptedDeviceBase implements OauthClient, Settings,
});
const { token_info } = this.storageSettings.values;
if (!token_info)
throw new Error('Scrypted Cloud is not logged in. Skipping home.scrypted.app registration.');
const response = await axios(`https://${SCRYPTED_SERVER}/_punch/register?${q}`, {
headers: {
Authorization: `Bearer ${token_info}`

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/core",
"version": "0.1.149",
"version": "0.1.150",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/core",
"version": "0.1.149",
"version": "0.1.150",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/core",
"version": "0.1.149",
"version": "0.1.150",
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -16,7 +16,7 @@ export class MediaCore extends ScryptedDeviceBase implements DeviceProvider, Buf
constructor() {
super(MediaCoreNativeId);
this.fromMimeType = ScryptedMimeTypes.SchemePrefix + 'scrypted-media';
this.fromMimeType = ScryptedMimeTypes.SchemePrefix + 'scrypted-media' + ';converter-weight=2';
this.toMimeType = ScryptedMimeTypes.MediaObject;
(async () => {

View File

@@ -1,5 +1,5 @@
import { ScryptedDeviceBase, ScryptedNativeId, StreamService } from "@scrypted/sdk";
import { IPty, spawn as ptySpawn } from 'node-pty-prebuilt-multiarch';
import type { IPty, spawn as ptySpawn } from 'node-pty-prebuilt-multiarch';
import { createAsyncQueue } from '@scrypted/common/src/async-queue'
import { ChildProcess, spawn as childSpawn } from "child_process";
@@ -9,8 +9,7 @@ export const TerminalServiceNativeId = 'terminalservice';
class InteractiveTerminal {
cp: IPty
constructor(cmd: string[]) {
const spawn = require('node-pty-prebuilt-multiarch').spawn as typeof ptySpawn;
constructor(cmd: string[], spawn: typeof ptySpawn) {
if (cmd?.length) {
this.cp = spawn(cmd[0], cmd.slice(1), {});
} else {
@@ -150,8 +149,7 @@ export class TerminalService extends ScryptedDeviceBase implements StreamService
}
}
finally {
if (cp)
cp.kill();
cp?.kill();
}
}
@@ -162,37 +160,40 @@ export class TerminalService extends ScryptedDeviceBase implements StreamService
continue;
if (Buffer.isBuffer(message)) {
if (cp)
cp.write(message);
cp?.write(message);
continue;
}
try {
const parsed = JSON.parse(message.toString());
if (parsed.dim) {
if (cp)
cp.resize(parsed.dim.cols, parsed.dim.rows);
cp?.resize(parsed.dim.cols, parsed.dim.rows);
} else if (parsed.eof) {
if (cp)
cp.sendEOF();
cp?.sendEOF();
} else if ("interactive" in parsed && !cp) {
if (parsed.interactive) {
cp = new InteractiveTerminal(parsed.cmd);
try {
const spawn = require('node-pty-prebuilt-multiarch').spawn as typeof ptySpawn;
cp = new InteractiveTerminal(parsed.cmd, spawn);
}
catch (e) {
this.console.error('Error starting pty', e);
queue.end(e);
return;
}
} else {
cp = new NoninteractiveTerminal(parsed.cmd);
}
registerChildListeners();
}
} catch {
if (cp)
cp.write(Buffer.from(message));
cp?.write(Buffer.from(message));
}
}
}
catch (e) {
this.console.log(e);
if (cp)
cp.kill();
cp?.kill();
}
})();
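
The change above loads node-pty only when an interactive terminal is requested, so a broken native build fails that request instead of the whole plugin at import time. A minimal sketch of the same lazy-require pattern, reusing the names from the diff.

import type { spawn as ptySpawn } from 'node-pty-prebuilt-multiarch';

function tryLoadPtySpawn(): typeof ptySpawn | undefined {
    try {
        // resolved at call time; a missing or broken native module only fails here
        return require('node-pty-prebuilt-multiarch').spawn as typeof ptySpawn;
    }
    catch (e) {
        console.error('Error starting pty', e);
        return undefined;
    }
}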

View File

@@ -38,7 +38,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.103",
"version": "0.3.2",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -77,9 +77,9 @@
"extraneous": true
},
"node_modules/@koush/axios-digest-auth": {
"version": "0.8.5",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.5.tgz",
"integrity": "sha512-EZMM0gMJ3hMUD4EuUqSwP6UGt5Vmw2TZtY7Ypec55AnxkExSXM0ySgPtqkAcnL43g1R27yAg/dQL7dRTLMqO3Q==",
"version": "0.8.6",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.6.tgz",
"integrity": "sha512-e/XKs7/BYpPQkces0Cm4dUmhT9hR0rjvnNZAVRyRnNWdQ8cyCMFWS9HIrMWOdzAocKDNBXi1vKjJ8CywrW5xgQ==",
"dependencies": {
"auth-header": "^1.0.0",
"axios": "^0.21.4"
@@ -125,9 +125,9 @@
"integrity": "sha512-CPPazq09YVDUNNVWo4oSPTQmtwIzHusZhQmahCKvIsk0/xH6U3QsMAv3sM+7+Q0B1K2KJ/Q38OND317uXs4NHA=="
},
"node_modules/axios": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
"integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
"dependencies": {
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",
@@ -242,9 +242,9 @@
},
"dependencies": {
"@koush/axios-digest-auth": {
"version": "0.8.5",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.5.tgz",
"integrity": "sha512-EZMM0gMJ3hMUD4EuUqSwP6UGt5Vmw2TZtY7Ypec55AnxkExSXM0ySgPtqkAcnL43g1R27yAg/dQL7dRTLMqO3Q==",
"version": "0.8.6",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.6.tgz",
"integrity": "sha512-e/XKs7/BYpPQkces0Cm4dUmhT9hR0rjvnNZAVRyRnNWdQ8cyCMFWS9HIrMWOdzAocKDNBXi1vKjJ8CywrW5xgQ==",
"requires": {
"auth-header": "^1.0.0",
"axios": "^0.21.4"
@@ -319,9 +319,9 @@
"integrity": "sha512-CPPazq09YVDUNNVWo4oSPTQmtwIzHusZhQmahCKvIsk0/xH6U3QsMAv3sM+7+Q0B1K2KJ/Q38OND317uXs4NHA=="
},
"axios": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.4.0.tgz",
"integrity": "sha512-S4XCWMEmzvo64T9GfvQDOXgYRDJ/wsSZc7Jvdgx5u1sd0JwsuPLqb3SYmusag+edF6ziyMensPVqLTSc1PiSEA==",
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
"integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
"requires": {
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/homekit",
"version": "1.2.31",
"version": "1.2.33",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/homekit",
"version": "1.2.31",
"version": "1.2.33",
"dependencies": {
"@koush/werift-src": "file:../../external/werift",
"check-disk-space": "^3.3.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/homekit",
"version": "1.2.31",
"version": "1.2.33",
"description": "HomeKit Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",

View File

@@ -69,10 +69,16 @@ addSupportedType({
resolutions: [
// 3840x2160@30 (4k).
[3840, 2160, 30],
// 3K
[2880, 1620, 30],
// 2MP
[2560, 1440, 30],
// 1920x1080@30 (1080p).
[1920, 1080, 30],
// 1280x720@30 (720p).
[1280, 720, 30],
[960, 540, 30],
[640, 360, 30],
// 320x240@15 (Apple Watch).
[320, 240, 15],
]
@@ -103,7 +109,7 @@ addSupportedType({
const openRecordingStreams = new Map<number, Deferred<any>>();
if (isRecordingEnabled) {
recordingDelegate = {
updateRecordingConfiguration(newConfiguration: CameraRecordingConfiguration ) {
updateRecordingConfiguration(newConfiguration: CameraRecordingConfiguration) {
configuration = newConfiguration;
},
handleRecordingStreamRequest(streamId: number): AsyncGenerator<RecordingPacket> {

View File

@@ -304,10 +304,16 @@ export function createCameraStreamingDelegate(device: ScryptedDevice & VideoCame
const mediaOptions: RequestMediaStreamOptions = {
destination,
destinationId: session.prepareRequest.targetAddress,
destinationType: '@scrypted/homekit',
adaptive: true,
video: {
codec: 'h264',
bitrate: request.video.max_bit_rate * 1000,
// if these are sent as width/height rather than clientWidth/clientHeight,
// rebroadcast will always choose substream to treat it as a hard constraint.
// send as hint for adaptive bitrate.
clientWidth: request.video.width,
clientHeight: request.video.height,
},
audio: {
// opus is the preferred/default codec, and can be repacketized to fit any request if in use.
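
A sketch of requesting a stream with these size hints from plugin code; the device id and bitrate are hypothetical, and only fields visible in this hunk (plus the standard getVideoStream call) are used.

import sdk, { RequestMediaStreamOptions, VideoCamera } from '@scrypted/sdk';

async function requestSizedStream(deviceId: string) {
    const camera = sdk.systemManager.getDeviceById<VideoCamera>(deviceId);
    const options: RequestMediaStreamOptions = {
        adaptive: true,
        video: {
            codec: 'h264',
            bitrate: 2_000_000,
            // clientWidth/clientHeight are hints for adaptive bitrate;
            // width/height would be treated as a hard constraint and force
            // substream selection in rebroadcast
            clientWidth: 1280,
            clientHeight: 720,
        },
    };
    return camera.getVideoStream(options);
}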

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/mqtt",
"version": "0.0.68",
"version": "0.0.76",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/mqtt",
"version": "0.0.68",
"version": "0.0.76",
"dependencies": {
"@types/node": "^16.6.1",
"aedes": "^0.46.1",

View File

@@ -41,5 +41,5 @@
"@scrypted/common": "file:../../common",
"@types/nunjucks": "^3.2.0"
},
"version": "0.0.68"
"version": "0.0.76"
}

View File

@@ -1,13 +1,14 @@
import { Settings, Setting, ScryptedDeviceBase, ScryptedInterface } from '@scrypted/sdk';
import { connect, Client } from 'mqtt';
import { ScriptableDeviceBase } from '../scrypted-eval';
import type {MqttProvider} from '../main';
export class MqttDeviceBase extends ScriptableDeviceBase implements Settings {
client: Client;
handler: any;
pathname: string;
constructor(nativeId: string) {
constructor(public provider: MqttProvider, nativeId: string) {
super(nativeId, undefined);
}
@@ -53,9 +54,36 @@ export class MqttDeviceBase extends ScriptableDeviceBase implements Settings {
this.client?.removeAllListeners();
this.client?.end();
this.client = undefined;
const url = new URL(this.storage.getItem('url'));
this.pathname = url.pathname.substring(1);
const urlWithoutPath = new URL(this.storage.getItem('url'));
const urlString = this.storage.getItem('url');
let url: URL;
let username: string;
let password: string;
const externalBroker = this.provider.storage.getItem('externalBroker');
if (urlString) {
this.console.log('Using device specific broker.', urlString);
url = new URL(urlString);
username = this.storage.getItem('username') || undefined;
password = this.storage.getItem('password') || undefined;
this.pathname = url.pathname.substring(1);
}
else if (externalBroker && !this.provider.isBrokerEnabled) {
this.console.log('Using external broker.', externalBroker);
url = new URL(externalBroker);
username = this.provider.storage.getItem('username') || undefined;
password = this.provider.storage.getItem('password') || undefined;
this.pathname = `${url.pathname.substring(1)}/${this.id}`;
}
else {
this.console.log('Using built in broker.');
const tcpPort = this.provider.storage.getItem('tcpPort') || '';
url = new URL(`mqtt://localhost:${tcpPort}/scrypted`);
username = this.provider.storage.getItem('username') || undefined;
password = this.provider.storage.getItem('password') || undefined;
this.pathname = `${url.pathname.substring(1)}/${this.id}`;
}
const urlWithoutPath = new URL(url);
urlWithoutPath.pathname = '';
const client = this.client = connect(urlWithoutPath.toString(), {

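For reference, a minimal sketch (not from the source) of where each branch of the broker selection above ends up connecting and publishing, assuming a device id of 42 and otherwise hypothetical addresses, credentials, and ports:

import { connect } from 'mqtt';

// Hypothetical outcomes of the three branches above, for a device with id '42':
// 1. Device specific broker: storage 'url' = 'mqtt://10.0.0.5/cameras'
//    -> connects to mqtt://10.0.0.5, topic prefix 'cameras'
// 2. External broker: provider 'externalBroker' = 'mqtt://192.168.1.100/scrypted'
//    (and the built in broker disabled)
//    -> connects to mqtt://192.168.1.100, topic prefix 'scrypted/42'
// 3. Built in broker: provider 'tcpPort' = '1883'
//    -> connects to mqtt://localhost:1883, topic prefix 'scrypted/42'
const client = connect('mqtt://192.168.1.100', {
    username: 'scrypted', // hypothetical credentials
    password: 'secret',
});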

@@ -1,8 +1,10 @@
import { Brightness, DeviceProvider, Lock, LockState, OnOff, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, Setting, Settings } from "@scrypted/sdk";
import { MqttClient, connect } from "mqtt";
import { MqttDeviceBase } from "../api/mqtt-device-base";
import crypto from 'crypto';
import { Brightness, DeviceProvider, Lock, LockState, MixinDeviceBase, OnOff, ScryptedDevice, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedInterfaceProperty, Setting, Settings } from "@scrypted/sdk";
import { Client, MqttClient, connect } from "mqtt";
import { MqttDeviceBase } from "./api/mqtt-device-base";
import nunjucks from 'nunjucks';
import sdk from "@scrypted/sdk";
import type { MqttProvider } from './main';
const { deviceManager } = sdk;
@@ -59,8 +61,8 @@ typeMap.set('binary_sensor', {
export class MqttAutoDiscoveryProvider extends MqttDeviceBase implements DeviceProvider {
devices = new Map<string, MqttAutoDiscoveryDevice>();
constructor(nativeId: string) {
super(nativeId);
constructor(provider: MqttProvider, nativeId: string) {
super(provider, nativeId);
this.bind();
}
@@ -180,7 +182,7 @@ export class MqttAutoDiscoveryProvider extends MqttDeviceBase implements DeviceP
}
async releaseDevice(id: string, nativeId: string): Promise<void> {
}
async putSetting(key: string, value: string) {
@@ -340,3 +342,90 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements OnOff
config.value_template, config.payload_unlock, 'UNLOCK');
}
}
interface AutoDiscoveryConfig {
component: string;
create: (mqttId: string, device: MixinDeviceBase<any>, topic: string) => any;
}
const autoDiscoveryMap = new Map<string, AutoDiscoveryConfig>();
function getAutoDiscoveryDevice(device: MixinDeviceBase<any>, mqttId: string) {
return {
dev: {
name: device.name,
// what the hell is this
"ids": crypto.createHash('sha256').update(`scrypted-${mqttId}-${device.id}`).digest().toString('hex').substring(0, 8),
"sw": device.info?.version,
"mdl": device.info?.model,
"mf": device.info?.manufacturer,
},
}
}
function createBinarySensorConfig(mqttId: string, device: MixinDeviceBase<any>, prop: ScryptedInterfaceProperty, topic: string) {
return {
state_topic: `${topic}/${prop}`,
payload_on: 'true',
payload_off: 'false',
...getAutoDiscoveryDevice(device, mqttId),
}
}
function addBinarySensor(iface: ScryptedInterface, prop: ScryptedInterfaceProperty) {
autoDiscoveryMap.set(iface, {
component: 'binary_sensor',
create(mqttId, device, topic) {
return createBinarySensorConfig(mqttId, device, prop, topic);
}
});
}
addBinarySensor(ScryptedInterface.MotionSensor, ScryptedInterfaceProperty.motionDetected);
addBinarySensor(ScryptedInterface.BinarySensor, ScryptedInterfaceProperty.binaryState);
addBinarySensor(ScryptedInterface.OccupancySensor, ScryptedInterfaceProperty.occupied);
addBinarySensor(ScryptedInterface.FloodSensor, ScryptedInterfaceProperty.flooded);
addBinarySensor(ScryptedInterface.AudioSensor, ScryptedInterfaceProperty.audioDetected);
addBinarySensor(ScryptedInterface.Online, ScryptedInterfaceProperty.online);
autoDiscoveryMap.set(ScryptedInterface.Thermometer, {
component: 'sensor',
create(mqttId, device, topic) {
return {
state_topic: `${topic}/${ScryptedInterfaceProperty.temperature}`,
value_template: '{{ value_json }}',
unit_of_measurement: 'C',
...getAutoDiscoveryDevice(device, mqttId),
}
}
});
autoDiscoveryMap.set(ScryptedInterface.HumiditySensor, {
component: 'sensor',
create(mqttId, device, topic) {
return {
state_topic: `${topic}/${ScryptedInterfaceProperty.humidity}`,
value_template: '{{ value_json }}',
unit_of_measurement: '%',
...getAutoDiscoveryDevice(device, mqttId),
}
}
});
export function publishAutoDiscovery(mqttId: string, client: Client, device: MixinDeviceBase<any>, topic: string, autoDiscoveryPrefix = 'homeassistant') {
for (const iface of device.interfaces) {
const found = autoDiscoveryMap.get(iface);
if (!found)
continue;
const config = found.create(mqttId, device, topic);
const nodeId = `scrypted-${mqttId}-${device.id}`;
config.unique_id = `scrypted-${mqttId}-${device.id}-${iface}`;
config.name = iface;
const configTopic = `${autoDiscoveryPrefix}/${found.component}/${nodeId}/${iface}/config`;
client.publish(configTopic, JSON.stringify(config), {
retain: true,
});
}
}
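As an illustration only, the retained autodiscovery config published above for a camera's MotionSensor interface could look roughly like the following, assuming a hypothetical mqttId of 'ab12cd34', device id '42', and topic prefix 'scrypted/42':

// Hypothetical config topic:
//   homeassistant/binary_sensor/scrypted-ab12cd34-42/MotionSensor/config
const exampleConfig = {
    state_topic: 'scrypted/42/motionDetected',
    payload_on: 'true',
    payload_off: 'false',
    dev: {
        name: 'Front Door Camera', // hypothetical device name
        ids: '1a2b3c4d',           // first 8 hex chars of the sha256 of 'scrypted-ab12cd34-42'
        sw: '1.0.0',
        mdl: 'Model X',
        mf: 'Acme',
    },
    unique_id: 'scrypted-ab12cd34-42-MotionSensor',
    name: 'MotionSensor',
};
// Published with retain: true so Home Assistant rediscovers the entity after a restart.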


@@ -1,5 +1,7 @@
import crypto from 'crypto';
import { createScriptDevice, ScriptDeviceImpl, tsCompile } from '@scrypted/common/src/eval/scrypted-eval';
import sdk, { DeviceCreator, DeviceCreatorSettings, DeviceProvider, EventListenerRegister, MixinProvider, Scriptable, ScriptSource, ScryptedDevice, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedInterfaceDescriptors, Setting, Settings } from '@scrypted/sdk';
import { StorageSettings } from "@scrypted/sdk/storage-settings"
import aedes, { AedesOptions } from 'aedes';
import fs from 'fs';
import http from 'http';
@@ -10,7 +12,7 @@ import ws from 'websocket-stream';
import { SettingsMixinDeviceBase, SettingsMixinDeviceOptions } from "../../../common/src/settings-mixin";
import { MqttClient, MqttClientPublishOptions, MqttSubscriptions } from './api/mqtt-client';
import { MqttDeviceBase } from './api/mqtt-device-base';
import { MqttAutoDiscoveryProvider } from './autodiscovery/autodiscovery';
import { MqttAutoDiscoveryProvider, publishAutoDiscovery } from './autodiscovery';
import { monacoEvalDefaults } from './monaco';
import { isPublishable } from './publishable-types';
import { scryptedEval } from './scrypted-eval';
@@ -29,8 +31,8 @@ const loopbackLight = filterExample('loopback-light.ts');
const { log, deviceManager, systemManager } = sdk;
class MqttDevice extends MqttDeviceBase implements Scriptable {
constructor(nativeId: string) {
super(nativeId);
constructor(provider: MqttProvider, nativeId: string) {
super(provider, nativeId);
}
async saveScript(source: ScriptSource): Promise<void> {
@@ -152,7 +154,7 @@ class MqttDevice extends MqttDeviceBase implements Scriptable {
}
}
const brokerProperties = ['httpPort', 'tcpPort', 'enableBroker', 'username', 'password'];
const brokerProperties = ['httpPort', 'tcpPort', 'enableBroker', 'username', 'password', 'externalBroker'];
class MqttPublisherMixin extends SettingsMixinDeviceBase<any> {
@@ -229,6 +231,18 @@ class MqttPublisherMixin extends SettingsMixinDeviceBase<any> {
this.connectClient();
}
publishState(client: Client) {
for (const iface of this.device.interfaces) {
for (const prop of ScryptedInterfaceDescriptors[iface]?.properties || []) {
let str = this[prop];
if (typeof str === 'object')
str = JSON.stringify(str);
client.publish(`${this.pathname}/${prop}`, str?.toString() || '');
}
}
}
connectClient() {
this.client?.end();
this.client = undefined;
@@ -236,17 +250,28 @@ class MqttPublisherMixin extends SettingsMixinDeviceBase<any> {
let url: URL;
let username: string;
let password: string;
const externalBroker = this.provider.storage.getItem('externalBroker');
if (urlString) {
this.console.log('Using device specific broker.', urlString);
url = new URL(urlString);
username = this.storage.getItem('username') || undefined;
password = this.storage.getItem('password') || undefined;
this.pathname = url.pathname.substring(1);
}
else {
const tcpPort = this.provider.storage.getItem('tcpPort') || '';
else if (externalBroker && !this.provider.isBrokerEnabled) {
this.console.log('Using external broker.', externalBroker);
url = new URL(externalBroker);
username = this.provider.storage.getItem('username') || undefined;
password = this.provider.storage.getItem('password') || undefined;
this.pathname = `${url.pathname.substring(1)}/${this.id}`;
}
else {
this.console.log('Using built in broker.');
const tcpPort = this.provider.storage.getItem('tcpPort') || '';
url = new URL(`mqtt://localhost:${tcpPort}/scrypted`);
username = this.provider.storage.getItem('username') || undefined;
password = this.provider.storage.getItem('password') || undefined;
this.pathname = `${url.pathname.substring(1)}/${this.id}`;
}
@@ -260,24 +285,51 @@ class MqttPublisherMixin extends SettingsMixinDeviceBase<any> {
});
client.setMaxListeners(Infinity);
const allProperties: string[] = [];
const allMethods: string[] = [];
for (const iface of this.device.interfaces) {
const methods = ScryptedInterfaceDescriptors[iface]?.methods || [];
allMethods.push(...methods);
const properties = ScryptedInterfaceDescriptors[iface]?.properties || [];
allProperties.push(...properties);
}
client.on('connect', packet => {
this.console.log('MQTT client connected, publishing current state.');
for (const iface of this.device.interfaces) {
for (const prop of ScryptedInterfaceDescriptors[iface]?.properties || []) {
let str = this[prop];
if (typeof str === 'object')
str = JSON.stringify(str);
client.publish(`${this.pathname}/${prop}`, str?.toString() || '');
}
for (const method of allMethods) {
client.subscribe(this.pathname + '/' + method);
}
})
publishAutoDiscovery(this.provider.storageSettings.values.mqttId, client, this, this.pathname, 'homeassistant');
client.subscribe('homeassistant/status');
this.publishState(client);
});
client.on('disconnect', () => this.console.log('mqtt client disconnected'));
client.on('error', e => {
this.console.log('mqtt client error', e);
});
client.on('message', async (messageTopic, message) => {
if (messageTopic === 'homeassistant/status') {
publishAutoDiscovery(this.provider.storageSettings.values.mqttId, client, this, this.pathname, 'homeassistant');
this.publishState(client);
return;
}
const method = messageTopic.substring(this.pathname.length + 1);
if (!allMethods.includes(method)) {
if (!allProperties.includes(method))
this.console.warn('unknown topic', method);
return;
}
try {
const args = JSON.parse(message.toString() || '[]');
await this.device[method](...args);
}
catch (e) {
this.console.warn('error invoking method', e);
}
});
return this.client;
}
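A small usage sketch (hypothetical broker address and topic prefix) of the contract wired up above: the mixin publishes each interface property to topic/property, and any interface method can be invoked by publishing a JSON array of arguments to topic/method:

import { connect } from 'mqtt';

// Hypothetical topic prefix for a mixed-in device: 'scrypted/42'
const client = connect('mqtt://192.168.1.100');
client.on('connect', () => {
    // State published by the mixin, e.g. OnOff and MotionSensor properties.
    client.subscribe('scrypted/42/on');
    client.subscribe('scrypted/42/motionDetected');
    // Invoke interface methods: the payload is a JSON argument array.
    client.publish('scrypted/42/turnOn', '[]');
    client.publish('scrypted/42/setBrightness', '[50]');
});
client.on('message', (topic, payload) => console.log(topic, payload.toString()));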
@@ -289,10 +341,18 @@ class MqttPublisherMixin extends SettingsMixinDeviceBase<any> {
}
}
class MqttProvider extends ScryptedDeviceBase implements DeviceProvider, Settings, MixinProvider, DeviceCreator {
export class MqttProvider extends ScryptedDeviceBase implements DeviceProvider, Settings, MixinProvider, DeviceCreator {
devices = new Map<string, any>();
netServer: net.Server;
httpServer: http.Server;
storageSettings = new StorageSettings(this, {
mqttId: {
group: 'Advanced',
title: 'Autodiscovery ID',
// hide: true,
persistedDefaultValue: crypto.randomBytes(4).toString('hex'),
}
})
constructor(nativeId?: string) {
super(nativeId);
@@ -344,15 +404,25 @@ class MqttProvider extends ScryptedDeviceBase implements DeviceProvider, Setting
{
title: 'Enable MQTT Broker',
key: 'enableBroker',
description: 'Enable the Aedes MQTT Broker.',
description: 'Enable the built in Aedes MQTT Broker.',
// group: 'MQTT Broker',
type: 'boolean',
value: (this.storage.getItem('enableBroker') === 'true').toString(),
},
];
if (!this.isBrokerEnabled)
return ret;
if (!this.isBrokerEnabled) {
ret.push(
{
title: 'External Broker',
group: 'MQTT Broker',
key: 'externalBroker',
description: 'Specify the mqtt address of an external MQTT broker.',
placeholder: 'mqtt://192.168.1.100',
value: this.storage.getItem('externalBroker'),
}
)
}
ret.push(
{
@@ -369,26 +439,33 @@ class MqttProvider extends ScryptedDeviceBase implements DeviceProvider, Setting
key: 'password',
type: 'password',
description: 'Optional: Password used to authenticate with the MQTT broker.',
},
{
title: 'TCP Port',
key: 'tcpPort',
description: 'The port to use for TCP connections',
placeholder: '1883',
type: 'number',
group: 'MQTT Broker',
value: this.storage.getItem('tcpPort'),
},
{
title: 'HTTP Port',
key: 'httpPort',
description: 'The port to use for HTTP connections',
placeholder: '8888',
type: 'number',
group: 'MQTT Broker',
value: this.storage.getItem('httpPort'),
},
}
);
if (this.isBrokerEnabled) {
ret.push(
{
title: 'TCP Port',
key: 'tcpPort',
description: 'The port to use for TCP connections',
placeholder: '1883',
type: 'number',
group: 'MQTT Broker',
value: this.storage.getItem('tcpPort'),
},
{
title: 'HTTP Port',
key: 'httpPort',
description: 'The port to use for HTTP connections',
placeholder: '8888',
type: 'number',
group: 'MQTT Broker',
value: this.storage.getItem('httpPort'),
},
);
}
ret.push(...await this.storageSettings.getSettings());
return ret;
}
@@ -469,6 +546,9 @@ class MqttProvider extends ScryptedDeviceBase implements DeviceProvider, Setting
}
async putSetting(key: string, value: string | number) {
if (this.storageSettings.keys[key]) {
return this.storageSettings.putSetting(key, value);
}
this.storage.setItem(key, value.toString());
if (brokerProperties.includes(key)) {
@@ -482,7 +562,7 @@ class MqttProvider extends ScryptedDeviceBase implements DeviceProvider, Setting
}
async releaseDevice(id: string, nativeId: string): Promise<void> {
}
createMqttDevice(nativeId: string): MqttDevice {
@@ -493,10 +573,10 @@ class MqttProvider extends ScryptedDeviceBase implements DeviceProvider, Setting
let ret = this.devices.get(nativeId);
if (!ret) {
if (nativeId.startsWith('autodiscovery:')) {
ret = new MqttAutoDiscoveryProvider(nativeId);
ret = new MqttAutoDiscoveryProvider(this, nativeId);
}
else if (nativeId.startsWith('0.')) {
ret = new MqttDevice(nativeId);
ret = new MqttDevice(this, nativeId);
await ret.bind();
}
if (ret)


@@ -1,19 +1,18 @@
{
"name": "@scrypted/objectdetector",
"version": "0.1.8",
"version": "0.1.17",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/objectdetector",
"version": "0.1.8",
"version": "0.1.17",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"lodash": "^4.17.21",
"point-inside-polygon": "^1.0.3",
"polygon-overlap": "^1.0.5",
"polygon-clipping": "^0.15.3",
"semver": "^7.3.8"
},
"devDependencies": {
@@ -39,7 +38,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.107",
"version": "0.3.2",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -100,11 +99,6 @@
"integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==",
"dev": true
},
"node_modules/lines-intersect": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/lines-intersect/-/lines-intersect-1.0.0.tgz",
"integrity": "sha1-pgyHo9lXoIcdEU0FSmhatx9ygEI="
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
@@ -121,25 +115,14 @@
"node": ">=10"
}
},
"node_modules/point-inside-polygon": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/point-inside-polygon/-/point-inside-polygon-1.0.3.tgz",
"integrity": "sha512-ks7+jwmSHj8dcxClSfef2ftms57tGEE4rAwI4DHFX4U5vZqyEaCbHcfdmReWyJ5zDnOpsB5dTfDBmeFNa+449A=="
},
"node_modules/polygon-overlap": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/polygon-overlap/-/polygon-overlap-1.0.5.tgz",
"integrity": "sha1-DONSaovZhnSrBG/JLdAmZLbYJC0=",
"node_modules/polygon-clipping": {
"version": "0.15.3",
"resolved": "https://registry.npmjs.org/polygon-clipping/-/polygon-clipping-0.15.3.tgz",
"integrity": "sha512-ho0Xx5DLkgxRx/+n4O74XyJ67DcyN3Tu9bGYKsnTukGAW6ssnuak6Mwcyb1wHy9MZc9xsUWqIoiazkZB5weECg==",
"dependencies": {
"lines-intersect": "1.0.0",
"point-inside-polygon": "1.0.1"
"splaytree": "^3.1.0"
}
},
"node_modules/polygon-overlap/node_modules/point-inside-polygon": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/point-inside-polygon/-/point-inside-polygon-1.0.1.tgz",
"integrity": "sha512-qceSGPZXGaELiy5p9f+8DXTnL35qxWhpLSubufeXlVltWKkT9IB0PJcM6mNJ7Nxj0z443qyQrXbWzERheWfC7w=="
},
"node_modules/semver": {
"version": "7.5.4",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz",
@@ -154,6 +137,11 @@
"node": ">=10"
}
},
"node_modules/splaytree": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/splaytree/-/splaytree-3.1.2.tgz",
"integrity": "sha512-4OM2BJgC5UzrhVnnJA4BkHKGtjXNzzUfpQjCO8I05xYPsfS/VuQDwjCGGMi8rYQilHEV4j8NBqTFbls/PZEE7A=="
},
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
@@ -231,11 +219,6 @@
"integrity": "sha512-21cFJr9z3g5dW8B0CVI9g2O9beqaThGQ6ZFBqHfwhzLDKUxaqTIy3vnfah/UPkfOiF2pLq+tGz+W8RyCskuslw==",
"dev": true
},
"lines-intersect": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/lines-intersect/-/lines-intersect-1.0.0.tgz",
"integrity": "sha1-pgyHo9lXoIcdEU0FSmhatx9ygEI="
},
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
@@ -249,25 +232,12 @@
"yallist": "^4.0.0"
}
},
"point-inside-polygon": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/point-inside-polygon/-/point-inside-polygon-1.0.3.tgz",
"integrity": "sha512-ks7+jwmSHj8dcxClSfef2ftms57tGEE4rAwI4DHFX4U5vZqyEaCbHcfdmReWyJ5zDnOpsB5dTfDBmeFNa+449A=="
},
"polygon-overlap": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/polygon-overlap/-/polygon-overlap-1.0.5.tgz",
"integrity": "sha1-DONSaovZhnSrBG/JLdAmZLbYJC0=",
"polygon-clipping": {
"version": "0.15.3",
"resolved": "https://registry.npmjs.org/polygon-clipping/-/polygon-clipping-0.15.3.tgz",
"integrity": "sha512-ho0Xx5DLkgxRx/+n4O74XyJ67DcyN3Tu9bGYKsnTukGAW6ssnuak6Mwcyb1wHy9MZc9xsUWqIoiazkZB5weECg==",
"requires": {
"lines-intersect": "1.0.0",
"point-inside-polygon": "1.0.1"
},
"dependencies": {
"point-inside-polygon": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/point-inside-polygon/-/point-inside-polygon-1.0.1.tgz",
"integrity": "sha512-qceSGPZXGaELiy5p9f+8DXTnL35qxWhpLSubufeXlVltWKkT9IB0PJcM6mNJ7Nxj0z443qyQrXbWzERheWfC7w=="
}
"splaytree": "^3.1.0"
}
},
"semver": {
@@ -278,6 +248,11 @@
"lru-cache": "^6.0.0"
}
},
"splaytree": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/splaytree/-/splaytree-3.1.2.tgz",
"integrity": "sha512-4OM2BJgC5UzrhVnnJA4BkHKGtjXNzzUfpQjCO8I05xYPsfS/VuQDwjCGGMi8rYQilHEV4j8NBqTFbls/PZEE7A=="
},
"yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/objectdetector",
"version": "0.1.8",
"version": "0.1.17",
"description": "Scrypted Video Analysis Plugin. Installed alongside a detection service like OpenCV or TensorFlow.",
"author": "Scrypted",
"license": "Apache-2.0",
@@ -42,13 +42,11 @@
],
"realfs": true
},
"optionalDependencies": {},
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"lodash": "^4.17.21",
"point-inside-polygon": "^1.0.3",
"polygon-overlap": "^1.0.5",
"polygon-clipping": "^0.15.3",
"semver": "^7.3.8"
},
"devDependencies": {


@@ -1,23 +1,21 @@
import { Deferred } from '@scrypted/common/src/deferred';
import { sleep } from '@scrypted/common/src/sleep';
import sdk, { Camera, DeviceCreator, DeviceCreatorSettings, DeviceProvider, DeviceState, EventListenerRegister, Image, MediaObject, MediaStreamDestination, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionModel, ObjectDetectionTypes, ObjectDetectionZone, ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, ScryptedNativeId, Setting, Settings, SettingValue, VideoCamera, VideoFrame, VideoFrameGenerator } from '@scrypted/sdk';
import sdk, { Camera, DeviceCreator, DeviceCreatorSettings, DeviceProvider, DeviceState, EventListenerRegister, MediaObject, MediaStreamDestination, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionModel, ObjectDetectionTypes, ObjectDetectionZone, ObjectDetector, ObjectsDetected, Point, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedNativeId, Setting, SettingValue, Settings, VideoCamera, VideoFrame, VideoFrameGenerator } from '@scrypted/sdk';
import { StorageSettings } from '@scrypted/sdk/storage-settings';
import crypto from 'crypto';
import os from 'os';
import { AutoenableMixinProvider } from "../../../common/src/autoenable-mixin-provider";
import { SettingsMixinDeviceBase } from "../../../common/src/settings-mixin";
import { FFmpegVideoFrameGenerator } from './ffmpeg-videoframes';
import { getMaxConcurrentObjectDetectionSessions } from './performance-profile';
import { insidePolygon, normalizeBox, polygonOverlap } from './polygon';
import { serverSupportsMixinEventMasking } from './server-version';
import { SMART_MOTIONSENSOR_PREFIX, SmartMotionSensor, createObjectDetectorStorageSetting } from './smart-motionsensor';
import { getAllDevices, safeParseJson } from './util';
import { createObjectDetectorStorageSetting, SMART_MOTIONSENSOR_PREFIX, SmartMotionSensor } from './smart-motionsensor';
const polygonOverlap = require('polygon-overlap');
const insidePolygon = require('point-inside-polygon');
const { systemManager } = sdk;
const defaultDetectionDuration = 20;
const defaultPostMotionAnalysisDuration = 20;
const defaultMotionDuration = 30;
const BUILTIN_MOTION_SENSOR_ASSIST = 'Assist';
@@ -25,10 +23,11 @@ const BUILTIN_MOTION_SENSOR_REPLACE = 'Replace';
const objectDetectionPrefix = `${ScryptedInterface.ObjectDetection}:`;
type ClipPath = [number, number][];
type ClipPath = Point[];
type Zones = { [zone: string]: ClipPath };
interface ZoneInfo {
exclusion?: boolean;
filterMode?: 'include' | 'exclude' | 'observe';
type?: 'Intersect' | 'Contain';
classes?: string[];
scoreThreshold?: number;
@@ -42,23 +41,13 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
detections = new Map<string, MediaObject>();
cameraDevice: ScryptedDevice & Camera & VideoCamera & MotionSensor & ObjectDetector;
storageSettings = new StorageSettings(this, {
newPipeline: {
title: 'Video Pipeline',
description: 'Configure how frames are provided to the video analysis pipeline.',
onGet: async () => {
const choices = [
'Default',
...getAllDevices().filter(d => d.interfaces.includes(ScryptedInterface.VideoFrameGenerator)).map(d => d.name),
];
return {
choices,
}
},
onPut: () => {
this.endObjectDetection();
this.maybeStartDetection();
},
defaultValue: 'Default',
zones: {
title: 'Zones',
type: 'string',
description: 'Enter the name of a new zone or delete an existing zone.',
multiple: true,
combobox: true,
choices: [],
},
motionSensorSupplementation: {
title: 'Built-In Motion Sensor',
@@ -74,13 +63,12 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
this.maybeStartDetection();
}
},
detectionDurationDEPRECATED: {
hide: true,
title: 'Detection Duration',
postMotionAnalysisDuration: {
title: 'Post Motion Analysis Duration',
subgroup: 'Advanced',
description: 'The duration in seconds to analyze video when motion occurs.',
description: 'The duration in seconds to analyze video after motion ends.',
type: 'number',
defaultValue: defaultDetectionDuration,
defaultValue: defaultPostMotionAnalysisDuration,
},
motionDuration: {
title: 'Motion Duration',
@@ -88,6 +76,25 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
type: 'number',
defaultValue: defaultMotionDuration,
},
newPipeline: {
subgroup: 'Advanced',
title: 'Decoder',
description: 'Configure how frames are provided to the video analysis pipeline.',
onGet: async () => {
const choices = [
'Default',
...getAllDevices().filter(d => d.interfaces.includes(ScryptedInterface.VideoFrameGenerator)).map(d => d.name),
];
return {
choices,
}
},
onPut: () => {
this.endObjectDetection();
this.maybeStartDetection();
},
defaultValue: 'Default',
},
});
motionTimeout: NodeJS.Timeout;
detectionIntervalTimeout: NodeJS.Timeout;
@@ -122,6 +129,14 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
return;
this.maybeStartDetection();
}, 60000);
this.storageSettings.settings.zones.mapGet = () => Object.keys(this.zones);
this.storageSettings.settings.zones.onGet = async () => {
return {
group,
choices: Object.keys(this.zones),
}
}
}
clearMotionTimeout() {
@@ -147,8 +162,18 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
const ret: { [key: string]: any } = {};
for (const setting of this.settings) {
ret[setting.key] = (setting.multiple ? safeParseJson(this.storage.getItem(setting.key)) : this.storage.getItem(setting.key))
|| setting.value;
let value: any;
if (setting.multiple) {
value = safeParseJson(this.storage.getItem(setting.key));
if (!value?.length)
value = undefined;
}
else {
value = this.storage.getItem(setting.key);
}
value ||= setting.value;
ret[setting.key] = value;
}
if (this.hasMotionType)
@@ -189,12 +214,25 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (!this.hasMotionType) {
this.motionListener = this.cameraDevice.listen(ScryptedInterface.MotionSensor, async () => {
if (!this.cameraDevice.motionDetected) {
// const minimumEndTme = this.detectionStartTime + this.storageSettings.values.minimumDetectionDuration * 1000;
// const sleepTime = minimumEndTme - Date.now();
const sleepTime = this.storageSettings.values.postMotionAnalysisDuration * 1000;
if (sleepTime > 0) {
this.console.log('Motion stopped. Waiting additional time for minimum detection duration:', sleepTime);
await sleep(sleepTime);
if (this.motionDetected) {
this.console.log('Motion resumed during wait. Continuing detection.');
return;
}
}
if (this.detectorRunning) {
// allow analysis due to user request.
if (this.analyzeStop > Date.now())
return;
this.console.log('motion stopped, cancelling ongoing detection')
this.console.log('Motion stopped, stopping detection.')
this.endObjectDetection();
}
return;
@@ -217,14 +255,14 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (this.motionDetected)
return;
if (!this.detectorRunning)
this.console.log('built in motion sensor started motion, starting video detection.');
this.console.log('Built in motion sensor started motion, starting video detection.');
this.startPipelineAnalysis();
return;
}
this.clearMotionTimeout();
if (this.detectorRunning) {
this.console.log('built in motion sensor ended motion, stopping video detection.')
this.console.log('Built in motion sensor ended motion, stopping video detection.')
this.endObjectDetection();
}
if (this.motionDetected)
@@ -342,7 +380,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}, 30000);
signal.promise.finally(() => clearInterval(interval));
const currentDetections = new Set<string>();
const currentDetections = new Map<string, number>();
let lastReport = 0;
updatePipelineStatus('waiting result');
@@ -354,11 +392,11 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
continue;
for (const [key, zone] of Object.entries(mixin.zones)) {
const zi = mixin.zoneInfos[key];
if (!zone?.length || zone?.length < 3)
if (!zone?.length || zone?.length < 3 || zi?.filterMode === 'observe')
continue;
const odz: ObjectDetectionZone = {
classes: mixin.hasMotionType ? ['motion'] : zi?.classes,
exclusion: zi?.exclusion,
exclusion: zi?.filterMode ? zi?.filterMode === 'exclude' : zi?.exclusion,
path: zone,
type: zi?.type,
}
@@ -417,12 +455,12 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
// this.console.log('Zone filtered detections:', numZonedDetections - numOriginalDetections);
for (const d of detected.detected.detections) {
currentDetections.add(d.className);
currentDetections.set(d.className, Math.max(currentDetections.get(d.className) || 0, d.score));
}
const now = Date.now();
if (now > lastReport + 10000) {
const found = [...currentDetections.values()];
const found = [...currentDetections.entries()].map(([className, score]) => `${className} (${score})`);
if (!found.length)
found.push('[no detections]');
this.console.log(`[${Math.round((now - start) / 100) / 10}s] Detected:`, ...found);
@@ -462,19 +500,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
}
normalizeBox(boundingBox: [number, number, number, number], inputDimensions: [number, number]) {
let [x, y, width, height] = boundingBox;
let x2 = x + width;
let y2 = y + height;
// the zones are point paths in percentage format
x = x * 100 / inputDimensions[0];
y = y * 100 / inputDimensions[1];
x2 = x2 * 100 / inputDimensions[0];
y2 = y2 * 100 / inputDimensions[1];
const box = [[x, y], [x2, y], [x2, y2], [x, y2]];
return box;
}
applyZones(detection: ObjectsDetected) {
// determine zones of the objects, if configured.
if (!detection.detections)
@@ -485,7 +510,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
continue;
o.zones = []
const box = this.normalizeBox(o.boundingBox, detection.inputDimensions);
const box = normalizeBox(o.boundingBox, detection.inputDimensions);
let included: boolean;
for (const [zone, zoneValue] of Object.entries(this.zones)) {
@@ -495,13 +520,14 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
const zoneInfo = this.zoneInfos[zone];
const exclusion = zoneInfo?.filterMode ? zoneInfo.filterMode === 'exclude' : zoneInfo?.exclusion;
// track if there are any inclusion zones
if (!zoneInfo?.exclusion && !included)
if (!exclusion && !included && zoneInfo?.filterMode !== 'observe')
included = false;
let match = false;
if (zoneInfo?.type === 'Contain') {
match = insidePolygon(box[0], zoneValue) &&
match = insidePolygon(box[0] as Point, zoneValue) &&
insidePolygon(box[1], zoneValue) &&
insidePolygon(box[2], zoneValue) &&
insidePolygon(box[3], zoneValue);
@@ -516,12 +542,14 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (match) {
o.zones.push(zone);
if (zoneInfo?.exclusion && match) {
copy = copy.filter(c => c !== o);
break;
}
if (zoneInfo?.filterMode !== 'observe') {
if (exclusion && match) {
copy = copy.filter(c => c !== o);
break;
}
included = true;
included = true;
}
}
}
@@ -529,7 +557,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
// use a default inclusion zone that crops the top and bottom to
// prevent errant motion from the on-screen time changing every second.
if (this.hasMotionType && included === undefined) {
const defaultInclusionZone = [[0, 10], [100, 10], [100, 90], [0, 90]];
const defaultInclusionZone: ClipPath = [[0, 10], [100, 10], [100, 90], [0, 90]];
included = polygonOverlap(box, defaultInclusionZone);
}
@@ -550,20 +578,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (!this.motionDetected)
this.motionDetected = true;
// if (this.motionSensorSupplementation === BUILTIN_MOTION_SENSOR_ASSIST) {
// if (!this.motionDetected) {
// this.motionDetected = true;
// this.console.log(`${this.objectDetection.name} confirmed motion, stopping video detection.`)
// this.endObjectDetection();
// this.clearMotionTimeout();
// }
// }
// else {
// if (!this.motionDetected)
// this.motionDetected = true;
// this.resetMotionTimeout();
// }
const areas = detection.detections.filter(d => d.className === 'motion' && d.score !== 1).map(d => d.score)
if (areas.length)
this.console.log('detection areas', areas);
@@ -652,32 +666,30 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
if (this.settings) {
settings.push(...this.settings.map(setting =>
Object.assign({}, setting, {
settings.push(...this.settings.map(setting => {
let value: any;
if (setting.multiple) {
value = safeParseJson(this.storage.getItem(setting.key));
if (!value?.length)
value = undefined;
}
else {
value = this.storage.getItem(setting.key);
}
value ||= setting.value;
return Object.assign({}, setting, {
placeholder: setting.placeholder?.toString(),
value: (setting.multiple ? safeParseJson(this.storage.getItem(setting.key)) : this.storage.getItem(setting.key))
|| setting.value,
} as Setting))
);
value,
} as Setting);
}));
}
this.storageSettings.settings.motionSensorSupplementation.hide = !this.hasMotionType || !this.mixinDeviceInterfaces.includes(ScryptedInterface.MotionSensor);
this.storageSettings.settings.detectionDurationDEPRECATED.hide = this.hasMotionType;
this.storageSettings.settings.postMotionAnalysisDuration.hide = this.hasMotionType;
this.storageSettings.settings.motionDuration.hide = !this.hasMotionType;
settings.push(...await this.storageSettings.getSettings());
settings.push({
key: 'zones',
title: 'Zones',
type: 'string',
description: 'Enter the name of a new zone or delete an existing zone.',
multiple: true,
value: Object.keys(this.zones),
choices: Object.keys(this.zones),
combobox: true,
});
for (const [name, value] of Object.entries(this.zones)) {
const zi = this.zoneInfos[name];
@@ -690,13 +702,26 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
value: JSON.stringify(value),
});
// settings.push({
// subgroup,
// key: `zoneinfo-exclusion-${name}`,
// title: `Exclusion Zone`,
// description: 'Detections in this zone will be excluded.',
// type: 'boolean',
// value: zi?.exclusion,
// });
settings.push({
subgroup,
key: `zoneinfo-exclusion-${name}`,
title: `Exclusion Zone`,
description: 'Detections in this zone will be excluded.',
type: 'boolean',
value: zi?.exclusion,
key: `zoneinfo-filterMode-${name}`,
title: `Filter Mode`,
description: 'The filter mode used by this zone. The Default is include. Zones set to observe will not affect filtering and can be used for automations.',
choices: [
'Default',
'include',
'exclude',
'observe',
],
value: zi?.filterMode || (zi?.exclusion ? 'exclude' : undefined) || 'Default',
});
settings.push({
@@ -802,7 +827,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
else {
const settings = this.getCurrentSettings();
if (settings && settings[key]) {
if (settings && key in settings) {
this.storage.setItem(key, vs);
settings[key] = value;
}
@@ -896,7 +921,7 @@ interface ObjectDetectionStatistics {
sampleTime: number;
}
class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings, DeviceProvider, DeviceCreator {
export class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings, DeviceProvider, DeviceCreator {
currentMixins = new Set<ObjectDetectorMixin>();
objectDetectionStatistics = new Map<number, ObjectDetectionStatistics>();
statsSnapshotTime: number;
@@ -1103,7 +1128,7 @@ class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings,
if (nativeId === 'ffmpeg')
ret = this.devices.get(nativeId) || new FFmpegVideoFrameGenerator('ffmpeg');
if (nativeId?.startsWith(SMART_MOTIONSENSOR_PREFIX))
ret = this.devices.get(nativeId) || new SmartMotionSensor(nativeId);
ret = this.devices.get(nativeId) || new SmartMotionSensor(this, nativeId);
if (ret)
this.devices.set(nativeId, ret);
@@ -1164,13 +1189,14 @@ class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings,
name,
type: ScryptedDeviceType.Sensor,
interfaces: [
ScryptedInterface.Camera,
ScryptedInterface.MotionSensor,
ScryptedInterface.Settings,
ScryptedInterface.Readme,
]
});
const sensor = new SmartMotionSensor(nativeId);
const sensor = new SmartMotionSensor(this, nativeId);
sensor.storageSettings.values.objectDetector = objectDetector?.id;
return id;


@@ -0,0 +1,27 @@
import { Point } from '@scrypted/sdk';
import polygonClipping from 'polygon-clipping';
// const polygonOverlap = require('polygon-overlap');
// const insidePolygon = require('point-inside-polygon');
export function polygonOverlap(p1: Point[], p2: Point[]) {
const intersect = polygonClipping.intersection([p1], [p2]);
return !!intersect.length;
}
export function insidePolygon(point: Point, polygon: Point[]) {
const intersect = polygonClipping.intersection([polygon], [[point, [point[0] + 1, point[1]], [point[0] + 1, point[1] + 1]]]);
return !!intersect.length;
}
export function normalizeBox(boundingBox: [number, number, number, number], inputDimensions: [number, number]): [Point, Point, Point, Point] {
let [x, y, width, height] = boundingBox;
let x2 = x + width;
let y2 = y + height;
// the zones are point paths in percentage format
x = x * 100 / inputDimensions[0];
y = y * 100 / inputDimensions[1];
x2 = x2 * 100 / inputDimensions[0];
y2 = y2 * 100 / inputDimensions[1];
return [[x, y], [x2, y], [x2, y2], [x, y2]];
}
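A short worked example of the helpers above with hypothetical numbers, assuming it runs in the same module so Point and the exports are in scope:

// A 384x216 box at (192, 108) inside a 1920x1080 frame, converted to percent coordinates.
const box = normalizeBox([192, 108, 384, 216], [1920, 1080]);
// -> [[10, 10], [30, 10], [30, 30], [10, 30]]

// The default motion inclusion zone crops the top and bottom 10% of the frame.
const defaultInclusionZone: Point[] = [[0, 10], [100, 10], [100, 90], [0, 90]];
polygonOverlap(box, defaultInclusionZone); // true: the box intersects the zone
insidePolygon([50, 50], defaultInclusionZone); // true: the point lies inside the zone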


@@ -1,5 +1,6 @@
import sdk, { EventListenerRegister, MotionSensor, ObjectDetector, ObjectsDetected, Readme, ScryptedDevice, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedNativeId, Setting, SettingValue, Settings } from "@scrypted/sdk";
import sdk, { Camera, EventListenerRegister, MediaObject, MotionSensor, ObjectDetector, ObjectsDetected, Readme, RequestPictureOptions, ResponsePictureOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedNativeId, Setting, SettingValue, Settings } from "@scrypted/sdk";
import { StorageSetting, StorageSettings } from "@scrypted/sdk/storage-settings";
import type { ObjectDetectionPlugin } from "./main";
export const SMART_MOTIONSENSOR_PREFIX = 'smart-motionsensor-';
@@ -13,7 +14,7 @@ export function createObjectDetectorStorageSetting(): StorageSetting {
};
}
export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, Readme, MotionSensor {
export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, Readme, MotionSensor, Camera {
storageSettings = new StorageSettings(this, {
objectDetector: createObjectDetectorStorageSetting(),
detections: {
@@ -28,11 +29,20 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
type: 'number',
defaultValue: 60,
},
zones: {
title: 'Zones',
description: 'Optional: The sensor will only be triggered when an object is in any of the following zones.',
multiple: true,
combobox: true,
choices: [
],
},
});
listener: EventListenerRegister;
timeout: NodeJS.Timeout;
lastPicture: Promise<MediaObject>;
constructor(nativeId?: ScryptedNativeId) {
constructor(public plugin: ObjectDetectionPlugin, nativeId?: ScryptedNativeId) {
super(nativeId);
this.storageSettings.settings.detections.onGet = async () => {
@@ -48,7 +58,47 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
this.storageSettings.settings.objectDetector.onPut = () => this.rebind();
this.storageSettings.settings.zones.onPut = () => this.rebind();
this.storageSettings.settings.zones.onGet = async () => {
const objectDetector: ObjectDetector & ScryptedDevice = this.storageSettings.values.objectDetector;
const objectDetections = [...this.plugin.currentMixins.values()]
.map(d => [...d.currentMixins.values()].filter(dd => !dd.hasMotionType)).flat();
const mixin = objectDetections.find(m => m.id === objectDetector?.id);
const zones = new Set(Object.keys(mixin?.getZones() || {}));
for (const z of this.storageSettings.values.zones || []) {
zones.add(z);
}
return {
choices: [...zones],
};
};
this.rebind();
if (!this.providedInterfaces.includes(ScryptedInterface.Camera)) {
sdk.deviceManager.onDeviceDiscovered({
name: this.providedName,
nativeId: this.nativeId,
type: this.providedType,
interfaces: [
ScryptedInterface.Camera,
ScryptedInterface.MotionSensor,
ScryptedInterface.Settings,
ScryptedInterface.Readme,
]
})
}
}
async takePicture(options?: RequestPictureOptions): Promise<MediaObject> {
return this.lastPicture;
}
async getPictureOptions(): Promise<ResponsePictureOptions[]> {
return;
}
resetTrigger() {
@@ -80,7 +130,6 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
if (!detections?.length)
return;
const console = sdk.deviceManager.getMixinConsole(objectDetector.id, this.nativeId);
this.listener = objectDetector.listen(ScryptedInterface.ObjectDetector, (source, details, data) => {
@@ -88,6 +137,23 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
const match = detected.detections?.find(d => {
if (!detections.includes(d.className))
return false;
const zones: string[] = this.storageSettings.values.zones;
if (zones?.length) {
if (d.zones) {
let found = false;
for (const z of d.zones) {
if (zones.includes(z)) {
found = true;
break;
}
}
if (!found)
return false;
}
else {
this.console.warn('Camera does not provide Zones in detection event. Zone filter will not be applied.');
}
}
if (!d.movement)
return true;
return d.movement.moving;
@@ -95,6 +161,8 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
if (match) {
if (!this.motionDetected)
console.log('Smart Motion Sensor triggered on', match);
if (detected.detectionId)
this.lastPicture = objectDetector.getDetectionInput(detected.detectionId, details.eventId);
this.trigger();
}
});

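A brief sketch with hypothetical detection payloads showing how the zone filter above decides whether the smart sensor triggers, given configured zones of ['Driveway', 'Porch']:

// Class matches and one of its zones is selected: triggers (movement is also moving).
const triggering = { className: 'person', zones: ['Driveway'], movement: { moving: true } };
// Zones are configured but none of the detection's zones match: ignored.
const ignored = { className: 'person', zones: ['Backyard'] };
// No zone data on the detection: the warning above is logged, the zone filter is
// skipped, and the class match alone is enough to trigger.
const noZoneData = { className: 'person' };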

@@ -1,12 +1,12 @@
{
"name": "@scrypted/prebuffer-mixin",
"version": "0.9.101",
"version": "0.10.8",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/prebuffer-mixin",
"version": "0.9.101",
"version": "0.10.8",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/prebuffer-mixin",
"version": "0.9.101",
"version": "0.10.8",
"description": "Video Stream Rebroadcast, Prebuffer, and Management Plugin for Scrypted.",
"author": "Scrypted",
"license": "Apache-2.0",


@@ -203,14 +203,14 @@ class PrebufferSession {
return;
this.console.log(this.streamName, 'prebuffer session started');
this.parserSessionPromise = this.startPrebufferSession();
this.parserSessionPromise.catch(e => {
this.console.error(this.streamName, 'prebuffer session ended with error', e);
this.parserSessionPromise = undefined;
});
this.parserSessionPromise.then(pso => pso.killed.finally(() => {
this.console.error(this.streamName, 'prebuffer session ended');
this.parserSessionPromise = undefined;
}));
}))
.catch(e => {
this.console.error(this.streamName, 'prebuffer session ended with error', e);
this.parserSessionPromise = undefined;
});
}
canUseRtspParser(mediaStreamOptions: MediaStreamOptions) {
@@ -470,8 +470,14 @@ class PrebufferSession {
this.console.log('bypassing ffmpeg: using scrypted rfc4571 parser')
const json = await mediaManager.convertMediaObjectToJSON<any>(mo, 'x-scrypted/x-rfc4571');
const { url, sdp, mediaStreamOptions } = json;
sessionMso = mediaStreamOptions;
session = startRFC4571Parser(this.console, connectRFC4571Parser(url), sdp, mediaStreamOptions, rbo);
const rtspParser = createRtspParser();
rbo.parsers.rtsp = rtspParser;
session = startRFC4571Parser(this.console, connectRFC4571Parser(url), sdp, mediaStreamOptions, {
timeout: 10000,
});
this.sdp = session.sdp.then(buffers => Buffer.concat(buffers).toString());
}
else {
@@ -531,9 +537,13 @@ class PrebufferSession {
else if (parser === FFMPEG_PARSER_TCP)
ffmpegInput.inputArguments = ['-rtsp_transport', 'tcp', '-i', ffmpegInput.url];
// create missing pts from dts so mpegts and mp4 muxing does not fail
const extraInputArguments = this.storage.getItem(this.ffmpegInputArgumentsKey) || DEFAULT_FFMPEG_INPUT_ARGUMENTS;
const userInputArguments = this.storage.getItem(this.ffmpegInputArgumentsKey);
const extraInputArguments = userInputArguments || DEFAULT_FFMPEG_INPUT_ARGUMENTS;
const extraOutputArguments = this.storage.getItem(this.ffmpegOutputArgumentsKey) || '';
ffmpegInput.inputArguments.unshift(...extraInputArguments.split(' '));
// ehh this seems to cause issues with frames being updated in the webassembly decoder..?
// if (!userInputArguments && (ffmpegInput.container === 'rtmp' || ffmpegInput.url?.startsWith('rtmp:')))
// ffmpegInput.inputArguments.unshift('-use_wallclock_as_timestamps', '1');
// extraOutputArguments must contain full codec information
if (extraOutputArguments) {
@@ -554,7 +564,7 @@ class PrebufferSession {
}
}
if (this.usingScryptedParser) {
if (this.usingScryptedParser && !isRfc4571) {
// watch the stream for 10 seconds to see if a weird NALU is encountered.
// if one is found while the scrypted parser is the default, rebroadcast will need to restart to prevent
// downstream issues.
@@ -1019,6 +1029,11 @@ class PrebufferSession {
if (this.audioDisabled) {
mediaStreamOptions.audio = null;
}
else if (audioSection) {
mediaStreamOptions.audio ||= {};
mediaStreamOptions.audio.codec ||= audioSection.rtpmap.codec;
mediaStreamOptions.audio.sampleRate ||= audioSection.rtpmap.clock;
}
if (session.inputVideoResolution?.width && session.inputVideoResolution?.height) {
// this may be an audio only request.
@@ -1069,6 +1084,7 @@ class PrebufferMixin extends SettingsMixinDeviceBase<VideoCamera> implements Vid
streamSettings = createStreamSettings(this);
rtspServer: net.Server;
settingsListener: EventListenerRegister;
videoCameraListener: EventListenerRegister;
constructor(public getTranscodeStorageSettings: () => Promise<any>, options: SettingsMixinDeviceOptions<VideoCamera & VideoCameraConfiguration>) {
super(options);
@@ -1091,6 +1107,7 @@ class PrebufferMixin extends SettingsMixinDeviceBase<VideoCamera> implements Vid
})();
this.settingsListener = systemManager.listenDevice(this.id, ScryptedInterface.Settings, () => this.ensurePrebufferSessions());
this.videoCameraListener = systemManager.listenDevice(this.id, ScryptedInterface.VideoCamera, () => this.reinitiatePrebufferSessions());
}
async startRtspServer() {
@@ -1442,6 +1459,16 @@ class PrebufferMixin extends SettingsMixinDeviceBase<VideoCamera> implements Vid
return settings;
}
async reinitiatePrebufferSessions() {
const sessions = this.sessions;
this.sessions = new Map();
// kill and reinitiate the prebuffers.
for (const session of sessions.values()) {
session?.parserSessionPromise?.then(session => session.kill(new Error('rebroadcast settings changed')));
}
this.ensurePrebufferSessions();
}
async putMixinSetting(key: string, value: SettingValue): Promise<void> {
if (this.streamSettings.storageSettings.settings[key])
await this.streamSettings.storageSettings.putSetting(key, value);
@@ -1452,14 +1479,7 @@ class PrebufferMixin extends SettingsMixinDeviceBase<VideoCamera> implements Vid
if (this.streamSettings.storageSettings.settings[key]?.group === 'Transcoding')
return;
const sessions = this.sessions;
this.sessions = new Map();
// kill and reinitiate the prebuffers.
for (const session of sessions.values()) {
session?.parserSessionPromise?.then(session => session.kill(new Error('rebroadcast settings changed')));
}
this.ensurePrebufferSessions();
this.reinitiatePrebufferSessions();
}
getPrebufferedStreams(msos?: ResponseMediaStreamOptions[]) {
@@ -1501,6 +1521,7 @@ class PrebufferMixin extends SettingsMixinDeviceBase<VideoCamera> implements Vid
async release() {
closeQuiet(this.rtspServer);
this.settingsListener.removeListener();
this.videoCameraListener.removeListener();
this.online = true;
super.release();
this.console.log('prebuffer sessions releasing if started');
@@ -1669,8 +1690,10 @@ export class RebroadcastPlugin extends AutoenableMixinProvider implements MixinP
}
async canMixin(type: ScryptedDeviceType, interfaces: string[]): Promise<string[]> {
if (type !== ScryptedDeviceType.Doorbell && type !== ScryptedDeviceType.Camera)
return;
if (!interfaces.includes(ScryptedInterface.VideoCamera))
return null;
return;
const ret = [ScryptedInterface.VideoCamera, ScryptedInterface.Settings, ScryptedInterface.Online, REBROADCAST_MIXIN_INTERFACE_TOKEN];
return ret;
}


@@ -64,7 +64,9 @@ export function connectRFC4571Parser(url: string) {
return socket;
}
export function startRFC4571Parser(console: Console, socket: Readable, sdp: string, mediaStreamOptions: ResponseMediaStreamOptions, options?: ParserOptions<"rtsp">): ParserSession<"rtsp"> {
export function startRFC4571Parser(console: Console, socket: Readable, sdp: string, mediaStreamOptions: ResponseMediaStreamOptions, options?: {
timeout?: number,
}): ParserSession<"rtsp"> {
let isActive = true;
const events = new EventEmitter();
// need this to prevent kill from throwing due to uncaught Error during cleanup


@@ -1,12 +1,12 @@
{
"name": "@scrypted/reolink",
"version": "0.0.48",
"version": "0.0.49",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/reolink",
"version": "0.0.48",
"version": "0.0.49",
"license": "Apache",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",
@@ -37,7 +37,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.103",
"version": "0.3.2",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -73,9 +73,9 @@
}
},
"node_modules/@koush/axios-digest-auth": {
"version": "0.8.5",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.5.tgz",
"integrity": "sha512-EZMM0gMJ3hMUD4EuUqSwP6UGt5Vmw2TZtY7Ypec55AnxkExSXM0ySgPtqkAcnL43g1R27yAg/dQL7dRTLMqO3Q==",
"version": "0.8.6",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.6.tgz",
"integrity": "sha512-e/XKs7/BYpPQkces0Cm4dUmhT9hR0rjvnNZAVRyRnNWdQ8cyCMFWS9HIrMWOdzAocKDNBXi1vKjJ8CywrW5xgQ==",
"dependencies": {
"auth-header": "^1.0.0",
"axios": "^0.21.4"
@@ -181,12 +181,12 @@
}
},
"node_modules/onvif": {
"version": "0.6.8",
"resolved": "https://registry.npmjs.org/onvif/-/onvif-0.6.8.tgz",
"integrity": "sha512-GkrBlgusJCAGRBxfLBmykJpfKbPY16mChERORqt5J7aFt7y48KyqoynS+w7D3nZcjWPKR7WyHiJV9XN4e+Foiw==",
"version": "0.6.9",
"resolved": "https://registry.npmjs.org/onvif/-/onvif-0.6.9.tgz",
"integrity": "sha512-aKr14CG8dkHMEF3bUqBZA1OdZi4ffzfmR5E1Y3v4WpweCGkywERAQDhQM3PRUvLNtqnWbcDEcq4l7gBSZ7JCyA==",
"dependencies": {
"lodash.get": "^4.4.2",
"xml2js": "^0.4.23"
"xml2js": "^0.5.0"
},
"engines": {
"node": ">=6.0"
@@ -220,9 +220,9 @@
]
},
"node_modules/sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz",
"integrity": "sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA=="
},
"node_modules/setprototypeof": {
"version": "1.2.0",
@@ -257,9 +257,9 @@
}
},
"node_modules/xml2js": {
"version": "0.4.23",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz",
"integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==",
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz",
"integrity": "sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==",
"dependencies": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"


@@ -1,6 +1,6 @@
{
"name": "@scrypted/reolink",
"version": "0.0.48",
"version": "0.0.49",
"description": "Reolink Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",


@@ -1,12 +1,12 @@
{
"name": "@scrypted/ring",
"version": "0.0.135",
"version": "0.0.136",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/ring",
"version": "0.0.135",
"version": "0.0.136",
"dependencies": {
"@koush/ring-client-api": "file:../../external/ring-client-api",
"@scrypted/common": "file:../../common",


@@ -44,5 +44,5 @@
"got": "11.8.6",
"socket.io-client": "^2.5.0"
},
"version": "0.0.135"
"version": "0.0.136"
}


@@ -74,6 +74,7 @@ export class RtspCamera extends CameraBase<UrlMediaStreamOptions> {
createMediaStreamUrl(stringUrl: string, vso: ResponseMediaStreamOptions) {
const ret: MediaStreamUrl = {
container: vso.container,
url: stringUrl,
mediaStreamOptions: vso,
};


@@ -1,12 +1,12 @@
{
"name": "@scrypted/snapshot",
"version": "0.2.11",
"version": "0.2.22",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/snapshot",
"version": "0.2.11",
"version": "0.2.22",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",
"@types/node": "^18.16.18",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/snapshot",
"version": "0.2.11",
"version": "0.2.22",
"description": "Snapshot Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
@@ -29,7 +29,8 @@
"Settings",
"MixinProvider",
"BufferConverter",
"DeviceProvider"
"DeviceProvider",
"HttpRequestHandler"
]
},
"dependencies": {


@@ -1,12 +1,11 @@
import fs from 'fs';
import { addVideoFilterArguments } from '@scrypted/common/src/ffmpeg-helpers';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from '@scrypted/common/src/media-helpers';
import { timeoutFunction } from '@scrypted/common/src/promise-utils';
import { sleep } from '@scrypted/common/src/sleep';
import child_process, { ChildProcess } from 'child_process';
import { once } from 'events';
import { Writable } from 'stream';
import { Pipe2Jpeg } from './pipe2jpeg';
import { timeoutFunction } from '@scrypted/common/src/promise-utils';
export interface FFmpegImageFilterOptions {
console?: Console,
@@ -133,13 +132,11 @@ export function ffmpegFilterImage(inputArguments: string[], options: FFmpegImage
];
}
else {
outputArguments = [
'-frames:v', '1',
'-f', 'image2',
'pipe:3',
];
}
const args: string[] = [


@@ -0,0 +1,24 @@
import { BufferConverter, FFmpegInput, MediaObjectOptions, ScryptedDeviceBase, ScryptedMimeTypes, ScryptedNativeId } from '@scrypted/sdk';
import MIMEType from 'whatwg-mimetype';
import type { SnapshotPlugin } from './main';
import { parseImageOp, processImageOp } from './parse-dims';
export const ImageConverterNativeId = 'imageconverter';
export class ImageConverter extends ScryptedDeviceBase implements BufferConverter {
constructor(public plugin: SnapshotPlugin, nativeId: ScryptedNativeId) {
super(nativeId);
this.fromMimeType = ScryptedMimeTypes.FFmpegInput;
this.toMimeType = 'image/jpeg';
}
async convert(data: any, fromMimeType: string, toMimeType: string, options?: MediaObjectOptions): Promise<any> {
const mime = new MIMEType(toMimeType);
const op = parseImageOp(mime.parameters);
const ffmpegInput = JSON.parse(data.toString()) as FFmpegInput;
return processImageOp(ffmpegInput, op, parseFloat(mime.parameters.get('time')), options?.sourceId, this.plugin.debugConsole);
}
}


@@ -1,13 +1,20 @@
import sdk, { BufferConverter, Image, ImageOptions, MediaObject, MediaObjectOptions, ScryptedDeviceBase, ScryptedMimeTypes } from "@scrypted/sdk";
import type sharp from 'sharp';
export let sharpInstance: typeof sharp;
try {
sharpInstance = require('sharp');
console.log('sharp loaded');
}
catch (e) {
console.warn('sharp failed to load, scrypted server may be out of date', e);
let hasLoadedSharp = false;
let sharpInstance: typeof sharp;
export function loadSharp() {
if (!hasLoadedSharp) {
hasLoadedSharp = true;
try {
sharpInstance = require('sharp');
console.log('sharp loaded');
}
catch (e) {
console.warn('sharp failed to load, scrypted server may be out of date', e);
}
}
return !!sharpInstance;
}
export const ImageReaderNativeId = 'imagereader';
@@ -64,6 +71,12 @@ export class VipsImage implements Image {
if (options?.format === 'rgb') {
transformed.removeAlpha().toFormat('raw');
}
else if (options?.format === 'rgba') {
transformed.ensureAlpha().toFormat('raw');
}
else if (options?.format === 'gray') {
transformed.toFormat('raw').grayscale();
}
else if (options?.format === 'jpg') {
transformed.toFormat('jpg');
}
@@ -98,7 +111,9 @@ export class VipsImage implements Image {
}
}
export async function loadVipsImage(data: Buffer|string, sourceId: string) {
export async function loadVipsImage(data: Buffer | string, sourceId: string) {
loadSharp();
const image = sharpInstance(data, {
failOnError: false,
});
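A small usage sketch of the lazily loaded sharp pipeline (the input buffer and source id are placeholders): loadSharp() can now be called repeatedly without re-attempting the require, and the added 'gray' and 'rgba' formats return raw pixel data instead of an encoded image.

import { loadSharp, loadVipsImage } from './image-reader';

async function toGrayscale(jpeg: Buffer): Promise<Buffer> {
    if (!loadSharp())
        throw new Error('sharp is unavailable, scrypted server may be out of date');
    const vips = await loadVipsImage(jpeg, 'example-source-id');
    try {
        // raw single channel output, resized to 300 pixels wide
        return await vips.toBuffer({ resize: { width: 300 }, format: 'gray' });
    }
    finally {
        vips.close();
    }
}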


@@ -2,14 +2,15 @@ import AxiosDigestAuth from '@koush/axios-digest-auth';
import { AutoenableMixinProvider } from "@scrypted/common/src/autoenable-mixin-provider";
import { createMapPromiseDebouncer, RefreshPromise, singletonPromise, TimeoutError } from "@scrypted/common/src/promise-utils";
import { SettingsMixinDeviceBase, SettingsMixinDeviceOptions } from "@scrypted/common/src/settings-mixin";
import sdk, { BufferConverter, Camera, DeviceManifest, DeviceProvider, FFmpegInput, MediaObject, MediaObjectOptions, MixinProvider, RequestMediaStreamOptions, RequestPictureOptions, ResponsePictureOptions, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera } from "@scrypted/sdk";
import sdk, { BufferConverter, Camera, DeviceManifest, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, MediaObject, MediaObjectOptions, MixinProvider, RequestMediaStreamOptions, RequestPictureOptions, ResponsePictureOptions, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera } from "@scrypted/sdk";
import { StorageSettings } from "@scrypted/sdk/storage-settings";
import axios, { AxiosInstance } from "axios";
import https from 'https';
import path from 'path';
import MimeType from 'whatwg-mimetype';
import url from 'url';
import { ffmpegFilterImage, ffmpegFilterImageBuffer } from './ffmpeg-image-filter';
import { ImageReader, ImageReaderNativeId, loadVipsImage, sharpInstance } from './image-reader';
import { ImageConverter, ImageConverterNativeId } from './image-converter';
import { ImageReader, ImageReaderNativeId, loadSharp, loadVipsImage } from './image-reader';
import { ImageWriter, ImageWriterNativeId } from './image-writer';
const { mediaManager, systemManager } = sdk;
@@ -175,7 +176,12 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
request.refresh = false;
takePrebufferPicture = async () => {
// this.console.log('snapshotting active prebuffer');
return mediaManager.convertMediaObjectToBuffer(await realDevice.getVideoStream(request), 'image/jpeg');
const ffmpegInput = await sdk.mediaManager.convertMediaObjectToJSON<FFmpegInput>(await realDevice.getVideoStream(request), ScryptedMimeTypes.FFmpegInput);
return ffmpegFilterImage(ffmpegInput.inputArguments, {
console: this.debugConsole,
ffmpegPath: await mediaManager.getFFmpegPath(),
timeout: 10000,
});
};
return takePrebufferPicture;
}
@@ -261,7 +267,7 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
throw new Error('Snapshot Unavailable (Snapshot URL empty)');
}
async takePicture(options?: RequestPictureOptions): Promise<MediaObject> {
async takePictureRaw(options?: RequestPictureOptions): Promise<Buffer> {
let picture: Buffer;
const eventSnapshot = options?.reason === 'event';
@@ -269,7 +275,7 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
picture = await this.snapshotDebouncer({
id: options?.id,
reason: options?.reason,
}, async () => {
}, eventSnapshot ? 0 : 2000, async () => {
let picture = await this.takePictureInternal();
picture = await this.cropAndScale(picture);
this.clearCachedPictures();
@@ -288,7 +294,7 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
throw e;
if (!this.currentPicture)
return this.createMediaObject(await this.createErrorImage(e), 'image/jpeg');
return this.createErrorImage(e);
this.console.warn('Snapshot failed, but recovered from cache', e);
picture = this.currentPicture;
@@ -300,10 +306,10 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
picture = await this.snapshotDebouncer({
needSoftwareResize: true,
picture: options.picture,
}, async () => {
}, eventSnapshot ? 0 : 2000, async () => {
this.debugConsole?.log("Resizing picture from camera", options?.picture);
if (sharpInstance) {
if (loadSharp()) {
const vips = await loadVipsImage(picture, this.id);
try {
const ret = await vips.toBuffer({
@@ -352,10 +358,14 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
catch (e) {
if (eventSnapshot)
throw e;
return this.createMediaObject(await this.createErrorImage(e), 'image/jpeg');
return this.createErrorImage(e);
}
}
return this.createMediaObject(picture, 'image/jpeg');
return picture;
}
async takePicture(options?: RequestPictureOptions): Promise<MediaObject> {
return this.createMediaObject(await this.takePictureRaw(options), 'image/jpeg');
}
async cropAndScale(picture: Buffer) {
@@ -367,7 +377,7 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
const xmax = Math.max(...this.storageSettings.values.snapshotCropScale.map(([x, y]) => x)) / 100;
const ymax = Math.max(...this.storageSettings.values.snapshotCropScale.map(([x, y]) => y)) / 100;
if (sharpInstance) {
if (loadSharp()) {
const vips = await loadVipsImage(picture, this.id);
try {
const ret = await vips.toBuffer({
@@ -532,45 +542,21 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
}
}
type DimDict<T extends string> = {
[key in T]: string;
};
export function parseDims<T extends string>(dict: DimDict<T>) {
const ret: {
[key in T]?: number;
} & {
fractional?: boolean;
} = {
};
for (const t of Object.keys(dict)) {
const val = dict[t as T];
if (val?.endsWith('%')) {
ret.fractional = true;
ret[t] = parseFloat(val?.substring(0, val?.length - 1)) / 100;
}
else {
ret[t] = val ? parseFloat(val) : undefined;
}
}
return ret;
}
class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, BufferConverter, Settings, DeviceProvider {
export class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, BufferConverter, Settings, DeviceProvider, HttpRequestHandler {
storageSettings = new StorageSettings(this, {
debugLogging: {
title: 'Debug Logging',
description: 'Debug logging for all cameras will be shown in the Snapshot Plugin Console.',
type: 'boolean',
}
},
});
mixinDevices = new Map<string, SnapshotMixin>();
constructor(nativeId?: string) {
super(nativeId);
this.fromMimeType = ScryptedMimeTypes.FFmpegInput;
this.toMimeType = 'image/jpeg';
this.fromMimeType = ScryptedMimeTypes.SchemePrefix + 'scrypted-media' + ';converter-weight=0';
this.toMimeType = ScryptedMimeTypes.LocalUrl;
const manifest: DeviceManifest = {
devices: [
@@ -581,11 +567,19 @@ class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, B
],
type: ScryptedDeviceType.Builtin,
nativeId: ImageWriterNativeId,
},
{
name: 'Image Converter',
interfaces: [
ScryptedInterface.BufferConverter,
],
type: ScryptedDeviceType.Builtin,
nativeId: ImageConverterNativeId,
}
]
],
};
if (sharpInstance) {
if (loadSharp()) {
manifest.devices.push(
{
name: 'Image Reader',
@@ -604,6 +598,8 @@ class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, B
}
async getDevice(nativeId: string): Promise<any> {
if (nativeId === ImageConverterNativeId)
return new ImageConverter(this, ImageConverterNativeId);
if (nativeId === ImageWriterNativeId)
return new ImageWriter(ImageWriterNativeId);
if (nativeId === ImageReaderNativeId)
@@ -626,103 +622,74 @@ class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, B
return this.console;
}
async getLocalSnapshot(id: string, iface: string, search: string) {
const endpoint = await sdk.endpointManager.getAuthenticatedPath(this.nativeId);
const ret = url.resolve(path.join(endpoint, id, iface, `${Date.now()}.jpg`) + `${search}`, '');
return Buffer.from(ret);
}
async convert(data: any, fromMimeType: string, toMimeType: string, options?: MediaObjectOptions): Promise<any> {
const mime = new MimeType(toMimeType);
const url = new URL(data.toString());
const id = url.hostname;
const path = url.pathname.split('/')[1];
const ffmpegInput = JSON.parse(data.toString()) as FFmpegInput;
if (path === ScryptedInterface.Camera) {
return this.getLocalSnapshot(id, path, url.search);
}
if (path === ScryptedInterface.VideoCamera) {
return this.getLocalSnapshot(id, path, url.search);
}
else {
throw new Error('Unrecognized Scrypted Media interface.')
}
}
const {
width,
height,
fractional
} = parseDims({
width: mime.parameters.get('width'),
height: mime.parameters.get('height'),
});
const {
left,
top,
right,
bottom,
fractional: cropFractional,
} = parseDims({
left: mime.parameters.get('left'),
top: mime.parameters.get('top'),
right: mime.parameters.get('right'),
bottom: mime.parameters.get('bottom'),
});
const filename = ffmpegInput.url?.startsWith('file:') && new URL(ffmpegInput.url).pathname;
if (filename && sharpInstance) {
const vips = await loadVipsImage(filename, options?.sourceId);
const resize = width && {
width,
height,
};
if (fractional) {
if (resize.width)
resize.width *= vips.width;
if (resize.height)
resize.height *= vips.height;
}
const crop = left && {
left,
top,
width: right - left,
height: bottom - top,
};
if (cropFractional) {
crop.left *= vips.width;
crop.top *= vips.height;
crop.width *= vips.width;
crop.height *= vips.height;
}
try {
const ret = await vips.toBuffer({
resize,
crop,
format: 'jpg',
});
return ret;
}
finally {
vips.close();
}
async onRequest(request: HttpRequest, response: HttpResponse): Promise<void> {
if (request.isPublicEndpoint) {
response.send('', {
code: 404,
});
return;
}
const args = [
...ffmpegInput.inputArguments,
...(ffmpegInput.h264EncoderArguments || []),
];
const pathname = request.url.substring(request.rootPath.length);
const [_, id, iface] = pathname.split('/');
try {
if (iface !== ScryptedInterface.Camera && iface !== ScryptedInterface.VideoCamera)
throw new Error();
return ffmpegFilterImage(args, {
console: this.debugConsole,
ffmpegPath: await mediaManager.getFFmpegPath(),
resize: width === undefined && height === undefined
? undefined
: {
width,
height,
fractional,
},
crop: left === undefined || right === undefined || top === undefined || bottom === undefined
? undefined
: {
left,
top,
width: right - left,
height: bottom - top,
fractional: cropFractional,
},
timeout: 10000,
time: parseFloat(mime.parameters.get('time')),
});
const search = new URLSearchParams(pathname.split('?')[1]);
const mixin = this.mixinDevices.get(id);
let buffer: Buffer;
const rpo: RequestPictureOptions = {
picture: {
width: parseInt(search.get('width')) || undefined,
height: parseInt(search.get('height')) || undefined,
}
};
if (mixin && iface === ScryptedInterface.Camera) {
buffer = await mixin.takePictureRaw(rpo)
}
else {
const device = systemManager.getDeviceById<Camera & VideoCamera>(id);
const picture = iface === ScryptedInterface.Camera ? await device.takePicture(rpo) : await device.getVideoStream();
buffer = await mediaManager.convertMediaObjectToBuffer(picture, 'image/jpeg');
}
response.send(buffer, {
headers: {
'Content-Type': 'image/jpeg',
'Cache-Control': 'max-age=10',
}
});
}
catch (e) {
response.send('', {
code: 500,
});
}
}
async canMixin(type: ScryptedDeviceType, interfaces: string[]): Promise<string[]> {
@@ -730,8 +697,9 @@ class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, B
return [ScryptedInterface.Camera, ScryptedInterface.Settings];
return undefined;
}
async getMixin(mixinDevice: any, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState: { [key: string]: any; }): Promise<any> {
return new SnapshotMixin(this, {
const ret = new SnapshotMixin(this, {
mixinDevice,
mixinDeviceInterfaces,
mixinDeviceState,
@@ -739,6 +707,8 @@ class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, B
group: 'Snapshot',
groupKey: 'snapshot',
});
this.mixinDevices.set(ret.id, ret);
return ret;
}
async shouldEnableMixin(device: ScryptedDevice) {
@@ -751,6 +721,8 @@ class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, B
}
async releaseMixin(id: string, mixinDevice: any): Promise<void> {
if (this.mixinDevices.get(id) === mixinDevice)
this.mixinDevices.delete(id);
await mixinDevice.release()
}
}
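A hedged sketch of the new local snapshot flow (the device id is a placeholder): the plugin now converts scrypted-media URLs into an authenticated LocalUrl, and the HttpRequestHandler added above serves the JPEG, honoring width/height query parameters.

import sdk, { ScryptedInterface, ScryptedMimeTypes } from '@scrypted/sdk';

async function getLocalSnapshotUrl(deviceId: string): Promise<string> {
    const mo = await sdk.mediaManager.createMediaObject(
        Buffer.from(`scrypted-media://${deviceId}/${ScryptedInterface.Camera}?width=1280`),
        ScryptedMimeTypes.SchemePrefix + 'scrypted-media');
    // resolves to something like <authenticated endpoint>/<deviceId>/Camera/<timestamp>.jpg?width=1280,
    // which onRequest answers with an 'image/jpeg' response cached for 10 seconds.
    const localUrl = await sdk.mediaManager.convertMediaObjectToBuffer(mo, ScryptedMimeTypes.LocalUrl);
    return localUrl.toString();
}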


@@ -0,0 +1,164 @@
import sdk, { FFmpegInput, RecordingStreamThumbnailOptions } from '@scrypted/sdk';
import url from 'url';
import type { MIMETypeParameters } from 'whatwg-mimetype';
import { FFmpegImageFilterOptions, ffmpegFilterImage, ffmpegFilterImageBuffer } from './ffmpeg-image-filter';
import { loadSharp, loadVipsImage } from './image-reader';
export type DimDict<T extends string> = {
[key in T]: string;
};
export function parseDims<T extends string>(dict: DimDict<T>) {
const ret: {
[key in T]?: number;
} & {
fractional?: boolean;
} = {
};
for (const t of Object.keys(dict)) {
const val = dict[t as T];
if (val?.endsWith('%')) {
ret.fractional = true;
ret[t] = parseFloat(val?.substring(0, val?.length - 1)) / 100;
}
else {
ret[t] = val ? parseFloat(val) : undefined;
}
}
return ret;
}
export interface ImageOp {
resize?: ReturnType<typeof parseDims<'width' | 'height'>>;
crop?: ReturnType<typeof parseDims<'left' | 'top' | 'right' | 'bottom'>>;
}
export function parseImageOp(parameters: MIMETypeParameters | URLSearchParams): ImageOp {
return {
resize: parseDims({
width: parameters.get('width'),
height: parameters.get('height'),
}),
crop: parseDims({
left: parameters.get('left'),
top: parameters.get('top'),
right: parameters.get('right'),
bottom: parameters.get('bottom'),
}),
};
}
export function toImageOp(options: RecordingStreamThumbnailOptions) {
const ret: ImageOp = {};
const { resize, crop } = options || {};
if (resize) {
ret.resize = {
width: resize.width,
height: resize.height,
fractional: resize.percent,
};
}
if (crop) {
ret.crop = {
left: crop.left,
top: crop.top,
right: crop.left + crop.width,
bottom: crop.top + crop.height,
fractional: crop.percent,
}
}
return ret;
}
export async function processImageOp(input: string | FFmpegInput | Buffer, op: ImageOp, time: number, sourceId: string, debugConsole: Console): Promise<Buffer> {
const { crop, resize } = op;
const { width, height, fractional } = resize || {};
const { left, top, right, bottom, fractional: cropFractional } = crop || {};
const filenameOrBuffer = typeof input === 'string' || Buffer.isBuffer(input) ? input : input.url?.startsWith('file:') && url.fileURLToPath(input.url);
if (filenameOrBuffer && loadSharp()) {
const vips = await loadVipsImage(filenameOrBuffer, sourceId);
const resize = width != null && {
width,
height,
};
if (fractional) {
if (resize.width)
resize.width *= vips.width;
if (resize.height)
resize.height *= vips.height;
}
const crop = left != null && {
left,
top,
width: right - left,
height: bottom - top,
};
if (cropFractional) {
crop.left *= vips.width;
crop.top *= vips.height;
crop.width *= vips.width;
crop.height *= vips.height;
}
try {
const ret = await vips.toBuffer({
resize,
crop,
format: 'jpg',
});
return ret;
}
finally {
vips.close();
}
}
const ffmpegOpts: FFmpegImageFilterOptions = {
console: debugConsole,
ffmpegPath: await sdk.mediaManager.getFFmpegPath(),
resize: width === undefined && height === undefined
? undefined
: {
width,
height,
fractional,
},
crop: left === undefined || right === undefined || top === undefined || bottom === undefined
? undefined
: {
left,
top,
width: right - left,
height: bottom - top,
fractional: cropFractional,
},
timeout: 10000,
time,
};
if (Buffer.isBuffer(input)) {
return ffmpegFilterImageBuffer(input, ffmpegOpts);
}
const ffmpegInput: FFmpegInput = typeof input !== 'string'
? input
: {
inputArguments: [
'-i', input,
]
};
const args = [
...ffmpegInput.inputArguments,
...(ffmpegInput.h264EncoderArguments || []),
];
return ffmpegFilterImage(args, ffmpegOpts);
}
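A brief usage sketch of the parsing helpers above (values are illustrative): percentage strings mark the result as fractional, plain numbers are used as-is, and missing parameters come back undefined.

import { parseImageOp } from './parse-dims';

// a 50% resize plus a centered 80% crop, expressed as query parameters
const op = parseImageOp(new URLSearchParams('width=50%&left=10%&top=10%&right=90%&bottom=90%'));
// op.resize -> { width: 0.5, height: undefined, fractional: true }
// op.crop   -> { left: 0.1, top: 0.1, right: 0.9, bottom: 0.9, fractional: true }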


@@ -1,17 +1,18 @@
{
"name": "@scrypted/tapo",
"version": "0.0.10",
"version": "0.0.11",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tapo",
"version": "0.0.10",
"version": "0.0.11",
"dependencies": {
"axios": "^1.3.4",
"crc-32": "^1.2.2"
},
"devDependencies": {
"@koush/axios-digest-auth": "0.8.6",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^18.14.6",
@@ -36,7 +37,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.84",
"version": "0.3.2",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -109,6 +110,25 @@
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
"node_modules/@koush/axios-digest-auth": {
"version": "0.8.6",
"resolved": "https://registry.npmjs.org/@koush/axios-digest-auth/-/axios-digest-auth-0.8.6.tgz",
"integrity": "sha512-e/XKs7/BYpPQkces0Cm4dUmhT9hR0rjvnNZAVRyRnNWdQ8cyCMFWS9HIrMWOdzAocKDNBXi1vKjJ8CywrW5xgQ==",
"dev": true,
"dependencies": {
"auth-header": "^1.0.0",
"axios": "^0.21.4"
}
},
"node_modules/@koush/axios-digest-auth/node_modules/axios": {
"version": "0.21.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz",
"integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==",
"dev": true,
"dependencies": {
"follow-redirects": "^1.14.0"
}
},
"node_modules/@scrypted/common": {
"resolved": "../../common",
"link": true
@@ -179,10 +199,16 @@
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"node_modules/auth-header": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/auth-header/-/auth-header-1.0.0.tgz",
"integrity": "sha512-CPPazq09YVDUNNVWo4oSPTQmtwIzHusZhQmahCKvIsk0/xH6U3QsMAv3sM+7+Q0B1K2KJ/Q38OND317uXs4NHA==",
"dev": true
},
"node_modules/axios": {
"version": "1.3.4",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.3.4.tgz",
"integrity": "sha512-toYm+Bsyl6VC5wSkfkbbNB6ROv7KY93PEBBL6xyDczaIHasAiv4wPqQ/c4RjoQzipxRD2W5g21cOqQulZ7rHwQ==",
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz",
"integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==",
"dependencies": {
"follow-redirects": "^1.15.0",
"form-data": "^4.0.0",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/tapo",
"version": "0.0.10",
"version": "0.0.11",
"description": "Tapo Camera Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",


@@ -6,7 +6,10 @@ import crypto from 'crypto';
import { Duplex, PassThrough, Writable } from 'stream';
import { digestAuthHeader } from './digest-auth';
export function getTapoAdminPassword(cloudPassword: string) {
export function getTapoAdminPassword(cloudPassword: string, useSHA256: boolean) {
if (useSHA256) {
return crypto.createHash('sha256').update(Buffer.from(cloudPassword)).digest('hex').toUpperCase();
}
return crypto.createHash('md5').update(Buffer.from(cloudPassword)).digest('hex').toUpperCase();
}
@@ -36,8 +39,9 @@ export class TapoAPI {
});
const wwwAuthenticate = response.headers['www-authenticate'];
const useSHA256 = wwwAuthenticate.includes('encrypt_type="3"');
const password = getTapoAdminPassword(options.cloudPassword);
const password = getTapoAdminPassword(options.cloudPassword, useSHA256);
const auth = digestAuthHeader('POST', '/stream', wwwAuthenticate, 'admin', password, 0) + ', algorithm=MD5';


@@ -1,12 +1,12 @@
{
"name": "@scrypted/webrtc",
"version": "0.1.81",
"version": "0.2.3",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/webrtc",
"version": "0.1.81",
"version": "0.2.3",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/webrtc",
"version": "0.1.81",
"version": "0.2.3",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",


@@ -195,7 +195,7 @@ export async function createTrackForwarder(options: {
videoTranscodeArguments.push('-vcodec', 'copy')
}
const audioTranscodeArguments = getFFmpegRtpAudioOutputArguments(ffmpegInput.mediaStreamOptions?.audio?.codec, audioTransceiver.sender.codec, maximumCompatibilityMode);
const audioTranscodeArguments = getFFmpegRtpAudioOutputArguments(ffmpegInput.mediaStreamOptions?.audio, audioTransceiver.sender.codec, maximumCompatibilityMode);
let needPacketization = !!videoCodecCopy;
if (transcode) {
@@ -219,7 +219,18 @@ export async function createTrackForwarder(options: {
let opusRepacketizer: OpusRepacketizer;
let lastPacketTs: number = 0;
const audioRtpTrack: RtpTrack = {
codecCopy: audioCodecCopy,
negotiate: async msection => {
if (!audioCodecCopy)
return false;
if (audioCodecCopy === 'copy')
return true;
if (msection.codec === 'opus')
return msection.rtpmap.clock === 48000;
if (msection.codec !== 'pcm_mulaw' && msection.codec !== 'pcm_alaw')
return false;
return msection.rtpmap.clock === 8000;
},
// codecCopy: audioCodecCopy,
onRtp: buffer => {
if (false && audioTransceiver.sender.codec.mimeType?.toLowerCase() === "audio/opus") {
// this will use 3 20ms frames, 60ms. seems to work up to 6/120ms


@@ -309,6 +309,8 @@ export async function startRtpForwarderProcess(console: Console, ffmpegInput: FF
audio.srtp = undefined;
inputArguments = [
'-analyzeduration', '0',
'-probesize', '512',
'-i', `rtsp://${audioClient.host}:${audioClient.port}`,
];
}
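For reference, a hedged restatement of the input pattern added above (the host and port are placeholders): '-analyzeduration 0' combined with a small '-probesize' lets ffmpeg start demuxing the audio-only RTSP return channel immediately instead of buffering input to gather stream statistics.

// minimal low latency RTSP input arguments, mirroring the change above
const inputArguments = [
    '-analyzeduration', '0', // do not buffer input to analyze stream parameters
    '-probesize', '512',     // probe only 512 bytes before emitting packets
    '-i', 'rtsp://127.0.0.1:55123', // placeholder audio return address
];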


@@ -1,3 +1,4 @@
import { AudioStreamOptions } from "@scrypted/sdk";
import { RTCRtpCodecParameters } from "./werift";
export const requiredVideoCodec = new RTCRtpCodecParameters({
@@ -46,26 +47,29 @@ export function getAudioCodec(outputCodecParameters: RTCRtpCodecParameters) {
return {
name: 'pcm_alaw',
encoder: 'pcm_alaw',
sampleRate: 8000,
};
}
if (outputCodecParameters.name === 'PCMU') {
return {
name: 'pcm_mulaw',
encoder: 'pcm_mulaw',
sampleRate: 8000,
};
}
return {
name: 'opus',
encoder: 'libopus',
sampleRate: 16000,
};
}
export function getFFmpegRtpAudioOutputArguments(inputCodec: string, outputCodecParameters: RTCRtpCodecParameters, maximumCompatibilityMode: boolean) {
export function getFFmpegRtpAudioOutputArguments(audio: AudioStreamOptions, outputCodecParameters: RTCRtpCodecParameters, maximumCompatibilityMode: boolean) {
const ret: string[] = [];
const { encoder, name } = getAudioCodec(outputCodecParameters);
const { encoder, name, sampleRate } = getAudioCodec(outputCodecParameters);
if (inputCodec === name && !maximumCompatibilityMode) {
if (audio?.codec === name && (!audio?.sampleRate || audio?.sampleRate === sampleRate) && !maximumCompatibilityMode) {
ret.push('-acodec', 'copy');
}
else {


@@ -67,17 +67,21 @@ export class WeriftSignalingSession implements RTCSignalingSession {
async addIceCandidate(candidate: RTCIceCandidateInit) {
this.remoteHasV6 ||= isV6Only(candidate.candidate?.split(' ')?.[4]);
// todo: fix this in werift or verify it still occurs at later point
// werift seems to choose whatever candidate pair results in the fastest connection.
// this makes it sometimes choose the STUN or TURN candidate even when
// on the local network.
if (candidate.candidate?.includes('relay')) {
if (this.remoteHasV6 && !this.localHasV6) {
this.console.log('Possible mobile network IPv6to4 translation detected.');
}
else {
await sleep(500);
}
// note: this code is done, werift was modified to ban bad ips like 6to4 relays from tmobile.
// todo: fix this in werift or verify it still occurs at later point
// werift seems to choose whatever candidate pair results in the fastest connection.
// this makes it sometimes choose the STUN or TURN candidate even when
// on the local network.
// if (this.remoteHasV6 && !this.localHasV6) {
// this.console.log('Possible mobile network IPv6to4 translation detected.');
// }
// else {
// await sleep(500);
// }
await sleep(500);
}
else if (candidate.candidate?.includes('srflx')) {
await sleep(250);


@@ -278,7 +278,7 @@ export async function createRTCPeerConnectionSource(options: {
const { kill: destroy } = await startRtpForwarderProcess(console, ffmpegInput, {
audio: {
codecCopy: audioCodec.name,
encoderArguments: getFFmpegRtpAudioOutputArguments(ffmpegInput.mediaStreamOptions?.audio?.codec, audioTransceiver.sender.codec, maximumCompatibilityMode),
encoderArguments: getFFmpegRtpAudioOutputArguments(ffmpegInput.mediaStreamOptions?.audio, audioTransceiver.sender.codec, maximumCompatibilityMode),
onRtp: (rtp) => {
const packet = RtpPacket.deSerialize(rtp);
const now = Date.now();


@@ -0,0 +1,22 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/ubuntu
{
"name": "Scrypted",
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
"image": "koush/scrypted",
"overrideCommand": false,
"mounts": [
"source=${localWorkspaceFolder}/.devcontainer/volume,target=/server/volume,type=bind,consistency=cached"
]
// Features to add to the dev container. More info: https://containers.dev/features.
// "features": {},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "uname -a",
// Configure tool-specific properties.
// "customizations": {},
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}

plugins/wyze/.gitignore (vendored, new file, 4 lines)

@@ -0,0 +1,4 @@
.DS_Store
out/
node_modules/
dist/

plugins/wyze/.npmignore (new file, 12 lines)

@@ -0,0 +1,12 @@
.DS_Store
out/
node_modules/
*.map
fs
src
.vscode
dist/*.js
__pycache__
docker-wyze-bridge
.devcontainer
.venv

plugins/wyze/.vscode/launch.json (vendored, new file, 29 lines)

@@ -0,0 +1,29 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Scrypted Debugger",
"type": "python",
"request": "attach",
"connect": {
"host": "${config:scrypted.debugHost}",
"port": 10081
},
"justMyCode": false,
"preLaunchTask": "scrypted: deploy+debug",
"pathMappings": [
{
"localRoot": "/Volumes/Dev/scrypted/server/python/",
"remoteRoot": "/Volumes/Dev/scrypted/server/python/",
},
{
"localRoot": "${workspaceFolder}/src",
"remoteRoot": "${config:scrypted.pythonRemoteRoot}"
},
]
}
]
}

plugins/wyze/.vscode/settings.json (vendored, new file, 19 lines)

@@ -0,0 +1,19 @@
{
// docker installation
// "scrypted.debugHost": "koushik-thin",
// "scrypted.serverRoot": "/server",
// pi local installation
// "scrypted.debugHost": "192.168.2.119",
// "scrypted.serverRoot": "/home/pi/.scrypted",
// local checkout
"scrypted.debugHost": "koushik-ubuntu",
"scrypted.serverRoot": "/server",
"scrypted.pythonRemoteRoot": "${config:scrypted.serverRoot}/volume/plugin.zip",
"python.analysis.extraPaths": [
"./node_modules/@scrypted/sdk/types/scrypted_python"
]
}

plugins/wyze/.vscode/tasks.json (vendored, new file, 20 lines)

@@ -0,0 +1,20 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "scrypted: deploy+debug",
"type": "shell",
"presentation": {
"echo": true,
"reveal": "silent",
"focus": false,
"panel": "shared",
"showReuseMessage": true,
"clear": false
},
"command": "npm run scrypted-vscode-launch ${config:scrypted.debugHost}",
},
]
}

plugins/wyze/README.md (new file, 5 lines)

@@ -0,0 +1,5 @@
# Wyze Camera Plugin for Scrypted
This plugin must be installed inside a Scrypted Docker installation. Mac and Windows are not supported.
The Wyze plugin requires an [API Key](https://developer-api-console.wyze.com/#/apikey/view).


@@ -0,0 +1 @@
../docker-wyze-bridge/app/wyzecam/tutk/device_config.json

plugins/wyze/package-lock.json (generated, new file, 85 lines)

@@ -0,0 +1,85 @@
{
"name": "@scrypted/wyze",
"version": "0.0.11",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/wyze",
"version": "0.0.11",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"version": "0.3.3",
"dev": true,
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
"ncp": "^2.0.0",
"raw-loader": "^4.0.2",
"rimraf": "^3.0.2",
"tmp": "^0.2.1",
"ts-loader": "^9.4.2",
"typescript": "^4.9.4",
"webpack": "^5.75.0",
"webpack-bundle-analyzer": "^4.5.0"
},
"bin": {
"scrypted-changelog": "bin/scrypted-changelog.js",
"scrypted-debug": "bin/scrypted-debug.js",
"scrypted-deploy": "bin/scrypted-deploy.js",
"scrypted-deploy-debug": "bin/scrypted-deploy-debug.js",
"scrypted-package-json": "bin/scrypted-package-json.js",
"scrypted-setup-project": "bin/scrypted-setup-project.js",
"scrypted-webpack": "bin/scrypted-webpack.js"
},
"devDependencies": {
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"stringify-object": "^3.3.0",
"ts-node": "^10.4.0",
"typedoc": "^0.23.21"
}
},
"../sdk": {
"extraneous": true
},
"node_modules/@scrypted/sdk": {
"resolved": "../../sdk",
"link": true
}
},
"dependencies": {
"@scrypted/sdk": {
"version": "file:../../sdk",
"requires": {
"@babel/preset-typescript": "^7.18.6",
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
"ncp": "^2.0.0",
"raw-loader": "^4.0.2",
"rimraf": "^3.0.2",
"stringify-object": "^3.3.0",
"tmp": "^0.2.1",
"ts-loader": "^9.4.2",
"ts-node": "^10.4.0",
"typedoc": "^0.23.21",
"typescript": "^4.9.4",
"webpack": "^5.75.0",
"webpack-bundle-analyzer": "^4.5.0"
}
}
}
}

plugins/wyze/package.json (new file, 37 lines)

@@ -0,0 +1,37 @@
{
"name": "@scrypted/wyze",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",
"build": "scrypted-webpack",
"prepublishOnly": "NODE_ENV=production scrypted-webpack",
"prescrypted-vscode-launch": "scrypted-webpack",
"scrypted-vscode-launch": "scrypted-deploy-debug",
"scrypted-deploy-debug": "scrypted-deploy-debug",
"scrypted-debug": "scrypted-debug",
"scrypted-deploy": "scrypted-deploy",
"scrypted-readme": "scrypted-readme",
"scrypted-package-json": "scrypted-package-json"
},
"keywords": [
"scrypted",
"plugin",
"wyze"
],
"scrypted": {
"name": "Wyze Plugin",
"runtime": "python",
"type": "DeviceProvider",
"interfaces": [
"DeviceProvider",
"Settings"
],
"pluginDependencies": [
"@scrypted/prebuffer-mixin"
]
},
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.11"
}

plugins/wyze/src/main.py (new file, 778 lines)

@@ -0,0 +1,778 @@
from __future__ import annotations
from typing import Any, Coroutine, List, Dict, Callable, Iterator, MutableSet
import scrypted_sdk
import asyncio
import urllib.request
import os
import urllib
import sys
import platform
from scrypted_sdk.other import MediaObject
import wyzecam
import wyzecam.api_models
import json
import threading
import queue
import traceback
from ctypes import c_int
import concurrent.futures
import subprocess
import base64
import struct
from wyzecam.tutk.tutk import (
FRAME_SIZE_2K,
FRAME_SIZE_1080P,
FRAME_SIZE_360P,
)
from scrypted_sdk.types import (
DeviceProvider,
RequestMediaStreamOptions,
ResponseMediaStreamOptions,
VideoCamera,
ScryptedDeviceType,
ScryptedInterface,
Settings,
Setting,
)
os.environ["TUTK_PROJECT_ROOT"] = os.path.join(
os.environ["SCRYPTED_PLUGIN_VOLUME"], "zip/unzipped/fs"
)
sdkKey = "AQAAAIZ44fijz5pURQiNw4xpEfV9ZysFH8LYBPDxiONQlbLKaDeb7n26TSOPSGHftbRVo25k3uz5of06iGNB4pSfmvsCvm/tTlmML6HKS0vVxZnzEuK95TPGEGt+aE15m6fjtRXQKnUav59VSRHwRj9Z1Kjm1ClfkSPUF5NfUvsb3IAbai0WlzZE1yYCtks7NFRMbTXUMq3bFtNhEERD/7oc504b"
toThreadExecutor = concurrent.futures.ThreadPoolExecutor(
max_workers=2, thread_name_prefix="image"
)
codecMap = {
"mulaw": "PCMU",
"alaw": "PCMA",
"s16be": "L16",
"opus": "OPUS",
"aac": "MP4A-LATM",
}
async def to_thread(f):
loop = asyncio.get_running_loop()
return await loop.run_in_executor(toThreadExecutor, f)
class CodecInfo:
videoCodec: str
videoCodecInfo: tuple[bytes, bytes]
audioCodec: str
audioSampleRate: int
def __init__(
self,
videoCodec: str,
videoCodecInfo: tuple[bytes, bytes],
audioCodec: str,
audioSampleRate: int,
) -> None:
self.videoCodec = videoCodec
self.videoCodecInfo = videoCodecInfo
self.audioCodec = audioCodec
self.audioSampleRate = audioSampleRate
class WyzeCamera(scrypted_sdk.ScryptedDeviceBase, VideoCamera, Settings):
camera: wyzecam.WyzeCamera
plugin: WyzePlugin
streams: MutableSet[wyzecam.WyzeIOTCSession]
activeStream: wyzecam.WyzeIOTCSession
audioQueues: MutableSet[queue.Queue[tuple[bytes, Any]]]
main: CodecInfo
sub: CodecInfo
def __init__(
self, nativeId: str | None, plugin: WyzePlugin, camera: wyzecam.WyzeCamera
):
super().__init__(nativeId=nativeId)
self.plugin = plugin
self.camera = camera
self.streams = set()
self.activeStream = None
self.audioQueues = set()
self.main = None
self.sub = None
self.mainFrameSize = FRAME_SIZE_2K if camera.is_2k else FRAME_SIZE_1080P
self.subByterate = 30
self.mainServer = asyncio.ensure_future(self.ensureServer(self.handleClientHD))
self.subServer = asyncio.ensure_future(self.ensureServer(self.handleClientSD))
self.audioServer = asyncio.ensure_future(
self.ensureServer(self.handleAudioClient)
)
self.rfcServer = asyncio.ensure_future(
self.ensureServer(self.handleMainRfcClient)
)
self.rfcSubServer = asyncio.ensure_future(
self.ensureServer(self.handleSubRfcClient)
)
def safeParseJsonStorage(self, key: str):
try:
return json.loads(self.storage.getItem(key))
except:
return None
def getMainByteRate(self, default=False):
try:
bit = int(self.safeParseJsonStorage("bitrate"))
bit = round(bit / 8)
bit = bit if 1 <= bit <= 255 else 0
if not bit:
raise
if default:
return bit * 8
return bit
except:
if default:
return "Default"
return 120 if self.camera.is_2k else 60
async def getSettings(self):
ret: List[Setting] = []
ret.append(
{
"key": "bitrate",
"title": "Main Stream Bitrate",
"description": "The bitrate used by the main stream.",
"value": self.safeParseJsonStorage("bitrate"),
"combobox": True,
"value": str(self.getMainByteRate(True)),
"choices": [
"Default",
"480",
"960",
"1440",
"1920",
],
}
)
return ret
async def putSetting(self, key, value):
self.storage.setItem(key, json.dumps(value))
await scrypted_sdk.deviceManager.onDeviceEvent(
self.nativeId, ScryptedInterface.Settings.value, None
)
await scrypted_sdk.deviceManager.onDeviceEvent(
self.nativeId, ScryptedInterface.VideoCamera.value, None
)
async def handleClientHD(
self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
):
return await self.handleClient(
self.plugin.account.model_copy(),
self.mainFrameSize,
self.getMainByteRate(),
reader,
writer,
)
async def handleClientSD(
self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
):
account = self.plugin.account.model_copy()
# wyze cams will disconnect first stream if the phone id requests a second stream.
# use a different substream phone id, similar to how docker wyze bridge does it.
account.phone_id = account.phone_id[2:]
return await self.handleClient(
account,
FRAME_SIZE_360P,
self.subByterate,
reader,
writer,
)
def receiveAudioData(self):
q: queue.Queue[tuple[bytes, Any]] = queue.Queue()
self.audioQueues.add(q)
try:
while True:
b, info = q.get()
if not b:
return
yield b, info
finally:
self.audioQueues.remove(q)
async def handleMainRfcClient(
self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
):
return await self.handleRfcClient(False, reader, writer)
async def handleSubRfcClient(
self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
):
return await self.handleRfcClient(True, reader, writer)
async def handleRfcClient(
self,
substream: bool,
reader: asyncio.StreamReader,
writer: asyncio.StreamWriter,
):
info = self.sub if substream else self.main
port = await self.subServer if substream else await self.mainServer
audioPort = await self.audioServer
class Protocol:
def __init__(self, pt: int) -> None:
self.pt = pt
def connection_made(self, transport):
self.transport = transport
def datagram_received(self, data, addr):
l = len(data)
len_data = struct.pack(">H", l)
writer.write(len_data)
writer.write(data)
ffmpeg = await scrypted_sdk.mediaManager.getFFmpegPath()
loop = asyncio.get_event_loop()
vt, vp = await loop.create_datagram_endpoint(
lambda: Protocol(96), local_addr=("127.0.0.1", 0)
)
vhost, vport = vt._sock.getsockname()
vprocess = subprocess.Popen(
[
ffmpeg,
"-analyzeduration",
"0",
"-probesize",
"100k",
"-f",
"h264",
"-i",
f"tcp://127.0.0.1:{port}",
"-vcodec",
"copy",
"-an",
"-f",
"rtp",
"-payload_type",
"96",
f"rtp://127.0.0.1:{vport}?pkt_size=1300",
]
)
at, ap = await loop.create_datagram_endpoint(
lambda: Protocol(97), local_addr=("127.0.0.1", 0)
)
ahost, aport = at._sock.getsockname()
aprocess = subprocess.Popen(
[
ffmpeg,
"-analyzeduration",
"0",
"-probesize",
"1024",
"-f",
info.audioCodec,
"-ar",
f"{info.audioSampleRate}",
"-i",
f"tcp://127.0.0.1:{audioPort}",
"-acodec",
"copy",
"-vn",
"-f",
"rtp",
"-payload_type",
"97",
f"rtp://127.0.0.1:{aport}?pkt_size=1300",
]
)
try:
while True:
buffer = await reader.read()
if not len(buffer):
return
except Exception as e:
traceback.print_exception(e)
finally:
self.print("rfc reader closed")
# aprocess.stdin.write("q\n")
aprocess.terminate()
# vprocess.stdin.write("q\n")
vprocess.terminate()
async def handleAudioClient(
self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
):
loop = asyncio.get_event_loop()
closed = False
q = queue.Queue()
async def write():
nonlocal closed
d = q.get()
if closed:
pass
if not d or closed:
closed = True
writer.close()
else:
writer.write(d)
def run():
try:
for frame, frame_info in self.receiveAudioData():
if closed:
return
q.put(frame)
asyncio.run_coroutine_threadsafe(write(), loop=loop)
except Exception as e:
traceback.print_exception(e)
finally:
self.print("audio session closed")
q.put(None)
thread = threading.Thread(target=run)
thread.start()
try:
while True:
buffer = await reader.read()
if not len(buffer):
return
except Exception as e:
traceback.print_exception(e)
finally:
self.print("audio reader closed")
closed = True
async def handleClient(
self,
account: wyzecam.WyzeAccount,
frameSize,
bitrate,
reader: asyncio.StreamReader,
writer: asyncio.StreamWriter,
):
loop = asyncio.get_event_loop()
closed = False
q = queue.Queue()
async def write():
nonlocal closed
d = q.get()
if closed:
pass
if not d or closed:
closed = True
writer.close()
else:
writer.write(d)
s = wyzecam.WyzeIOTCSession(
self.plugin.wyze_iotc.tutk_platform_lib,
account,
self.camera,
frame_size=frameSize,
bitrate=bitrate,
# CONNECTING?
stream_state=c_int(2),
)
self.streams.add(s)
startedAudio = False
if not self.activeStream:
self.activeStream = s
def runAudio():
for frame, frame_info in s.recv_audio_data():
for q in self.audioQueues:
q.put((frame, frame_info))
def checkStartAudio():
nonlocal startedAudio
if not startedAudio and self.activeStream == s:
startedAudio = True
thread = threading.Thread(target=runAudio)
thread.start()
def run():
try:
with s as sess:
checkStartAudio()
for frame, frame_info in sess.recv_video_data():
if closed:
return
q.put(frame)
asyncio.run_coroutine_threadsafe(write(), loop=loop)
checkStartAudio()
except Exception as e:
traceback.print_exception(e)
finally:
self.print("session closed")
q.put(None)
thread = threading.Thread(target=run)
thread.start()
try:
while not closed:
buffer = await reader.read()
if not len(buffer):
return
except Exception as e:
traceback.print_exception(e)
finally:
self.streams.remove(s)
if self.activeStream == s:
# promote new audio stream to active
self.activeStream = None
for next in self.streams:
self.activeStream = next
break
self.print("reader closed")
closed = True
writer.close()
async def ensureServer(self, cb) -> int:
server = await asyncio.start_server(cb, "127.0.0.1", 0)
sock = server.sockets[0]
host, port = sock.getsockname()
asyncio.ensure_future(server.serve_forever())
return port
def probeCodec(self, account, frameSize, bitrate):
with wyzecam.WyzeIOTCSession(
self.plugin.wyze_iotc.tutk_platform_lib,
account,
self.camera,
frame_size=frameSize,
bitrate=bitrate,
# CONNECTING?
stream_state=c_int(2),
) as sess:
audioCodec = sess.get_audio_codec()
for data, frame_info in sess.recv_video_data():
nals = data.split(b"\x00\x00\x00\x01")
sps = nals[1]
pps = nals[2]
return audioCodec + (sps, pps)
def probeMainCodec(self):
return self.probeCodec(
self.plugin.account.model_copy(),
self.mainFrameSize,
self.getMainByteRate(),
)
def probeSubCodec(self):
account = self.plugin.account.model_copy()
account.phone_id = account.phone_id[2:]
return self.probeCodec(
account,
FRAME_SIZE_360P,
self.subByterate,
)
async def getVideoStream(
self, options: RequestMediaStreamOptions = None
) -> Coroutine[Any, Any, MediaObject]:
substream = options and options.get("id") == "substream"
if substream:
if not self.sub:
codec, sampleRate, sps, pps = await to_thread(self.probeSubCodec)
self.sub = CodecInfo("h264", (sps, pps), codec, sampleRate)
info = self.sub
if not substream:
if not self.main:
codec, sampleRate, sps, pps = await to_thread(self.probeMainCodec)
self.main = CodecInfo("h264", (sps, pps), codec, sampleRate)
info = self.main
port = await self.subServer if substream else await self.mainServer
audioPort = await self.audioServer
rfcPort = await self.rfcSubServer if substream else await self.rfcServer
msos = self.getVideoStreamOptionsInternal()
mso = msos[1] if substream else msos[0]
mso["audio"]["sampleRate"] = info.audioSampleRate
if True:
sps = base64.b64encode(info.videoCodecInfo[0]).decode()
pps = base64.b64encode(info.videoCodecInfo[1]).decode()
audioCodecName = codecMap.get(info.audioCodec)
sdp = f"""v=0
o=- 0 0 IN IP4 0.0.0.0
s=No Name
t=0 0
m=video 0 RTP/AVP 96
c=IN IP4 0.0.0.0
a=rtpmap:96 H264/90000
a=fmtp:96 packetization-mode=1; sprop-parameter-sets={sps},{pps}; profile-level-id=4D0029
m=audio 0 RTP/AVP 97
c=IN IP4 0.0.0.0
b=AS:128
a=rtpmap:97 {audioCodecName}/{info.audioSampleRate}/1
"""
rfc = {
"url": f"tcp://127.0.0.1:{rfcPort}",
"sdp": sdp,
"mediaStreamOptions": mso,
}
jsonString = json.dumps(rfc)
mo = await scrypted_sdk.mediaManager.createMediaObject(
jsonString.encode(),
"x-scrypted/x-rfc4571",
{
"sourceId": self.id,
},
)
return mo
ffmpegInput: scrypted_sdk.FFmpegInput = {
"container": "ffmpeg",
"mediaStreamOptions": mso,
"inputArguments": [
"-analyzeduration",
"0",
"-probesize",
"100k",
"-f",
"h264",
"-i",
f"tcp://127.0.0.1:{port}",
"-f",
info.audioCodec,
"-ar",
f"{info.audioBitrate}",
"-ac",
"1",
"-i",
f"tcp://127.0.0.1:{audioPort}",
],
}
mo = await scrypted_sdk.mediaManager.createFFmpegMediaObject(
ffmpegInput,
{
"sourceId": self.id,
},
)
return mo
def getVideoStreamOptionsInternal(self) -> list[ResponseMediaStreamOptions]:
ret: List[ResponseMediaStreamOptions] = []
ret.append(
{
"id": "mainstream",
"name": "Main Stream",
"video": {
"codec": "h264",
"width": 2560 if self.camera.is_2k else 1920,
"height": 1440 if self.camera.is_2k else 1080,
},
"audio": {},
}
)
# not all wyze can substream, need to create an exhaustive list?
# wyze pan v2 does not, for example. others seem to set can_substream to False,
# but DO actually support it
ret.append(
{
"id": "substream",
"name": "Substream",
"video": {
"codec": "h264",
"width": 640,
"height": 360,
},
"audio": {},
}
)
return ret
async def getVideoStreamOptions(self) -> list[ResponseMediaStreamOptions]:
return self.getVideoStreamOptionsInternal()
class WyzePlugin(scrypted_sdk.ScryptedDeviceBase, DeviceProvider):
cameras: Dict[str, wyzecam.WyzeCamera]
account: wyzecam.WyzeAccount
tutk_platform_lib: str
def __init__(self):
super().__init__()
self.cameras = {}
self.account = None
if sys.platform != "linux":
self.print("Wyze plugin must be installed under Scrypted for Linux.")
return
if platform.machine() == "x86_64":
suffix = "amd64"
elif platform.machine() == "aarch64":
suffix = "arm64"
else:
self.print("Architecture not supported.")
return
libVersion = "v1"
self.tutk_platform_lib = self.downloadFile(
f"https://github.com/koush/docker-wyze-bridge/raw/main/app/lib.{suffix}",
f"{libVersion}/lib.{suffix}",
)
self.wyze_iotc = wyzecam.WyzeIOTC(
tutk_platform_lib=self.tutk_platform_lib,
sdk_key=sdkKey,
max_num_av_channels=32,
)
self.wyze_iotc.initialize()
self.print(self.tutk_platform_lib)
asyncio.ensure_future(self.refreshDevices())
def downloadFile(self, url: str, filename: str):
filesPath = os.path.join(os.environ["SCRYPTED_PLUGIN_VOLUME"], "files")
fullpath = os.path.join(filesPath, filename)
if os.path.isfile(fullpath):
return fullpath
os.makedirs(os.path.dirname(fullpath), exist_ok=True)
tmp = fullpath + ".tmp"
urllib.request.urlretrieve(url, tmp)
os.rename(tmp, fullpath)
return fullpath
async def getDevice(self, nativeId: str) -> Any:
camera = self.cameras.get(nativeId)
if not camera:
return
return WyzeCamera(nativeId, self, camera)
def safeParseJsonStorage(self, key: str):
try:
return json.loads(self.storage.getItem(key))
except:
return None
async def refreshDevices(self):
print("refreshing")
email = self.safeParseJsonStorage("email")
password = self.safeParseJsonStorage("password")
keyId = self.safeParseJsonStorage("keyId")
apiKey = self.safeParseJsonStorage("apiKey")
if not email or not password or not keyId or not apiKey:
self.print("Wyze Plugin Settings not configured.")
return
auth_info = wyzecam.login(email, password, api_key=apiKey, key_id=keyId)
self.account = wyzecam.get_user_info(auth_info)
cameras = wyzecam.get_camera_list(auth_info)
manifest: scrypted_sdk.DeviceManifest = {"devices": []}
for camera in cameras:
self.cameras[camera.p2p_id] = camera
interfaces: List[ScryptedInterface] = [
ScryptedInterface.Settings.value,
ScryptedInterface.VideoCamera.value,
]
if camera.is_pan_cam:
interfaces.append(ScryptedInterface.PanTiltZoom.value)
if camera.is_battery:
interfaces.append(ScryptedInterface.Battery.value)
if camera.is_vertical:
deviceType = ScryptedDeviceType.Doorbell.value
interfaces.append(ScryptedInterface.BinarySensor.value)
else:
deviceType = ScryptedDeviceType.Camera.value
device: scrypted_sdk.Device = {
"nativeId": camera.p2p_id,
"type": deviceType,
"name": camera.nickname,
"interfaces": interfaces,
"info": {
"firmware": camera.firmware_ver,
"ip": camera.ip,
"mac": camera.mac,
"model": camera.model_name,
},
}
manifest["devices"].append(device)
await scrypted_sdk.deviceManager.onDevicesChanged(manifest)
async def getSettings(self):
ret: List[Setting] = []
ret.append(
{
"key": "email",
"title": "Email",
"description": "The email used to log into the Wyze account. This can not be a Google or Apple Sign in via OAuth.",
"value": self.safeParseJsonStorage("email"),
}
)
ret.append(
{
"key": "password",
"title": "Password",
"type": "password",
"value": self.safeParseJsonStorage("password"),
}
)
ret.append(
{
"key": "keyId",
"title": "Key Id",
"description": "The Key Id retrieved from the Wyze portal.",
"value": self.safeParseJsonStorage("keyId"),
}
)
ret.append(
{
"key": "apiKey",
"title": "API Key",
"type": "password",
"description": "The API Key retrieved from the Wyze portal.",
"value": self.safeParseJsonStorage("apiKey"),
}
)
return ret
async def putSetting(self, key, value):
self.storage.setItem(key, json.dumps(value))
asyncio.ensure_future(self.refreshDevices())
await scrypted_sdk.deviceManager.onDeviceEvent(
None, ScryptedInterface.Settings.value, None
)
def create_scrypted_plugin():
return WyzePlugin()


@@ -0,0 +1,3 @@
pydantic
requests
xxtea

plugins/wyze/src/wyzecam (symbolic link, 1 line)

@@ -0,0 +1 @@
../docker-wyze-bridge/app/wyzecam/


@@ -0,0 +1,13 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "ES2021",
"resolveJsonModule": true,
"moduleResolution": "Node16",
"esModuleInterop": true,
"sourceMap": true
},
"include": [
"src/**/*"
]
}

sdk/package-lock.json (generated, 4 lines)

@@ -1,12 +1,12 @@
{
"name": "@scrypted/sdk",
"version": "0.2.108",
"version": "0.3.3",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/sdk",
"version": "0.2.108",
"version": "0.3.3",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/sdk",
"version": "0.2.108",
"version": "0.3.3",
"description": "",
"main": "dist/src/index.js",
"exports": {


@@ -1,12 +1,12 @@
{
"name": "@scrypted/types",
"version": "0.2.99",
"version": "0.3.3",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/types",
"version": "0.2.99",
"version": "0.3.3",
"license": "ISC",
"devDependencies": {
"@types/rimraf": "^3.0.2",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/types",
"version": "0.2.99",
"version": "0.3.3",
"description": "",
"main": "dist/index.js",
"author": "",


@@ -272,12 +272,18 @@ class ClipPath(TypedDict):
pass
class Point(TypedDict):
pass
class AudioStreamOptions(TypedDict):
bitrate: float
codec: str
encoder: str
profile: str
sampleRate: float
class HttpResponseOptions(TypedDict):
@@ -288,14 +294,16 @@ class ObjectDetectionResult(TypedDict):
boundingBox: tuple[float, float, float, float] # x, y, width, height
className: str # The detection class of the object.
clipPaths: list[ClipPath] # The detection clip paths that outlines various features or segments, like traced facial features.
cost: float # The certainty that this is correct tracked object.
descriptor: str # A base64 encoded Float32Array that represents the vector descriptor of the detection. Can be used to compute euclidian distance to determine similarity.
history: ObjectDetectionHistory
id: str # The id of the tracked object.
label: str # The label of the object, if it was recognized as a familiar object (person, pet, etc).
landmarks: list[Point] # The detection landmarks, like key points in a face landmarks.
movement: Union[ObjectDetectionHistory, Any] # Movement history will track the first/last time this object was moving.
name: str # The name of the object, if it was recognized as a familiar object (person, pet, etc).
resources: VideoResource
score: float
zoneHistory: Any
zones: list[str]
class ObjectDetectionZone(TypedDict):
@@ -310,6 +318,31 @@ class PictureDimensions(TypedDict):
height: float
width: float
class RequestMediaStreamAdaptiveOptions(TypedDict):
keyframe: bool
packetLoss: bool
pictureLoss: bool
reconfigure: bool
resize: bool
class RequestVideoStreamOptions(TypedDict):
bitrate: float
bitrateControl: Any | Any
clientHeight: float
clientWidth: float
codec: str
fps: float
h264Info: H264Info
height: float
idrIntervalMillis: float # Key Frame interval in milliseconds.
keyframeInterval: float # Key Frame interval in frames.
maxBitrate: float
minBitrate: float
profile: str
width: float
class ScryptedDeviceAccessControl(TypedDict):
"""ScryptedDeviceAccessControl describes the methods and properties on a device that will be visible to the user. If methods is nullish, the user will be granted full access to all methods. If properties is nullish, the user will be granted full access to all properties. If events is nullish, the user will be granted full access to all events."""
@@ -631,8 +664,6 @@ class RecordedEventOptions(TypedDict):
count: float
endTime: float
reverseOrder: bool
startId: str
startTime: float
class RecordingStreamThumbnailOptions(TypedDict):
@@ -644,11 +675,12 @@ class RecordingStreamThumbnailOptions(TypedDict):
class RequestMediaStreamOptions(TypedDict):
"""Options passed to VideoCamera.getVideoStream to request specific media formats. The audio/video properties may be omitted to indicate no audio/video is available when calling getVideoStreamOptions or no audio/video is requested when calling getVideoStream."""
adaptive: bool # Request an adaptive bitrate stream, if available. The destination will need to report packet loss indication.
adaptive: bool | RequestMediaStreamAdaptiveOptions # Request an adaptive bitrate stream, if available. The destination will need to report packet loss indication.
audio: AudioStreamOptions
container: str # The container type of this stream, ie: mp4, mpegts, rtsp.
destination: MediaStreamDestination # The intended destination for this media stream. May be used as a hint to determine which main/substream to send if no id is explicitly provided.
destinationId: str # The destination id for this media stream. This should generally be the IP address of the destination, if known. May be used by to determine stream selection and track dynamic bitrate history.
destinationType: str # The destination type of the target of this media stream. This should be the calling application package name. Used for logging or adaptive bitrate fingerprinting.
id: str
metadata: Any # Stream specific metadata.
name: str
@@ -657,7 +689,7 @@ class RequestMediaStreamOptions(TypedDict):
refresh: bool # Specify the stream refresh behavior when this stream is requested. Use case is primarily for perioidic snapshot of streams while they are active.
route: Any | Any | Any # When retrieving media, setting route directs how the media should be retrieved and exposed. A direct route will get the stream as is from the source. This will bypass any intermediaries if possible, such as an NVR or restreamers. An external route will request that that provided route is exposed to the local network.
tool: MediaStreamTool # The tool was used to write the container or will be used to read teh container. Ie, scrypted, the ffmpeg tools, gstreamer.
video: VideoStreamOptions
video: RequestVideoStreamOptions
class RequestPictureOptions(TypedDict):
@@ -670,11 +702,12 @@ class RequestPictureOptions(TypedDict):
class RequestRecordingStreamOptions(TypedDict):
"""Options passed to VideoCamera.getVideoStream to request specific media formats. The audio/video properties may be omitted to indicate no audio/video is available when calling getVideoStreamOptions or no audio/video is requested when calling getVideoStream."""
adaptive: bool # Request an adaptive bitrate stream, if available. The destination will need to report packet loss indication.
adaptive: bool | RequestMediaStreamAdaptiveOptions # Request an adaptive bitrate stream, if available. The destination will need to report packet loss indication.
audio: AudioStreamOptions
container: str # The container type of this stream, ie: mp4, mpegts, rtsp.
destination: MediaStreamDestination # The intended destination for this media stream. May be used as a hint to determine which main/substream to send if no id is explicitly provided.
destinationId: str # The destination id for this media stream. This should generally be the IP address of the destination, if known. May be used by to determine stream selection and track dynamic bitrate history.
destinationType: str # The destination type of the target of this media stream. This should be the calling application package name. Used for logging or adaptive bitrate fingerprinting.
duration: float
id: str
loop: bool
@@ -687,7 +720,7 @@ class RequestRecordingStreamOptions(TypedDict):
route: Any | Any | Any # When retrieving media, setting route directs how the media should be retrieved and exposed. A direct route will get the stream as is from the source. This will bypass any intermediaries if possible, such as an NVR or restreamers. An external route will request that that provided route is exposed to the local network.
startTime: float
tool: MediaStreamTool # The tool was used to write the container or will be used to read teh container. Ie, scrypted, the ffmpeg tools, gstreamer.
video: VideoStreamOptions
video: RequestVideoStreamOptions
class ResponseMediaStreamOptions(TypedDict):
"""Options passed to VideoCamera.getVideoStream to request specific media formats. The audio/video properties may be omitted to indicate no audio/video is available when calling getVideoStreamOptions or no audio/video is requested when calling getVideoStream."""
@@ -782,8 +815,6 @@ class VideoClipOptions(TypedDict):
aspectRatio: float
count: float
endTime: float
reverseOrder: bool
startId: str
startTime: float
class VideoClipThumbnailOptions(TypedDict):


@@ -490,11 +490,17 @@ export interface VideoStreamOptions {
h264Info?: H264Info;
}
export interface RequestVideoStreamOptions extends VideoStreamOptions {
clientWidth?: number;
clientHeight?: number;
}
export interface AudioStreamOptions {
codec?: string;
encoder?: string;
profile?: string;
bitrate?: number;
sampleRate?: number;
}
export type MediaStreamSource = "local" | "cloud";
@@ -569,6 +575,14 @@ export interface ResponseMediaStreamOptions extends MediaStreamOptions {
export type MediaStreamDestination = "local" | "remote" | "medium-resolution" | "low-resolution" | "local-recorder" | "remote-recorder";
export interface RequestMediaStreamAdaptiveOptions {
packetLoss?: boolean;
pictureLoss?: boolean;
keyframe?: boolean;
reconfigure?: boolean;
resize?: boolean;
}
export interface RequestMediaStreamOptions extends MediaStreamOptions {
/**
* When retrieving media, setting route directs how the media should be
@@ -601,11 +615,20 @@ export interface RequestMediaStreamOptions extends MediaStreamOptions {
*/
destinationId?: string;
/**
* The destination type of the target of this media stream. This
* should be the calling application package name. Used for logging
* or adaptive bitrate fingerprinting.
*/
destinationType?: string;
/**
* Request an adaptive bitrate stream, if available. The destination
* will need to report packet loss indication.
*/
adaptive?: boolean;
adaptive?: boolean | RequestMediaStreamAdaptiveOptions;
video?: RequestVideoStreamOptions;
}
export interface MediaStreamPacketLoss {
@@ -627,6 +650,10 @@ export interface MediaStreamFeedback {
reportPacketLoss(report: MediaStreamPacketLoss): Promise<void>;
reportPictureLoss(): Promise<void>;
reportEstimatedMaxBitrate(bitrate: number): Promise<void>;
resizeStream(options: {
width: number;
height: number;
}): Promise<void>;
}
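A hedged usage sketch of the reworked adaptive option (the camera variable and destinationType value are placeholders): consumers can now request individual feedback capabilities rather than a single boolean, and the resize flag presumably pairs with the resizeStream feedback method added above.

import { VideoCamera } from '@scrypted/sdk';

async function requestAdaptiveStream(camera: VideoCamera) {
    return camera.getVideoStream({
        destination: 'remote',
        destinationType: 'com.example.viewer', // hypothetical consuming application id
        adaptive: {
            packetLoss: true,
            pictureLoss: true,
            reconfigure: true,
            resize: true,
        },
    });
}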
/**
@@ -719,9 +746,7 @@ export interface RecordedEvent {
export interface RecordedEventOptions {
startTime?: number;
endTime?: number;
startId?: string;
count?: number;
reverseOrder?: boolean;
}
export interface EventRecorder {
@@ -753,9 +778,7 @@ export interface VideoClip {
export interface VideoClipOptions extends VideoClipThumbnailOptions {
startTime?: number;
endTime?: number;
startId?: string;
count?: number;
reverseOrder?: boolean;
}
export interface VideoClipThumbnailOptions {
@@ -1288,7 +1311,6 @@ export interface BoundingBoxResult {
* x, y, width, height
*/
boundingBox?: [number, number, number, number];
zoneHistory?: { [zone: string]: ObjectDetectionHistory };
zones?: string[];
history?: ObjectDetectionHistory;
}
@@ -1306,9 +1328,22 @@ export interface ObjectDetectionResult extends BoundingBoxResult {
*/
className: ObjectDetectionClass;
/**
* The name of the object, if it was recognized as a familiar object (person, pet, etc).
* The label of the object, if it was recognized as a familiar object (person, pet, etc).
*/
name?: string;
label?: string;
/**
* A base64 encoded Float32Array that represents the vector descriptor of the detection.
* Can be used to compute euclidian distance to determine similarity.
*/
descriptor?: string;
/**
* The detection landmarks, like key points in a face landmarks.
*/
landmarks?: Point[];
/**
* The detection clip paths that outlines various features or segments, like traced facial features.
*/
clipPaths?: ClipPath[];
score: number;
resources?: VideoResource;
/**


@@ -1,12 +1,12 @@
{
"name": "@scrypted/server",
"version": "0.67.0",
"version": "0.72.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/server",
"version": "0.67.0",
"version": "0.72.0",
"license": "ISC",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.11",


@@ -1,6 +1,6 @@
{
"name": "@scrypted/server",
"version": "0.68.0",
"version": "0.72.0",
"description": "",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.11",

Some files were not shown because too many files have changed in this diff.