Compare commits

...

69 Commits

Author SHA1 Message Date
Koushik Dutta
5de67fca86 server: fix python 3.8 issues 2023-04-18 10:45:20 -07:00
Koushik Dutta
98dc0b1b6d postrelease 2023-04-18 10:44:57 -07:00
Koushik Dutta
a05595ecc7 pam-diff/videoanalysis: fix performance, remove sharp dependency 2023-04-18 00:26:29 -07:00
Koushik Dutta
87be4648f1 prebeta 2023-04-17 22:14:36 -07:00
Koushik Dutta
60e51adb41 postrelease 2023-04-17 14:24:33 -07:00
Koushik Dutta
ace7720fe1 videoanalysis: fix snapshot hangs caused by HOL jpeg 2023-04-17 12:52:46 -07:00
Koushik Dutta
b9eb74d403 videoanalysis: add prebuffer hint 2023-04-17 09:55:20 -07:00
Koushik Dutta
fb7353383d predict: rollback rpc change until server is published 2023-04-17 08:46:32 -07:00
Koushik Dutta
bee119b486 python-codecs: handle vips rgba conversion 2023-04-17 08:46:24 -07:00
Koushik Dutta
0b6ffc2b87 predict: strip out allow list 2023-04-16 20:50:49 -07:00
Koushik Dutta
3863527b4d server: fix publish scripts 2023-04-16 13:11:44 -07:00
Koushik Dutta
51c48f4a1c prebeta 2023-04-16 13:10:41 -07:00
Koushik Dutta
4c138e9b4c prebeta 2023-04-16 12:11:25 -07:00
Koushik Dutta
e762c305a3 server: implement various python apis 2023-04-16 12:10:20 -07:00
Koushik Dutta
5bce335288 server: implement various python apis 2023-04-16 12:08:48 -07:00
Koushik Dutta
8201e9883a sdk: update python sdk 2023-04-16 11:43:02 -07:00
Koushik Dutta
74e5884285 videoanalysis: fix generator leak 2023-04-16 10:13:31 -07:00
Koushik Dutta
9cffd9ffbe server: fix noop cluster connect 2023-04-15 21:35:36 -07:00
Koushik Dutta
d8b617f2ae prebeta 2023-04-15 21:35:07 -07:00
Koushik Dutta
aeb564aa5d python-codecs: fix rgb->grasycale conversion 2023-04-15 21:22:52 -07:00
Koushik Dutta
45f672883a sdk: improve moving object metadata 2023-04-15 20:14:05 -07:00
Koushik Dutta
c0ff857a1b server: improve cluster resolution 2023-04-15 20:13:43 -07:00
Koushik Dutta
64f7e31f54 prebeta 2023-04-15 20:11:40 -07:00
Koushik Dutta
6b55f8876e prebeta 2023-04-15 15:12:10 -07:00
Koushik Dutta
718a31f2c5 prebeta 2023-04-15 15:02:32 -07:00
Koushik Dutta
c1e1d50fa5 sdk: publish 2023-04-15 10:14:42 -07:00
Koushik Dutta
75c4a1939f server: publish beta 2023-04-15 09:33:23 -07:00
Koushik Dutta
0d703c2aff predict: remove filter options 2023-04-15 09:33:10 -07:00
Koushik Dutta
0a6e4fda75 sdk: add support for designating object is moving 2023-04-14 22:29:05 -07:00
Koushik Dutta
4c2de9e443 server: add getDevice convenience method for pluginId/nativeId 2023-04-14 22:24:12 -07:00
Koushik Dutta
b8a4fedf1a client: publish 2023-04-14 22:08:58 -07:00
Koushik Dutta
79d9f1d4a1 server: add getDevice convenience method for pluginId/nativeId 2023-04-14 22:07:49 -07:00
Koushik Dutta
983213c578 sort-tracker: deprecate 2023-04-14 16:06:06 -07:00
Koushik Dutta
7dd3d71ebd videoanalysis: remove problematic ffmpeg video generator 2023-04-14 13:36:56 -07:00
Koushik Dutta
493f8deeef Revert "server: watch for dangling python processes"
This reverts commit b29f2d5ee1.
2023-04-14 13:02:51 -07:00
Koushik Dutta
b29f2d5ee1 server: watch for dangling python processes 2023-04-14 12:52:16 -07:00
Koushik Dutta
96bda10123 sort-tracker: remove average area check 2023-04-14 09:58:55 -07:00
Koushik Dutta
3294700d31 core: fix ui refresh issue 2023-04-14 08:07:12 -07:00
Koushik Dutta
0cf77d4c76 core: add support for date./time settings 2023-04-13 19:43:20 -07:00
Koushik Dutta
953841e3a5 update samples 2023-04-13 19:40:02 -07:00
Koushik Dutta
393c1017df sdk: add date/time/datetime types 2023-04-13 13:05:44 -07:00
Koushik Dutta
f50176d14a server: fix CPU usage being lost on fork exit 2023-04-13 12:53:21 -07:00
Koushik Dutta
7f2bf0b542 webrtc: fix ffmpeg leak 2023-04-13 12:51:52 -07:00
Koushik Dutta
9e3990400c zwave: publish 2023-04-13 12:51:41 -07:00
Koushik Dutta
95eed80735 webrtc: fix ffmpeg leak 2023-04-13 12:51:29 -07:00
Koushik Dutta
be43d0c017 zwave: publish 2023-04-12 09:47:05 -07:00
mikeburgh
386ea9a98a Fixing sensor mapping to position (#719) 2023-04-11 20:11:46 -07:00
Koushik Dutta
9b40978f61 client/server: fix various async generator bugs in remote client 2023-04-11 13:53:38 -07:00
Koushik Dutta
f0ee435cd0 videoanalysis: fix detection calculation/throttling in snapshot mode 2023-04-10 17:49:33 -07:00
Koushik Dutta
30748784ef videoanalysis: fix logging 2023-04-10 14:30:14 -07:00
Koushik Dutta
8310e33719 videoanalysis: profile system performance and use snapshot mode when necessary 2023-04-10 12:48:32 -07:00
Koushik Dutta
1d18697161 videoanalysis: watch for pipeline hangs. fix race conditions around pipeline startup/termination. 2023-04-10 10:17:26 -07:00
Koushik Dutta
d500b3fd6c h264 packetizer: update codec information with stapa packets 2023-04-10 08:06:41 -07:00
Koushik Dutta
95ae916b6c Merge branch 'main' of github.com:koush/scrypted 2023-04-09 21:02:47 -07:00
Koushik Dutta
ec3e16f20f onvif (reolink): implement two way audio 2023-04-09 21:02:42 -07:00
Brett Jia
30d28f543c arlo: boolean settings + publish (#713) 2023-04-09 14:49:43 -07:00
Koushik Dutta
e0cce24999 python-codecs: publish 2023-04-09 12:04:40 -07:00
Koushik Dutta
409b25f8b0 python-codecs: fix windows process cleanup 2023-04-09 12:04:16 -07:00
Koushik Dutta
8f278abec8 videoanalysis: fix bug where stream failure may cause motion detector to never restart 2023-04-08 11:10:49 -07:00
Koushik Dutta
d6179dab82 prebeta 2023-04-08 10:19:11 -07:00
Koushik Dutta
ed186e2142 server/rpc: improve typings on rpc message type 2023-04-08 10:19:06 -07:00
Koushik Dutta
3c021bb2c8 prebeta 2023-04-08 10:17:24 -07:00
Koushik Dutta
c522edc622 server/rpc: improve typings on rpc message type 2023-04-08 10:17:18 -07:00
Koushik Dutta
022a103bcb prebeta 2023-04-08 10:05:07 -07:00
Koushik Dutta
efd125b6e4 server/rpc: add Uint8Array to node transport safe arguments 2023-04-08 10:05:02 -07:00
Koushik Dutta
19f7688a65 python-codecs: publish 2023-04-08 09:18:54 -07:00
Koushik Dutta
7f18e4629c prebeta 2023-04-08 09:18:38 -07:00
Koushik Dutta
dfe2c937a1 server: add hook for cluster peer creation 2023-04-08 09:18:31 -07:00
Koushik Dutta
47d7a23a3d postrelease 2023-04-07 21:15:04 -07:00
113 changed files with 2014 additions and 1701 deletions

3
.gitmodules vendored
View File

@@ -32,9 +32,6 @@
[submodule "plugins/sample-cameraprovider"]
path = plugins/sample-cameraprovider
url = ../../koush/scrypted-sample-cameraprovider
[submodule "plugins/tensorflow-lite/sort_oh"]
path = plugins/sort-tracker/sort_oh
url = ../../koush/sort_oh.git
[submodule "plugins/cloud/node-nat-upnp"]
path = plugins/cloud/node-nat-upnp
url = ../../koush/node-nat-upnp.git

View File

@@ -1,15 +1,17 @@
export class Deferred<T> {
finished = false;
resolve!: (value: T|PromiseLike<T>) => void;
reject!: (error: Error) => void;
resolve!: (value: T|PromiseLike<T>) => this;
reject!: (error: Error) => this;
promise: Promise<T> = new Promise((resolve, reject) => {
this.resolve = v => {
this.finished = true;
resolve(v);
return this;
};
this.reject = e => {
this.finished = true;
reject(e);
return this;
};
});
}

View File

@@ -681,7 +681,7 @@ export class RtspClient extends RtspBase {
});
}
async setup(options: RtspClientTcpSetupOptions | RtspClientUdpSetupOptions) {
async setup(options: RtspClientTcpSetupOptions | RtspClientUdpSetupOptions, headers?: Headers) {
const protocol = options.type === 'udp' ? '' : '/TCP';
const client = options.type === 'udp' ? 'client_port' : 'interleaved';
let port: number;
@@ -697,9 +697,9 @@ export class RtspClient extends RtspBase {
port = options.dgram.address().port;
options.dgram.on('message', data => options.onRtp(undefined, data));
}
const headers: any = {
headers = Object.assign({
Transport: `RTP/AVP${protocol};unicast;${client}=${port}-${port + 1}`,
};
}, headers);
const response = await this.request('SETUP', headers, options.path);
let interleaved: {
begin: number;

View File

@@ -1,15 +1,15 @@
{
"name": "@scrypted/client",
"version": "1.1.43",
"version": "1.1.51",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/client",
"version": "1.1.43",
"version": "1.1.51",
"license": "ISC",
"dependencies": {
"@scrypted/types": "^0.2.78",
"@scrypted/types": "^0.2.80",
"axios": "^0.25.0",
"engine.io-client": "^6.4.0",
"rimraf": "^3.0.2"
@@ -21,9 +21,9 @@
}
},
"node_modules/@scrypted/types": {
"version": "0.2.78",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.78.tgz",
"integrity": "sha512-SiIUh9ph96aZPjt/oO+W/mlJobrP02ADwFDI9jnvw8/UegUti2x/7JE8Pi3kGXOIkN+cX74Qg4xJEMIpdpO1zw=="
"version": "0.2.80",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.80.tgz",
"integrity": "sha512-YVu7jcD5sYgjJLP7kH1K2FJzqrlcjdpDxzZoLXudZCKiujldbmLYcwglSgnN9bRqkKZcGOfru/WssvQj+0JioQ=="
},
"node_modules/@socket.io/component-emitter": {
"version": "3.1.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/client",
"version": "1.1.43",
"version": "1.1.51",
"description": "",
"main": "dist/packages/client/src/index.js",
"scripts": {
@@ -17,7 +17,7 @@
"typescript": "^4.9.5"
},
"dependencies": {
"@scrypted/types": "^0.2.78",
"@scrypted/types": "^0.2.80",
"axios": "^0.25.0",
"engine.io-client": "^6.4.0",
"rimraf": "^3.0.2"

View File

@@ -7,6 +7,7 @@ import { timeoutPromise } from "../../../common/src/promise-utils";
import { BrowserSignalingSession, waitPeerConnectionIceConnected, waitPeerIceConnectionClosed } from "../../../common/src/rtc-signaling";
import { DataChannelDebouncer } from "../../../plugins/webrtc/src/datachannel-debouncer";
import type { IOSocket } from '../../../server/src/io';
import { MediaObject } from '../../../server/src/plugin/mediaobject';
import type { MediaObjectRemote } from '../../../server/src/plugin/plugin-api';
import { attachPluginRemote } from '../../../server/src/plugin/plugin-remote';
import { RpcPeer } from '../../../server/src/rpc';
@@ -505,22 +506,7 @@ export async function connectScryptedClient(options: ScryptedClientOptions): Pro
console.log('api attached', Date.now() - start);
mediaManager.createMediaObject = async<T extends MediaObjectOptions>(data: any, mimeType: string, options: T) => {
const mo: MediaObjectRemote & {
[RpcPeer.PROPERTY_PROXY_PROPERTIES]: any,
[RpcPeer.PROPERTY_JSON_DISABLE_SERIALIZATION]: true,
} = {
[RpcPeer.PROPERTY_JSON_DISABLE_SERIALIZATION]: true,
[RpcPeer.PROPERTY_PROXY_PROPERTIES]: {
mimeType,
sourceId: options?.sourceId,
},
mimeType,
sourceId: options?.sourceId,
async getData() {
return data;
},
};
return mo as any;
return new MediaObject(mimeType, data, options) as any;
}
const { browserSignalingSession, connectionManagementId, updateSessionId } = rpcPeer.params;

View File

@@ -16,7 +16,7 @@
"multiparty": "^4.2.2"
},
"devDependencies": {
"@types/node": "^16.11.0"
"@types/node": "^18.15.11"
}
},
"../../common": {
@@ -36,7 +36,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.68",
"version": "0.2.87",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -100,9 +100,9 @@
}
},
"node_modules/@types/node": {
"version": "16.11.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.0.tgz",
"integrity": "sha512-8MLkBIYQMuhRBQzGN9875bYsOhPnf/0rgXGo66S2FemHkhbn9qtsz9ywV1iCG+vbjigE4WUNVvw37Dx+L0qsPg=="
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"node_modules/auth-header": {
"version": "1.0.0",
@@ -291,9 +291,9 @@
}
},
"@types/node": {
"version": "16.11.0",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.0.tgz",
"integrity": "sha512-8MLkBIYQMuhRBQzGN9875bYsOhPnf/0rgXGo66S2FemHkhbn9qtsz9ywV1iCG+vbjigE4WUNVvw37Dx+L0qsPg=="
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"auth-header": {
"version": "1.0.0",

View File

@@ -36,12 +36,12 @@
},
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",
"@scrypted/sdk": "file:../../sdk",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/multiparty": "^0.0.33",
"multiparty": "^4.2.2"
},
"devDependencies": {
"@types/node": "^16.11.0"
"@types/node": "^18.15.11"
}
}

View File

@@ -1,19 +1,19 @@
{
"name": "@scrypted/arlo",
"version": "0.7.12",
"version": "0.7.13",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/arlo",
"version": "0.7.12",
"version": "0.7.13",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.85",
"version": "0.2.87",
"dev": true,
"license": "ISC",
"dependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/arlo",
"version": "0.7.12",
"version": "0.7.13",
"description": "Arlo Plugin for Scrypted",
"keywords": [
"scrypted",

View File

@@ -273,9 +273,10 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
])
return result
@async_print_exception_guard
async def putSetting(self, key, value) -> None:
if key in ["webrtc_emulation", "two_way_audio", "wired_to_power"]:
self.storage.setItem(key, value == "true")
self.storage.setItem(key, value == "true" or value == True)
await self.provider.discover_devices()
async def getPictureOptions(self) -> List[ResponsePictureOptions]:

View File

@@ -480,6 +480,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
return results
@async_print_exception_guard
async def putSetting(self, key: str, value: SettingValue) -> None:
if not self.validate_setting(key, value):
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)
@@ -492,7 +493,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
# force arlo client to be invalidated and reloaded
self.invalidate_arlo_client()
elif key == "plugin_verbosity":
self.storage.setItem(key, "Verbose" if value == "true" else "Normal")
self.storage.setItem(key, "Verbose" if value == "true" or value == True else "Normal")
self.propagate_verbosity()
skip_arlo_client = True
else:

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/core",
"version": "0.1.108",
"version": "0.1.110",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/core",
"version": "0.1.108",
"version": "0.1.110",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/core",
"version": "0.1.108",
"version": "0.1.110",
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -27,25 +27,8 @@ export class Scheduler {
];
const date = new Date();
if (schedule.clockType === 'AM' || schedule.clockType === 'PM') {
let hour = schedule.hour;
if (schedule.clockType === 'AM') {
if (hour === 12)
hour -= 12;
}
else {
if (hour != 12)
hour += 12;
}
date.setHours(hour);
date.setMinutes(schedule.minute, 0, 0);
}
else if (schedule.clockType === '24HourClock') {
date.setHours(schedule.hour, schedule.minute, 0, 0);
}
else {
throw new Error('sunrise/sunset clock not supported');
}
date.setHours(schedule.hour);
date.setMinutes(schedule.minute);
const ret: ScryptedDevice = {
async setName() { },
@@ -65,7 +48,7 @@ export class Scheduler {
if (!days[day])
continue;
source.log.i(`event will fire at ${future}`);
source.log.i(`event will fire at ${future.toLocaleString()}`);
return future;
}
source.log.w('event will never fire');
@@ -80,6 +63,7 @@ export class Scheduler {
}
const delay = when.getTime() - Date.now();
source.log.i(`event will fire in ${Math.round(delay / 60 / 1000)} minutes.`);
let timeout = setTimeout(() => {
reschedule();

View File

@@ -13,7 +13,6 @@
"@fortawesome/free-solid-svg-icons": "^6.3.0",
"@fortawesome/vue-fontawesome": "^2.0.8",
"@radial-color-picker/vue-color-picker": "^2.3.0",
"@scrypted/client": "file:../../../packages/client",
"@scrypted/common": "file:../../../common",
"@scrypted/sdk": "file:../../../sdk",
"@scrypted/types": "file:../../../sdk/types",
@@ -32,6 +31,7 @@
"register-service-worker": "^1.7.2",
"router": "^1.3.6",
"semver": "^6.3.0",
"v-calendar": "^2.4.1",
"vue": "^2.7.14",
"vue-apexcharts": "^1.6.2",
"vue-async-computed": "^3.9.0",
@@ -118,27 +118,24 @@
},
"../../../packages/client": {
"name": "@scrypted/client",
"version": "1.1.37",
"version": "1.1.48",
"extraneous": true,
"license": "ISC",
"dependencies": {
"@scrypted/types": "^0.2.64",
"adm-zip": "^0.5.9",
"@scrypted/types": "^0.2.78",
"axios": "^0.25.0",
"engine.io-client": "^6.2.2",
"linkfs": "^2.1.0",
"memfs": "^3.4.1",
"engine.io-client": "^6.4.0",
"rimraf": "^3.0.2"
},
"devDependencies": {
"@types/adm-zip": "^0.4.34",
"@types/ip": "^1.1.0",
"@types/node": "^17.0.17",
"typescript": "^4.7.4"
"@types/node": "^18.14.2",
"typescript": "^4.9.5"
}
},
"../../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.68",
"version": "0.2.87",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -175,7 +172,7 @@
},
"../../../sdk/types": {
"name": "@scrypted/types",
"version": "0.2.63",
"version": "0.2.79",
"license": "ISC",
"devDependencies": {
"@types/rimraf": "^3.0.2",
@@ -2265,6 +2262,16 @@
"integrity": "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==",
"dev": true
},
"node_modules/@popperjs/core": {
"version": "2.11.7",
"resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.7.tgz",
"integrity": "sha512-Cr4OjIkipTtcXKjAsm8agyleBuDHvxzeBoa1v543lbv1YaIwQjESsVcmjiWiPEbC1FIeHOG/Op9kdCmAmiS3Kw==",
"peer": true,
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/popperjs"
}
},
"node_modules/@radial-color-picker/color-wheel": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@radial-color-picker/color-wheel/-/color-wheel-2.2.0.tgz",
@@ -2287,10 +2294,6 @@
"vue": "^2.5.21"
}
},
"node_modules/@scrypted/client": {
"resolved": "../../../packages/client",
"link": true
},
"node_modules/@scrypted/common": {
"resolved": "../../../common",
"link": true
@@ -7819,7 +7822,6 @@
"version": "2.24.0",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.24.0.tgz",
"integrity": "sha512-6ujwvwgPID6zbI0o7UbURi2vlLDR9uP26+tW6Lg+Ji3w7dd0i3DOcjcClLjLPranT60SSEFBwdSyYwn/ZkPIuw==",
"dev": true,
"engines": {
"node": ">=0.11"
},
@@ -7828,6 +7830,14 @@
"url": "https://opencollective.com/date-fns"
}
},
"node_modules/date-fns-tz": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/date-fns-tz/-/date-fns-tz-1.3.8.tgz",
"integrity": "sha512-qwNXUFtMHTTU6CFSFjoJ80W8Fzzp24LntbjFFBgL/faqds4e5mo9mftoRLgr3Vi1trISsg4awSpYVsOQCRnapQ==",
"peerDependencies": {
"date-fns": ">=2.0.0"
}
},
"node_modules/de-indent": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/de-indent/-/de-indent-1.0.2.tgz",
@@ -18977,6 +18987,31 @@
"uuid": "bin/uuid"
}
},
"node_modules/v-calendar": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/v-calendar/-/v-calendar-2.4.1.tgz",
"integrity": "sha512-nhzOlHM2cinv+8jIcnAx+nTo63U40szv3Ig41uLMpGK1U5sApgCP6ggigprsnlMOM5VRq1G/1B8rNHkRrLbGjw==",
"dependencies": {
"core-js": "^3.15.2",
"date-fns": "^2.22.1",
"date-fns-tz": "^1.1.4",
"lodash": "^4.17.21"
},
"peerDependencies": {
"@popperjs/core": "^2.4.0",
"vue": "^2.5.18"
}
},
"node_modules/v-calendar/node_modules/core-js": {
"version": "3.30.1",
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.30.1.tgz",
"integrity": "sha512-ZNS5nbiSwDTq4hFosEDqm65izl2CWmLz0hARJMyNQBgkUZMIF51cQiMvIQKA6hvuaeWxQDP3hEedM1JZIgTldQ==",
"hasInstallScript": true,
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/core-js"
}
},
"node_modules/v8-compile-cache": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz",
@@ -22773,6 +22808,12 @@
"integrity": "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==",
"dev": true
},
"@popperjs/core": {
"version": "2.11.7",
"resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.7.tgz",
"integrity": "sha512-Cr4OjIkipTtcXKjAsm8agyleBuDHvxzeBoa1v543lbv1YaIwQjESsVcmjiWiPEbC1FIeHOG/Op9kdCmAmiS3Kw==",
"peer": true
},
"@radial-color-picker/color-wheel": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/@radial-color-picker/color-wheel/-/color-wheel-2.2.0.tgz",
@@ -22792,22 +22833,6 @@
"@radial-color-picker/rotator": "2.1.0"
}
},
"@scrypted/client": {
"version": "file:../../../packages/client",
"requires": {
"@scrypted/types": "^0.2.64",
"@types/adm-zip": "^0.4.34",
"@types/ip": "^1.1.0",
"@types/node": "^17.0.17",
"adm-zip": "^0.5.9",
"axios": "^0.25.0",
"engine.io-client": "^6.2.2",
"linkfs": "^2.1.0",
"memfs": "^3.4.1",
"rimraf": "^3.0.2",
"typescript": "^4.7.4"
}
},
"@scrypted/common": {
"version": "file:../../../common",
"requires": {
@@ -27308,8 +27333,13 @@
"date-fns": {
"version": "2.24.0",
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.24.0.tgz",
"integrity": "sha512-6ujwvwgPID6zbI0o7UbURi2vlLDR9uP26+tW6Lg+Ji3w7dd0i3DOcjcClLjLPranT60SSEFBwdSyYwn/ZkPIuw==",
"dev": true
"integrity": "sha512-6ujwvwgPID6zbI0o7UbURi2vlLDR9uP26+tW6Lg+Ji3w7dd0i3DOcjcClLjLPranT60SSEFBwdSyYwn/ZkPIuw=="
},
"date-fns-tz": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/date-fns-tz/-/date-fns-tz-1.3.8.tgz",
"integrity": "sha512-qwNXUFtMHTTU6CFSFjoJ80W8Fzzp24LntbjFFBgL/faqds4e5mo9mftoRLgr3Vi1trISsg4awSpYVsOQCRnapQ==",
"requires": {}
},
"de-indent": {
"version": "1.0.2",
@@ -36063,6 +36093,24 @@
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
"dev": true
},
"v-calendar": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/v-calendar/-/v-calendar-2.4.1.tgz",
"integrity": "sha512-nhzOlHM2cinv+8jIcnAx+nTo63U40szv3Ig41uLMpGK1U5sApgCP6ggigprsnlMOM5VRq1G/1B8rNHkRrLbGjw==",
"requires": {
"core-js": "^3.15.2",
"date-fns": "^2.22.1",
"date-fns-tz": "^1.1.4",
"lodash": "^4.17.21"
},
"dependencies": {
"core-js": {
"version": "3.30.1",
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.30.1.tgz",
"integrity": "sha512-ZNS5nbiSwDTq4hFosEDqm65izl2CWmLz0hARJMyNQBgkUZMIF51cQiMvIQKA6hvuaeWxQDP3hEedM1JZIgTldQ=="
}
}
},
"v8-compile-cache": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz",

View File

@@ -34,6 +34,7 @@
"register-service-worker": "^1.7.2",
"router": "^1.3.6",
"semver": "^6.3.0",
"v-calendar": "^2.4.1",
"vue": "^2.7.14",
"vue-apexcharts": "^1.6.2",
"vue-async-computed": "^3.9.0",

View File

@@ -40,7 +40,7 @@
<v-btn :dark="!isLive" v-on="on" small :color="isLive ? 'white' : 'blue'" :outlined="isLive">
<v-icon small color="white" :outlined="isLive">fa fa-calendar-alt</v-icon>&nbsp;{{ monthDay }}</v-btn>
</template>
<v-date-picker @input="datePicked"></v-date-picker>
<vc-date-picker mode="date" :value="startTime" @input="datePicked"></vc-date-picker>
</v-dialog>
<v-btn v-if="showNvr" :dark="!isLive" small :color="isLive ? 'white' : adjustingTime ? 'green' : 'blue'"
@@ -181,8 +181,8 @@ export default {
methods: {
datePicked(value) {
this.dateDialog = false;
const dt = datePickerLocalTimeToUTC(value);
this.streamRecorder(dt);
if (value && value.getTime)
this.streamRecorder(value.getTime());
},
doTimeScroll(e) {
if (!this.device.interfaces.includes(ScryptedInterface.VideoRecorder))

View File

@@ -22,6 +22,7 @@ export default {
watch: {
device() {
this.watchDevice();
this.refresh();
},
},
methods: {

View File

@@ -17,10 +17,21 @@ export default {
VueMarkdown,
CardTitle,
},
data() {
return {
token: 0,
}
},
methods: {
refresh() {
this.token++;
}
},
asyncComputed: {
readme: {
async get() {
return this.device.getReadmeMarkdown();;
await this.token;
return this.device.getReadmeMarkdown();
},
default: undefined,
}

View File

@@ -1,6 +1,10 @@
<template>
<div>
<v-checkbox v-if="lazyValue.type === 'boolean'" dense :readonly="lazyValue.readonly" v-model="booleanValue"
<vc-date-picker v-if="lazyValue.type === 'date'" mode="date" v-model="dateValue" :is-range="lazyValue.combobox"></vc-date-picker>
<vc-date-picker v-else-if="lazyValue.type === 'time'" mode="time" v-model="dateValue"
class="hide-header" :is-range="lazyValue.combobox"></vc-date-picker>
<vc-date-picker v-else-if="lazyValue.type === 'datetime'" mode="datetime" v-model="dateValue" :is-range="lazyValue.combobox"></vc-date-picker>
<v-checkbox v-else-if="lazyValue.type === 'boolean'" dense :readonly="lazyValue.readonly" v-model="booleanValue"
:label="lazyValue.title" :hint="lazyValue.description" :placeholder="lazyValue.placeholder" persistent-hint
@change="save" :class="lazyValue.description ? 'mb-2' : ''"></v-checkbox>
<div v-else-if="lazyValue.type === 'qrcode'">
@@ -134,6 +138,25 @@ export default {
return [];
}
},
dateValue: {
get() {
if (this.lazyValue.combobox) {
return {
start: new Date(parseInt(this.lazyValue.value?.[0]) || Date.now()),
end: new Date(parseInt(this.lazyValue.value?.[1]) || Date.now()),
};
}
return new Date(parseInt(this.lazyValue.value) || Date.now());
},
set(val) {
if (this.lazyValue.combobox) {
this.lazyValue.value = [val.start.getTime(), val.end.getTime()];
}
else {
this.lazyValue.value = val.getTime();
}
}
},
booleanValue: {
get() {
return (
@@ -251,6 +274,7 @@ export default {
},
createLazyValue() {
var type = this.value.type || "";
if (type.indexOf("[]") == -1 && type !== "clippath") {
return cloneDeep(this.value);
}
@@ -265,6 +289,7 @@ export default {
},
createInputValue() {
var type = this.lazyValue.type || "";
if (type.indexOf("[]") == -1 && type !== "clippath") {
return this.lazyValue;
}
@@ -287,4 +312,8 @@ export default {
.shift-up {
margin-top: -8px;
}
</style>
.hide-header .vc-date {
display: none !important;
}
</style>

View File

@@ -40,11 +40,11 @@
<v-btn v-on="on" small>
<v-icon x-small>fa fa-calendar-alt</v-icon>
&nbsp;
{{ year }}-{{ month }}-{{ date }}
{{ new Date(date).getFullYear() }}-{{ new Date(date).getMonth() }}-{{ new Date(date).getDate() }}
</v-btn>
</template>
<v-card>
<v-date-picker @input="onDate"> </v-date-picker>
<vc-date-picker mode="date" @input="onDate" v-model="date"> </vc-date-picker>
</v-card>
</v-dialog>
<v-btn text small disabled v-if="pages">{{ pageRange }}</v-btn>
@@ -70,7 +70,6 @@
</div>
</template>
<script>
import { datePickerLocalTimeToUTC } from "../common/date";
import { fetchClipThumbnail, fetchClipUrl } from "../common/videoclip";
import RPCInterface from "./RPCInterface.vue";
import Vue from "vue";
@@ -129,14 +128,11 @@ export default {
clips: {
async get() {
await this.refreshNonce;
const date = new Date();
const date = new Date(this.date);
date.setMilliseconds(0);
date.setSeconds(0);
date.setMinutes(0);
date.setHours(0);
date.setFullYear(this.year);
date.setMonth(this.month - 1);
date.setDate(this.date);
console.log(date);
const dt = date.getTime();
const ret = await this.device.getVideoClips({
@@ -165,9 +161,7 @@ export default {
fetchingImages: [],
page: 1,
dialog: false,
date: new Date().getDate(),
month: new Date().getMonth() + 1,
year: new Date().getFullYear(),
date: Date.now(),
};
},
methods: {
@@ -202,11 +196,8 @@ export default {
onDate(value) {
this.page = 1;
this.dialog = false;
const dt = datePickerLocalTimeToUTC(value);
const d = new Date(dt);
this.month = d.getMonth() + 1;
this.date = d.getDate();
this.year = d.getFullYear();
console.log(value);
this.date = value;
this.refresh();
},
},

View File

@@ -1,33 +1,12 @@
<template>
<v-layout row wrap justify-center align-center>
<v-flex xs3 md2 lg2 xl1 v-for="day of days" :key="day">
<v-btn
block
class="white--text"
@click="toggleDay(day)"
color="info"
small
:text="!lazyValue[day]"
>{{ day.substring(0, 3) }}</v-btn>
<v-btn block class="white--text" @click="toggleDay(day)" color="info" small :text="!lazyValue[day]">{{
day.substring(0, 3) }}</v-btn>
</v-flex>
<v-flex xs12>
<v-layout justify-center align-center>
<v-time-picker v-model="time" format="24hr" @input="onChange"></v-time-picker>
</v-layout>
</v-flex>
<v-flex xs12>
<v-layout justify-center align-center>
<v-flex xs12 md8 lg6 xl4>
<v-select
xs3
reverse
:items="clockTypes"
solo
item-value="id"
v-model="lazyValue.clockType"
@input="onChange"
></v-select>
</v-flex>
<vc-date-picker v-model="time" class="hide-header" @input="onChange" mode="time"></vc-date-picker>
</v-layout>
</v-flex>
</v-layout>
@@ -52,62 +31,37 @@ function zeroPrefix(arr, len) {
arr.push(i >= 10 ? i.toString() : "0" + i);
}
}
const clockTypes = [
{
id: "AM",
text: "AM"
},
{
id: "PM",
text: "PM"
},
{
text: "24 Hour Clock",
id: "TwentyFourHourClock"
},
{
text: "Before Sunrise",
id: "BeforeSunrise"
},
{
text: "After Sunrise",
id: "AfterSunrise"
},
{
text: "Before Sunset",
id: "BeforeSunset"
},
{
text: "After Sunset",
id: "AfterSunset"
}
];
zeroPrefix(hours, 24);
zeroPrefix(minutes, 59);
export default {
mixins: [RPCInterface],
data: function() {
data: function () {
return {
clockTypes,
days,
};
},
computed: {
time: {
get() {
return `${this.lazyValue.hour}:${this.lazyValue.minute}`;
const date = new Date();
date.setMilliseconds(0);
date.setSeconds(0);
date.setMinutes(this.lazyValue.minute);
date.setHours(this.lazyValue.hour);
return date;
},
set(value) {
this.lazyValue.hour = value.split(":")[0];
this.lazyValue.minute = value.split(":")[1];
this.lazyValue.hour = value.getHours();
this.lazyValue.minute = value.getMinutes();
this.onChange();
}
}
},
methods: {
toggleDay: function(day) {
toggleDay: function (day) {
this.lazyValue[day] = !this.lazyValue[day];
this.onChange();
},
@@ -117,11 +71,10 @@ export default {
ret.minute = ret.minute || 0;
return ret;
},
onChange: function() {
onChange: function () {
const schedule = {
hour: parseInt(this.lazyValue.hour) || 0,
minute: parseInt(this.lazyValue.minute) || 0,
clockType: this.lazyValue.clockType || "AM",
};
days.forEach(day => {
schedule[day] = this.lazyValue[day] || false;
@@ -139,9 +92,15 @@ export default {
-webkit-appearance: none;
appearance: none;
}
.semicolon-pad {
margin-left: 2px;
margin-right: 2px;
margin-top: 4px;
}
.hide-header .vc-date {
display: none !important;
}
</style>

View File

@@ -10,6 +10,13 @@ import './plugins/is-mobile';
import Launcher from './Launcher.vue'
import './registerServiceWorker'
import VCalendar from 'v-calendar';
// Use v-calendar & v-date-picker components
Vue.use(VCalendar, {
componentPrefix: 'vc', // Use <vc-calendar /> instead of <v-calendar />
});
// STYLES
// Main Theme SCSS
// import './assets/scss/theme.scss'

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/coreml",
"version": "0.1.9",
"version": "0.1.12",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/coreml",
"version": "0.1.9",
"version": "0.1.12",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -41,5 +41,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.9"
"version": "0.1.12"
}

View File

@@ -42,6 +42,7 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
labels_contents = open(labelsFile, 'r').read()
self.labels = parse_label_contents(labels_contents)
self.loop = asyncio.get_event_loop()
self.minThreshold = .2
# width, height, channels
def get_input_details(self) -> Tuple[int, int, int]:
@@ -53,9 +54,9 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
# run in executor if this is the plugin loop
if asyncio.get_event_loop() is self.loop:
out_dict = await asyncio.get_event_loop().run_in_executor(predictExecutor, lambda: self.model.predict({'image': input, 'confidenceThreshold': .2 }))
out_dict = await asyncio.get_event_loop().run_in_executor(predictExecutor, lambda: self.model.predict({'image': input, 'confidenceThreshold': self.minThreshold }))
else:
out_dict = self.model.predict({'image': input, 'confidenceThreshold': .2 })
out_dict = self.model.predict({'image': input, 'confidenceThreshold': self.minThreshold })
coordinatesList = out_dict['coordinates']
@@ -65,7 +66,7 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
values = confidenceList
maxConfidenceIndex = max(range(len(values)), key=values.__getitem__)
maxConfidence = confidenceList[maxConfidenceIndex]
if maxConfidence < .2:
if maxConfidence < self.minThreshold:
continue
coordinates = coordinatesList[index]
@@ -90,6 +91,5 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
))
objs.append(obj)
allowList = settings.get('allowList', None) if settings else None
ret = self.create_detection_result(objs, src_size, allowList, cvss)
ret = self.create_detection_result(objs, src_size, cvss)
return ret

View File

@@ -12,11 +12,13 @@
"@koush/axios-digest-auth": "^0.8.5",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^16.9.1",
"@types/xml2js": "^0.4.9",
"axios": "^0.23.0",
"lodash": "^4.17.21",
"xml2js": "^0.4.23"
},
"devDependencies": {
"@types/node": "^18.15.11"
}
},
"../../common": {
@@ -36,7 +38,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.86",
"version": "0.2.87",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -100,9 +102,9 @@
"link": true
},
"node_modules/@types/node": {
"version": "16.9.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz",
"integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g=="
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"node_modules/@types/xml2js": {
"version": "0.4.9",
@@ -231,9 +233,9 @@
}
},
"@types/node": {
"version": "16.9.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz",
"integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g=="
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"@types/xml2js": {
"version": "0.4.9",

View File

@@ -38,10 +38,12 @@
"@koush/axios-digest-auth": "^0.8.5",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^16.9.1",
"@types/xml2js": "^0.4.9",
"axios": "^0.23.0",
"lodash": "^4.17.21",
"xml2js": "^0.4.23"
},
"devDependencies": {
"@types/node": "^18.15.11"
}
}

View File

@@ -8,6 +8,8 @@ import { OnvifIntercom } from "../../onvif/src/onvif-intercom";
import { RtspProvider, RtspSmartCamera, UrlMediaStreamOptions } from "../../rtsp/src/rtsp";
import { HikvisionCameraAPI, HikvisionCameraEvent } from "./hikvision-camera-api";
import { hikvisionHttpsAgent } from './probe';
import { startRtpForwarderProcess } from '../../webrtc/src/rtp-forwarders';
import { RtpPacket } from '../../../external/werift/packages/rtp/src/rtp/rtp';
const { mediaManager } = sdk;
@@ -21,8 +23,8 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom {
detectedChannels: Promise<Map<string, MediaStreamOptions>>;
client: HikvisionCameraAPI;
onvifIntercom = new OnvifIntercom(this);
cp: ChildProcess;
activeIntercom: Awaited<ReturnType<typeof startRtpForwarderProcess>>;
constructor(nativeId: string, provider: RtspProvider) {
super(nativeId, provider);
@@ -360,13 +362,11 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom {
async startIntercom(media: MediaObject): Promise<void> {
if (this.storage.getItem('twoWayAudio') === 'ONVIF') {
this.activeIntercom?.kill();
this.activeIntercom = undefined;
const options = await this.getConstructedVideoStreamOptions();
const stream = options[0];
const url = new URL(stream.url);
// amcrest onvif requires this proto query parameter, or onvif two way
// will not activate.
url.searchParams.set('proto', 'Onvif');
this.onvifIntercom.url = url.toString();
this.onvifIntercom.url = stream.url;
return this.onvifIntercom.startIntercom(media);
}
@@ -390,7 +390,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom {
}
}
catch (e) {
this.console.error('Fialure while determining two way audio codec', e);
this.console.error('Failure while determining two way audio codec', e);
}
if (codec === 'G.711ulaw') {
@@ -415,76 +415,64 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom {
const buffer = await mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput);
const ffmpegInput = JSON.parse(buffer.toString()) as FFmpegInput;
const args = ffmpegInput.inputArguments.slice();
args.unshift('-hide_banner');
args.push(
"-vn",
'-ar', '8000',
'-ac', '1',
'-acodec', codec,
'-f', format,
'pipe:3',
);
this.console.log('ffmpeg intercom', args);
const ffmpeg = await mediaManager.getFFmpegPath();
this.cp = child_process.spawn(ffmpeg, args, {
stdio: ['pipe', 'pipe', 'pipe', 'pipe'],
const passthrough = new PassThrough();
const open = `http://${this.getHttpAddress()}/ISAPI/System/TwoWayAudio/channels/${channel}/open`;
const { data } = await this.getClient().digestAuth.request({
httpsAgent: hikvisionHttpsAgent,
method: 'PUT',
url: open,
});
this.cp.on('exit', () => this.cp = undefined);
ffmpegLogInitialOutput(this.console, this.cp);
const socket = this.cp.stdio[3] as Readable;
this.console.log('two way audio opened', data);
(async () => {
const passthrough = new PassThrough();
const url = `http://${this.getHttpAddress()}/ISAPI/System/TwoWayAudio/channels/${channel}/audioData`;
this.console.log('posting audio data to', url);
try {
const open = `http://${this.getHttpAddress()}/ISAPI/System/TwoWayAudio/channels/${channel}/open`;
const { data } = await this.getClient().digestAuth.request({
httpsAgent: hikvisionHttpsAgent,
method: 'PUT',
url: open,
});
this.console.log('two way audio opened', data);
const put = this.getClient().digestAuth.request({
httpsAgent: hikvisionHttpsAgent,
method: 'PUT',
url,
headers: {
'Content-Type': 'application/octet-stream',
// 'Connection': 'close',
'Content-Length': '0'
},
data: passthrough,
});
const url = `http://${this.getHttpAddress()}/ISAPI/System/TwoWayAudio/channels/${channel}/audioData`;
this.console.log('posting audio data to', url);
// seems the dahua doorbells preferred 1024 chunks. should investigate adts
// parsing and sending multipart chunks instead.
this.getClient().digestAuth.request({
httpsAgent: hikvisionHttpsAgent,
method: 'PUT',
url,
headers: {
'Content-Type': 'application/octet-stream',
// 'Connection': 'close',
'Content-Length': '0'
},
data: passthrough,
});
while (true) {
const data = await readLength(socket, 1024);
passthrough.push(data);
}
}
catch (e) {
}
finally {
this.console.log('audio finished');
passthrough.end();
let available = Buffer.alloc(0);
this.activeIntercom?.kill();
const forwarder = this.activeIntercom = await startRtpForwarderProcess(this.console, ffmpegInput, {
audio: {
onRtp: rtp => {
const parsed = RtpPacket.deSerialize(rtp);
available = Buffer.concat([available, parsed.payload]);
if (available.length > 1024) {
passthrough.push(available.subarray(0, 1024));
available = available.subarray(1024);
}
},
codecCopy: codec,
encoderArguments: [
'-ar', '8000',
'-ac', '1',
'-acodec', codec,
]
}
});
forwarder.killPromise.finally(() => {
this.console.log('audio finished');
passthrough.end();
this.stopIntercom();
})();
});
put.finally(() => forwarder.kill());
}
async stopIntercom(): Promise<void> {
this.activeIntercom?.kill();
this.activeIntercom = undefined;
if (this.storage.getItem('twoWayAudio') === 'ONVIF') {
return this.onvifIntercom.stopIntercom();
}

View File

@@ -459,6 +459,10 @@ export class H264Repacketizer {
if (this.shouldFilter(nalType)) {
return false;
}
if (nalType === NAL_TYPE_SPS)
this.updateSps(payload);
if (nalType === NAL_TYPE_PPS)
this.updatePps(payload);
return true;
});
if (depacketized.length === 0) {

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/objectdetector",
"version": "0.0.123",
"version": "0.0.132",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/objectdetector",
"version": "0.0.123",
"version": "0.0.132",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/objectdetector",
"version": "0.0.123",
"version": "0.0.132",
"description": "Scrypted Video Analysis Plugin. Installed alongside a detection service like OpenCV or TensorFlow.",
"author": "Scrypted",
"license": "Apache-2.0",
@@ -35,18 +35,16 @@
"name": "Video Analysis Plugin",
"type": "API",
"interfaces": [
"DeviceProvider",
"Settings",
"MixinProvider",
"DeviceProvider"
"MixinProvider"
],
"realfs": true,
"pluginDependencies": [
"@scrypted/python-codecs"
]
},
"optionalDependencies": {
"sharp": "^0.31.3"
},
"optionalDependencies": {},
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
@@ -58,7 +56,6 @@
"devDependencies": {
"@types/lodash": "^4.14.175",
"@types/node": "^14.17.11",
"@types/semver": "^7.3.13",
"@types/sharp": "^0.31.1"
"@types/semver": "^7.3.13"
}
}

View File

@@ -1,92 +0,0 @@
export class DenoisedDetectionEntry<T> {
id?: string;
boundingBox?: [number, number, number, number];
name: string;
score: number;
detection: T;
firstSeen?: number;
firstBox?: [number, number, number, number];
lastSeen?: number;
lastBox?: [number, number, number, number];
durationGone?: number;
}
export interface DenoisedDetectionOptions<T> {
added?: (detection: DenoisedDetectionEntry<T>) => void;
removed?: (detection: DenoisedDetectionEntry<T>) => void;
retained?: (detection: DenoisedDetectionEntry<T>, previous: DenoisedDetectionEntry<T>) => void;
untracked?: (detection: DenoisedDetectionOptions<T>) => void,
expiring?: (previous: DenoisedDetectionEntry<T>) => void;
timeout?: number;
now?: number;
}
export interface DenoisedDetectionState<T> {
previousDetections?: DenoisedDetectionEntry<T>[];
frameCount?: number;
lastDetection?: number;
// id to time
externallyTracked?: Map<string, DenoisedDetectionEntry<T>>;
}
export function denoiseDetections<T>(state: DenoisedDetectionState<T>,
currentDetections: DenoisedDetectionEntry<T>[],
options?: DenoisedDetectionOptions<T>
) {
if (!state.previousDetections)
state.previousDetections = [];
const now = options.now || Date.now();
const lastDetection = state.lastDetection || now;
const sinceLastDetection = now - lastDetection;
if (!state.externallyTracked)
state.externallyTracked = new Map();
for (const tracked of currentDetections) {
tracked.durationGone = 0;
tracked.lastSeen = now;
tracked.lastBox = tracked.boundingBox;
if (!tracked.id) {
const id = tracked.id = `untracked-${tracked.name}`;
if (!state.externallyTracked.get(id)) {
// crappy track untracked objects for 1 minute.
setTimeout(() => state.externallyTracked.delete(id), 60000);
}
}
let previous = state.externallyTracked.get(tracked.id);
if (previous) {
state.externallyTracked.delete(tracked.id);
tracked.firstSeen = previous.firstSeen;
tracked.firstBox = previous.firstBox;
previous.durationGone = 0;
previous.lastSeen = now;
previous.lastBox = tracked.boundingBox;
options?.retained(tracked, previous);
}
else {
tracked.firstSeen = now;
tracked.firstBox = tracked.lastBox = tracked.boundingBox;
options?.added(tracked);
}
}
for (const previous of state.externallyTracked.values()) {
if (now - previous.lastSeen) {
previous.durationGone += sinceLastDetection;
if (previous.durationGone >= options.timeout) {
options?.expiring(previous);
}
}
}
for (const tracked of currentDetections) {
state.externallyTracked.set(tracked.id, tracked);
}
}

View File

@@ -0,0 +1,169 @@
import { Deferred } from "@scrypted/common/src/deferred";
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from "@scrypted/common/src/media-helpers";
import { readLength, readLine } from "@scrypted/common/src/read-stream";
import sdk, { FFmpegInput, Image, ImageFormat, ImageOptions, MediaObject, ScryptedDeviceBase, ScryptedMimeTypes, VideoFrame, VideoFrameGenerator, VideoFrameGeneratorOptions } from "@scrypted/sdk";
import child_process from 'child_process';
import { Readable } from 'stream';
// A single decoded video frame as raw, uncompressed pixel data,
// parsed from ffmpeg's PAM image2pipe output.
interface RawFrame {
    // frame width in pixels (from the PAM WIDTH header)
    width: number;
    // frame height in pixels (from the PAM HEIGHT header)
    height: number;
    // packed pixel payload; width * height * channels bytes (rgb24 or gray)
    data: Buffer;
}
/**
 * Wraps a RawImage in a Scrypted MediaObject that also implements the
 * VideoFrame interface, delegating pixel access back to the wrapped image.
 */
async function createRawImageMediaObject(image: RawImage): Promise<VideoFrame & MediaObject> {
    // VideoFrame surface exposed alongside the media object; toBuffer/toImage
    // forward to the RawImage so option validation stays in one place.
    const frameInterface = {
        format: null,
        timestamp: 0,
        width: image.width,
        height: image.height,
        queued: 0,
        toBuffer: (options: ImageOptions) => image.toBuffer(options),
        toImage: (options: ImageOptions) => image.toImage(options),
        flush: async () => { },
    };
    return sdk.mediaManager.createMediaObject(image, ScryptedMimeTypes.Image, frameInterface);
}
/**
 * An in-memory raw frame that implements the Scrypted Image interface.
 * The pixel data is fixed at construction; any option requesting a
 * transformation or a different format is rejected.
 */
class RawImage implements Image, RawFrame {
    constructor(public data: Buffer, public width: number, public height: number, public format: ImageFormat) {
    }

    // Throws if the caller requests anything this immutable image cannot honor.
    checkOptions(options: ImageOptions) {
        const needsTransform = options?.resize || options?.crop;
        if (needsTransform)
            throw new Error('resize and crop are not supported');
        const requestedFormat = options?.format;
        if (requestedFormat && requestedFormat !== this.format)
            throw new Error('format not supported');
    }

    async toBuffer(options: ImageOptions) {
        this.checkOptions(options);
        return this.data;
    }

    async toImage(options: ImageOptions) {
        this.checkOptions(options);
        return createRawImageMediaObject(this);
    }
}
/**
 * VideoFrameGenerator that decodes a media stream into raw frames by spawning
 * ffmpeg with PAM/image2pipe output on fd 3 and parsing each frame's text
 * header (WIDTH/HEIGHT/TUPLTYPE ... ENDHDR) followed by the pixel payload.
 *
 * Fixes vs. original:
 * - `headers` is a Map but was accessed with bracket notation
 *   (`headers[key] = value`, `headers['TUPLTYPE']`), which stores plain object
 *   properties that `Map.prototype.clear()` never removes; now uses set/get.
 * - `if (options.resize)` dereferenced the optional `options` parameter
 *   without optional chaining (sibling check already used `options?.fps`).
 */
export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements VideoFrameGenerator {
    async *generateVideoFramesInternal(mediaObject: MediaObject, options?: VideoFrameGeneratorOptions, filter?: (videoFrame: VideoFrame & MediaObject) => Promise<boolean>): AsyncGenerator<VideoFrame & MediaObject, any, unknown> {
        const ffmpegInput = await sdk.mediaManager.convertMediaObjectToJSON<FFmpegInput>(mediaObject, ScryptedMimeTypes.FFmpegInput);
        const gray = options?.format === 'gray';
        const channels = gray ? 1 : 3;
        const format: ImageFormat = gray ? 'gray' : 'rgb';

        const vf: string[] = [];
        if (options?.fps)
            vf.push(`fps=${options.fps}`);
        // fix: optional chaining — options may be undefined.
        if (options?.resize)
            vf.push(`scale=${options.resize.width}:${options.resize.height}`);
        const args = [
            '-hide_banner',
            //'-hwaccel', 'auto',
            ...ffmpegInput.inputArguments,
            '-vcodec', 'pam',
            '-pix_fmt', gray ? 'gray' : 'rgb24',
            ...vf.length ? [
                '-vf',
                vf.join(','),
            ] : [],
            '-f', 'image2pipe',
            'pipe:3',
        ];

        // this seems to reduce latency.
        // addVideoFilterArguments(args, 'fps=10', 'fps');

        const cp = child_process.spawn(await sdk.mediaManager.getFFmpegPath(), args, {
            stdio: ['pipe', 'pipe', 'pipe', 'pipe'],
        });
        // log into the source device's console when one is available.
        const console = mediaObject?.sourceId ? sdk.deviceManager.getMixinConsole(mediaObject.sourceId) : this.console;
        safePrintFFmpegArguments(console, args);
        ffmpegLogInitialOutput(console, cp);

        let finished = false;
        let frameDeferred: Deferred<RawFrame>;

        // Reads PAM frames off fd 3 and hands the most recent one to the
        // generator loop via frameDeferred; frames arriving while the consumer
        // is busy are dropped (latest-frame wins).
        const reader = async () => {
            try {
                const readable = cp.stdio[3] as Readable;
                const headers = new Map<string, string>();
                while (!finished) {
                    const line = await readLine(readable);
                    if (line !== 'ENDHDR') {
                        const [key, value] = line.split(' ');
                        // fix: Map.set — bracket assignment on a Map stores a
                        // plain object property that Map.clear() does not remove.
                        headers.set(key, value);
                        continue;
                    }

                    const tupltype = headers.get('TUPLTYPE');
                    if (tupltype !== 'RGB' && tupltype !== 'GRAYSCALE')
                        throw new Error(`Unexpected TUPLTYPE in PAM stream: ${tupltype}`);

                    const width = parseInt(headers.get('WIDTH'));
                    const height = parseInt(headers.get('HEIGHT'));
                    if (!width || !height)
                        throw new Error('Invalid dimensions in PAM stream');
                    const length = width * height * channels;
                    headers.clear();
                    const data = await readLength(readable, length);

                    if (frameDeferred) {
                        const f = frameDeferred;
                        frameDeferred = undefined;
                        f.resolve({
                            width,
                            height,
                            data,
                        });
                    }
                    else {
                        // this.console.warn('skipped frame');
                    }
                }
            }
            catch (e) {
            }
            finally {
                console.log('finished reader');
                finished = true;
                frameDeferred?.reject(new Error('frame generator finished'));
            }
        }

        try {
            reader();

            while (!finished) {
                frameDeferred = new Deferred();
                const raw = await frameDeferred.promise;
                const { width, height, data } = raw;

                const rawImage = new RawImage(data, width, height, format);
                try {
                    const mo = await createRawImageMediaObject(rawImage);
                    yield mo;
                }
                finally {
                    // release the pixel buffer promptly; consumers must not
                    // retain the image past the yield.
                    rawImage.data = undefined;
                }
            }
        }
        catch (e) {
        }
        finally {
            console.log('finished generator');
            finished = true;
            safeKillFFmpeg(cp);
        }
    }

    async generateVideoFrames(mediaObject: MediaObject, options?: VideoFrameGeneratorOptions, filter?: (videoFrame: VideoFrame & MediaObject) => Promise<boolean>): Promise<AsyncGenerator<VideoFrame & MediaObject, any, unknown>> {
        return this.generateVideoFramesInternal(mediaObject, options, filter);
    }
}

View File

@@ -33,11 +33,13 @@ async function createVipsMediaObject(image: VipsImage): Promise<VideoFrame & Med
timestamp: 0,
width: image.width,
height: image.height,
queued: 0,
toBuffer: (options: ImageOptions) => image.toBuffer(options),
toImage: async (options: ImageOptions) => {
const newImage = await image.toVipsImage(options);
return createVipsMediaObject(newImage);
}
},
flush: async () => {},
});
return ret;

View File

@@ -1,13 +1,13 @@
import sdk, { Camera, DeviceProvider, DeviceState, EventListenerRegister, MediaObject, MediaStreamDestination, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, ScryptedNativeId, Setting, Settings, SettingValue, VideoCamera, VideoFrame, VideoFrameGenerator } from '@scrypted/sdk';
import { Deferred } from '@scrypted/common/src/deferred';
import { sleep } from '@scrypted/common/src/sleep';
import sdk, { Camera, DeviceProvider, DeviceState, EventListenerRegister, MediaObject, MediaStreamDestination, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionGeneratorResult, ObjectDetectionModel, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, ScryptedNativeId, Setting, Settings, SettingValue, VideoCamera, VideoFrame, VideoFrameGenerator } from '@scrypted/sdk';
import { StorageSettings } from '@scrypted/sdk/storage-settings';
import crypto from 'crypto';
import { AutoenableMixinProvider } from "../../../common/src/autoenable-mixin-provider";
import { SettingsMixinDeviceBase } from "../../../common/src/settings-mixin";
import { DenoisedDetectionState } from './denoise';
import { FFmpegVideoFrameGenerator, sharpLib } from './ffmpeg-videoframes';
import { serverSupportsMixinEventMasking } from './server-version';
import { sleep } from './sleep';
import { getAllDevices, safeParseJson } from './util';
import { FFmpegVideoFrameGenerator } from './ffmpeg-videoframes-no-sharp';
const polygonOverlap = require('polygon-overlap');
const insidePolygon = require('point-inside-polygon');
@@ -35,12 +35,6 @@ interface ZoneInfo {
}
type ZoneInfos = { [zone: string]: ZoneInfo };
type TrackedDetection = ObjectDetectionResult & {
newOrBetterDetection?: boolean;
bestScore?: number;
bestSecondPassScore?: number;
};
class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera & MotionSensor & ObjectDetector> implements ObjectDetector, Settings {
motionListener: EventListenerRegister;
motionMixinListener: EventListenerRegister;
@@ -116,11 +110,11 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
zones = this.getZones();
zoneInfos = this.getZoneInfos();
detectionIntervalTimeout: NodeJS.Timeout;
detectionState: DenoisedDetectionState<TrackedDetection> = {};
detectionId: string;
detectorRunning = false;
analyzeStop = 0;
lastDetectionInput = 0;
detectorSignal = new Deferred<void>().resolve();
get detectorRunning() {
return !this.detectorSignal.finished;
}
constructor(public plugin: ObjectDetectionPlugin, mixinDevice: VideoCamera & Camera & MotionSensor & ObjectDetector & Settings, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState: { [key: string]: any }, providerNativeId: string, public objectDetection: ObjectDetection & ScryptedDevice, public model: ObjectDetectionModel, group: string, public hasMotionType: boolean, public settings: Setting[]) {
super({
@@ -133,7 +127,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
});
this.cameraDevice = systemManager.getDeviceById<Camera & VideoCamera & MotionSensor & ObjectDetector>(this.id);
this.detectionId = model.name + '-' + this.cameraDevice.id;
this.bindObjectDetection();
this.register();
@@ -151,7 +144,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (this.hasMotionType) {
// force a motion detection restart if it quit
if (this.motionSensorSupplementation === BUILTIN_MOTION_SENSOR_REPLACE)
await this.startPipelineAnalysis();
this.startPipelineAnalysis();
return;
}
}, this.storageSettings.values.detectionInterval * 1000);
@@ -194,11 +187,11 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
return;
if (this.motionSensorSupplementation !== BUILTIN_MOTION_SENSOR_REPLACE)
return;
await this.startPipelineAnalysis();
this.startPipelineAnalysis();
}
endObjectDetection() {
this.detectorRunning = false;
this.detectorSignal.resolve();
}
bindObjectDetection() {
@@ -227,7 +220,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
return;
}
await this.startPipelineAnalysis();
this.startPipelineAnalysis();
});
return;
@@ -245,7 +238,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
return;
if (!this.detectorRunning)
this.console.log('built in motion sensor started motion, starting video detection.');
await this.startPipelineAnalysis();
this.startPipelineAnalysis();
return;
}
@@ -260,20 +253,68 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
}
async startPipelineAnalysis() {
if (this.detectorRunning)
startPipelineAnalysis() {
if (!this.detectorSignal.finished)
return;
this.detectorRunning = true;
this.analyzeStop = Date.now() + this.getDetectionDuration();
const signal = this.detectorSignal = new Deferred();
if (!this.hasMotionType)
this.plugin.objectDetectionStarted(this.console);
const options = {
snapshotPipeline: this.plugin.shouldUseSnapshotPipeline(),
};
this.runPipelineAnalysis(signal, options)
.catch(e => {
this.console.error('Video Analysis ended with error', e);
}).finally(() => {
if (!this.hasMotionType)
this.plugin.objectDetectionEnded(this.console, options.snapshotPipeline);
else
this.console.log('Video Analysis motion detection ended.');
signal.resolve();
});
}
async runPipelineAnalysis(signal: Deferred<void>, options: {
snapshotPipeline: boolean,
}) {
const start = Date.now();
this.analyzeStop = start + this.getDetectionDuration();
let lastStatusTime = Date.now();
let lastStatus = 'starting';
const updatePipelineStatus = (status: string) => {
lastStatus = status;
lastStatusTime = Date.now();
}
let frameGenerator: AsyncGenerator<VideoFrame & MediaObject, void>;
let detectionGenerator: AsyncGenerator<ObjectDetectionGeneratorResult, void>;
const interval = setInterval(() => {
if (Date.now() - lastStatusTime > 30000) {
signal.resolve();
this.console.error('VideoAnalysis is hung and will terminate:', lastStatus);
}
}, 30000);
signal.promise.finally(() => clearInterval(interval));
let newPipeline: string = this.newPipeline;
if (!this.hasMotionType && (!newPipeline || newPipeline === 'Default')) {
if (options.snapshotPipeline) {
newPipeline = 'Snapshot';
this.console.warn(`Due to limited performance, Snapshot mode is being used with ${this.plugin.statsSnapshotConcurrent} actively detecting cameras.`);
}
}
const newPipeline = this.newPipeline;
let generator: () => Promise<AsyncGenerator<VideoFrame & MediaObject>>;
if (newPipeline === 'Snapshot' && !this.hasMotionType) {
options.snapshotPipeline = true;
this.console.log('decoder:', 'Snapshot +', this.objectDetection.name);
const self = this;
generator = async () => (async function* gen() {
frameGenerator = (async function* gen() {
try {
while (self.detectorRunning) {
while (!signal.finished) {
const now = Date.now();
const sleeper = async () => {
const diff = now + 1100 - Date.now();
@@ -282,9 +323,11 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
};
let image: MediaObject & VideoFrame;
try {
updatePipelineStatus('takePicture');
const mo = await self.cameraDevice.takePicture({
reason: 'event',
});
updatePipelineStatus('converting image');
image = await sdk.mediaManager.convertMediaObject(mo, ScryptedMimeTypes.Image);
}
catch (e) {
@@ -294,6 +337,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
// self.console.log('yield')
updatePipelineStatus('processing image');
yield image;
// self.console.log('done yield')
await sleeper();
@@ -307,16 +351,19 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
else {
const destination: MediaStreamDestination = this.hasMotionType ? 'low-resolution' : 'local-recorder';
const videoFrameGenerator = systemManager.getDeviceById<VideoFrameGenerator>(newPipeline);
this.console.log('decoder:', videoFrameGenerator.name);
if (!videoFrameGenerator)
throw new Error('invalid VideoFrameGenerator');
this.console.log(videoFrameGenerator.name, '+', this.objectDetection.name);
updatePipelineStatus('getVideoStream');
const stream = await this.cameraDevice.getVideoStream({
prebuffer: this.model.prebuffer,
destination,
// ask rebroadcast to mute audio, not needed.
audio: null,
});
generator = async () => videoFrameGenerator.generateVideoFrames(stream, {
frameGenerator = await videoFrameGenerator.generateVideoFrames(stream, {
queue: 0,
resize: this.model?.inputSize ? {
width: this.model.inputSize[0],
height: this.model.inputSize[1],
@@ -325,69 +372,71 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
});
}
const start = Date.now();
let detections = 0;
const currentDetections = new Set<string>();
let lastReport = 0;
try {
for await (const detected
of await this.objectDetection.generateObjectDetections(await generator(), {
settings: this.getCurrentSettings(),
sourceId: this.id,
})) {
if (!this.detectorRunning) {
break;
}
if (!this.hasMotionType && Date.now() > this.analyzeStop) {
break;
}
detectionGenerator = await sdk.connectRPCObject(await this.objectDetection.generateObjectDetections(frameGenerator, {
settings: this.getCurrentSettings(),
sourceId: this.id,
}));
// apply the zones to the detections and get a shallow copy list of detections after
// exclusion zones have applied
const zonedDetections = this.applyZones(detected.detected);
detected.detected.detections = zonedDetections;
updatePipelineStatus('waiting result');
detections++;
// this.console.warn('dps', detections / (Date.now() - start) * 1000);
if (!this.hasMotionType) {
for (const d of detected.detected.detections) {
currentDetections.add(d.className);
}
const now = Date.now();
if (now > lastReport + 3000) {
const found = [...currentDetections.values()];
if (!found.length)
found.push('[no detections]');
this.console.log(`[${Math.round((now - start) / 100) / 10}s] Detected:`, ...found);
currentDetections.clear();
lastReport = now;
}
}
if (detected.detected.detectionId) {
const jpeg = await detected.videoFrame.toBuffer({
format: 'jpg',
});
const mo = await sdk.mediaManager.createMediaObject(jpeg, 'image/jpeg');
this.setDetection(detected.detected, mo);
// this.console.log('image saved', detected.detected.detections);
}
this.reportObjectDetections(detected.detected);
if (this.hasMotionType) {
await sleep(250);
}
// this.handleDetectionEvent(detected.detected);
for await (const detected of detectionGenerator) {
if (signal.finished) {
break;
}
if (!this.hasMotionType && Date.now() > this.analyzeStop) {
break;
}
// apply the zones to the detections and get a shallow copy list of detections after
// exclusion zones have applied
const zonedDetections = this.applyZones(detected.detected);
detected.detected.detections = zonedDetections;
// this.console.warn('dps', detections / (Date.now() - start) * 1000);
if (!this.hasMotionType) {
this.plugin.trackDetection();
for (const d of detected.detected.detections) {
currentDetections.add(d.className);
}
const now = Date.now();
if (now > lastReport + 10000) {
const found = [...currentDetections.values()];
if (!found.length)
found.push('[no detections]');
this.console.log(`[${Math.round((now - start) / 100) / 10}s] Detected:`, ...found);
currentDetections.clear();
lastReport = now;
}
}
if (detected.detected.detectionId) {
updatePipelineStatus('creating jpeg');
// const start = Date.now();
const vf = await sdk.connectRPCObject(detected.videoFrame);
const jpeg = await vf.toBuffer({
format: 'jpg',
});
const mo = await sdk.mediaManager.createMediaObject(jpeg, 'image/jpeg');
// this.console.log('retain took', Date.now() -start);
this.setDetection(detected.detected, mo);
// this.console.log('image saved', detected.detected.detections);
}
this.reportObjectDetections(detected.detected);
if (this.hasMotionType) {
// const diff = Date.now() - when;
// when = Date.now();
// this.console.log('sleeper', diff);
await sleep(250);
}
updatePipelineStatus('waiting result');
// this.handleDetectionEvent(detected.detected);
}
catch (e) {
this.console.error('video pipeline ended with error', e);
}
finally {
this.console.log('video pipeline analysis ended, dps:', detections / (Date.now() - start) * 1000);
this.endObjectDetection();
}
}
normalizeBox(boundingBox: [number, number, number, number], inputDimensions: [number, number]) {
@@ -473,7 +522,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
copy = copy.filter(c => c !== o);
}
return copy as TrackedDetection[];
return copy;
}
reportObjectDetections(detection: ObjectsDetected) {
@@ -512,7 +561,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (!detection.detectionId)
detection.detectionId = crypto.randomBytes(4).toString('hex');
this.console.log('retaining detection image');
this.console.log('retaining detection image', ...detection.detections);
const { detectionId } = detection;
this.detections.set(detectionId, detectionInput);
@@ -721,9 +770,9 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
if (key === 'analyzeButton') {
this.analyzeStop = Date.now() + 60000;
// await this.snapshotDetection();
await this.startPipelineAnalysis();
this.startPipelineAnalysis();
this.analyzeStop = Date.now() + 60000;
}
else {
const settings = this.getCurrentSettings();
@@ -807,9 +856,17 @@ class ObjectDetectorMixin extends MixinDeviceBase<ObjectDetection> implements Mi
}
}
interface ObjectDetectionStatistics {
dps: number;
sampleTime: number;
}
class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings, DeviceProvider {
currentMixins = new Set<ObjectDetectorMixin>();
objectDetectionStatistics = new Map<number, ObjectDetectionStatistics>();
statsSnapshotTime: number;
statsSnapshotDetections: number;
statsSnapshotConcurrent = 0;
storageSettings = new StorageSettings(this, {
activeMotionDetections: {
title: 'Active Motion Detection Sessions',
@@ -824,12 +881,77 @@ class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings,
title: 'Active Object Detection Sessions',
readonly: true,
mapGet: () => {
// could use the stats variable...
return [...this.currentMixins.values()]
.reduce((c1, v1) => c1 + [...v1.currentMixins.values()]
.reduce((c2, v2) => c2 + (!v2.hasMotionType && v2.detectorRunning ? 1 : 0), 0), 0);
}
},
}
})
});
shouldUseSnapshotPipeline() {
this.pruneOldStatistics();
for (const [k, v] of this.objectDetectionStatistics.entries()) {
// check the stats history to see if any sessions
// with same or lower number of cameras were on the struggle bus.
if (v.dps < 2 && k <= this.statsSnapshotConcurrent)
return true;
}
return false;
}
pruneOldStatistics() {
const now = Date.now();
for (const [k, v] of this.objectDetectionStatistics.entries()) {
// purge the stats every hour
if (Date.now() - v.sampleTime > 60 * 60 * 1000)
this.objectDetectionStatistics.delete(k);
}
}
trackDetection() {
this.statsSnapshotDetections++;
}
objectDetectionStarted(console: Console) {
this.resetStats(console);
this.statsSnapshotConcurrent++;
}
objectDetectionEnded(console: Console, snapshotPipeline: boolean) {
this.resetStats(console, snapshotPipeline);
this.statsSnapshotConcurrent--;
}
resetStats(console: Console, snapshotPipeline?: boolean) {
const now = Date.now();
const concurrentSessions = this.statsSnapshotConcurrent;
if (concurrentSessions) {
const duration = now - this.statsSnapshotTime;
const stats: ObjectDetectionStatistics = {
sampleTime: now,
dps: this.statsSnapshotDetections / (duration / 1000),
};
// ignore short sessions and sessions with no detections (busted?).
// also ignore snapshot sessions because that will skew/throttle the stats used
// to determine system dps capabilities.
if (duration > 10000 && this.statsSnapshotDetections && !snapshotPipeline)
this.objectDetectionStatistics.set(concurrentSessions, stats);
this.pruneOldStatistics();
const str = `video analysis, ${concurrentSessions} camera(s), dps: ${Math.round(stats.dps * 10) / 10} (${this.statsSnapshotDetections}/${Math.round(duration / 1000)})`;
this.console.log(str);
console?.log(str);
}
this.statsSnapshotDetections = 0;
this.statsSnapshotTime = now;
}
constructor(nativeId?: ScryptedNativeId) {
super(nativeId);
@@ -840,9 +962,9 @@ class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings,
{
name: 'FFmpeg Frame Generator',
type: ScryptedDeviceType.Builtin,
interfaces: sharpLib ? [
interfaces: [
ScryptedInterface.VideoFrameGenerator,
] : [],
],
nativeId: 'ffmpeg',
}
]

View File

@@ -1,3 +0,0 @@
export function sleep(ms: number) {
return new Promise(resolve => setTimeout(resolve, ms));
}

View File

@@ -1,18 +1,17 @@
{
"name": "@scrypted/onvif",
"version": "0.0.119",
"version": "0.0.120",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/onvif",
"version": "0.0.119",
"version": "0.0.120",
"license": "Apache",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^16.9.1",
"base-64": "^1.0.0",
"http-auth-utils": "^3.0.2",
"md5": "^2.3.0",
@@ -21,6 +20,7 @@
},
"devDependencies": {
"@types/md5": "^2.3.1",
"@types/node": "^18.15.11",
"@types/xml2js": "^0.4.9"
}
},
@@ -65,7 +65,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.68",
"version": "0.2.87",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -130,9 +130,10 @@
}
},
"node_modules/@types/node": {
"version": "16.9.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz",
"integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g=="
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==",
"dev": true
},
"node_modules/@types/xml2js": {
"version": "0.4.9",
@@ -328,9 +329,10 @@
}
},
"@types/node": {
"version": "16.9.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz",
"integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g=="
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==",
"dev": true
},
"@types/xml2js": {
"version": "0.4.9",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/onvif",
"version": "0.0.119",
"version": "0.0.120",
"description": "ONVIF Camera Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -39,7 +39,6 @@
"@koush/axios-digest-auth": "^0.8.5",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^16.9.1",
"base-64": "^1.0.0",
"http-auth-utils": "^3.0.2",
"md5": "^2.3.0",
@@ -48,6 +47,7 @@
},
"devDependencies": {
"@types/md5": "^2.3.1",
"@types/node": "^18.15.11",
"@types/xml2js": "^0.4.9"
}
}

View File

@@ -35,7 +35,7 @@ function stripNamespaces(topic: string) {
let parts = topic.split('/')
for (let index = 0; index < parts.length; index++) {
let stringNoNamespace = parts[index].split(':').pop() // split on :, then return the last item in the array
if (output.length == 0) {
if (output.length === 0) {
output += stringNoNamespace
} else {
output += '/' + stringNoNamespace
@@ -92,9 +92,18 @@ export class OnvifCameraAPI {
else
ret.emit('event', OnvifEvent.AudioStop)
}
// Reolink
else if (eventTopic.includes('Visitor') && (dataValue === true || dataValue === false)) {
if (dataValue) {
ret.emit('event', OnvifEvent.BinaryStart)
}
else {
ret.emit('event', OnvifEvent.BinaryStop)
}
}
// Mobotix T26
else if (eventTopic.includes('VideoSource/Alarm')) {
if (dataValue == "Ring" || dataValue == "CameraBellButton") {
if (dataValue === "Ring" || dataValue === "CameraBellButton") {
ret.emit('event', OnvifEvent.BinaryRingEvent);
}
}
@@ -155,7 +164,7 @@ export class OnvifCameraAPI {
this.console.log('supportsEvents error', err);
return reject(err);
}
if (!err && data.events && data.events.WSPullPointSupport && data.events.WSPullPointSupport == true) {
if (!err && data.events && data.events.WSPullPointSupport && data.events.WSPullPointSupport === true) {
this.console.log('Camera supports WSPullPoint', xml);
} else {
this.console.log('Camera does not show WSPullPoint support, but trying anyway', xml);

View File

@@ -1,11 +1,13 @@
import sdk, { MediaObject, Intercom, FFmpegInput, ScryptedMimeTypes } from "@scrypted/sdk";
import { RtspSmartCamera } from "../../rtsp/src/rtsp";
import { parseSemicolonDelimited, RtspClient } from "@scrypted/common/src/rtsp-server";
import { createBindZero } from "@scrypted/common/src/listen-cluster";
import { RtspClient, parseSemicolonDelimited } from "@scrypted/common/src/rtsp-server";
import { parseSdp } from "@scrypted/common/src/sdp-utils";
import { ffmpegLogInitialOutput, safePrintFFmpegArguments } from "@scrypted/common/src/media-helpers";
import child_process from 'child_process';
import { createBindZero, reserveUdpPort } from "@scrypted/common/src/listen-cluster";
import sdk, { FFmpegInput, Intercom, MediaObject, ScryptedMimeTypes } from "@scrypted/sdk";
import crypto from 'crypto';
import { RtpPacket } from '../../../external/werift/packages/rtp/src/rtp/rtp';
import { nextSequenceNumber } from "../../homekit/src/types/camera/jitter-buffer";
import { RtspSmartCamera } from "../../rtsp/src/rtsp";
import { startRtpForwarderProcess } from '../../webrtc/src/rtp-forwarders';
const { mediaManager } = sdk;
@@ -80,11 +82,11 @@ export class OnvifIntercom implements Intercom {
const url = new URL(this.url);
url.username = username;
url.password = password;
this.intercomClient = new RtspClient(url.toString());
this.intercomClient.console = this.camera.console;
await this.intercomClient.options();
const intercomClient = this.intercomClient = new RtspClient(url.toString());
intercomClient.console = this.camera.console;
await intercomClient.options();
const describe = await this.intercomClient.describe({
const describe = await intercomClient.describe({
Require,
});
this.camera.console.log('ONVIF Backchannel SDP:');
@@ -94,31 +96,35 @@ export class OnvifIntercom implements Intercom {
if (!audioBackchannel)
throw new Error('ONVIF audio backchannel not found');
return audioBackchannel;
return { audioBackchannel, intercomClient };
}
async startIntercom(media: MediaObject) {
const ffmpegInput = await mediaManager.convertMediaObjectToJSON<FFmpegInput>(media, ScryptedMimeTypes.FFmpegInput);
await this.stopIntercom();
const audioBackchannel = await this.checkIntercom();
const { audioBackchannel, intercomClient } = await this.checkIntercom();
if (!audioBackchannel)
throw new Error('ONVIF audio backchannel not found');
const rtp = await reserveUdpPort();
const rtpServer = await createBindZero('udp4');
const rtp = rtpServer.port;
const rtcp = rtp + 1;
let ip: string;
let serverRtp: number;
let transportDict: ReturnType<typeof parseSemicolonDelimited>;
let tcp = false;
try {
const headers: any = {
Require,
Transport: `RTP/AVP;unicast;client_port=${rtp}-${rtcp}`,
};
const response = await this.intercomClient.request('SETUP', headers, audioBackchannel.control);
const response = await intercomClient.request('SETUP', headers, audioBackchannel.control);
transportDict = parseSemicolonDelimited(response.headers.transport);
this.intercomClient.session = response.headers.session.split(';')[0];
intercomClient.session = response.headers.session.split(';')[0];
ip = this.camera.getIPAddress();
const { server_port } = transportDict;
@@ -126,6 +132,7 @@ export class OnvifIntercom implements Intercom {
serverRtp = parseInt(serverPorts[0]);
}
catch (e) {
tcp = true;
this.camera.console.error('onvif udp backchannel failed, falling back to tcp', e);
const headers: any = {
@@ -133,21 +140,19 @@ export class OnvifIntercom implements Intercom {
Transport: `RTP/AVP/TCP;unicast;interleaved=0-1`,
};
const response = await this.intercomClient.request('SETUP', headers, audioBackchannel.control);
const response = await intercomClient.request('SETUP', headers, audioBackchannel.control);
transportDict = parseSemicolonDelimited(response.headers.transport);
this.intercomClient.session = response.headers.session.split(';')[0];
intercomClient.session = response.headers.session.split(';')[0];
ip = '127.0.0.1';
const server = await createBindZero('udp4');
this.intercomClient.client.on('close', () => server.server.close());
intercomClient.client.on('close', () => server.server.close());
serverRtp = server.port;
server.server.on('message', data => {
this.intercomClient.send(data, 0);
intercomClient.send(data, 0);
});
}
this.camera.console.log('backchannel transport', transportDict);
const ffmpegInput = await mediaManager.convertMediaObjectToJSON<FFmpegInput>(media, ScryptedMimeTypes.FFmpegInput);
const availableCodecs = [...parseCodecs(audioBackchannel.contents)];
let match: CodecMatch;
let codec: SupportedCodec;
@@ -171,27 +176,69 @@ export class OnvifIntercom implements Intercom {
}
// ffmpeg expects ssrc as signed int32.
const ssrc = ssrcBuffer.readInt32BE(0);
const ssrcUnsigned = ssrcBuffer.readUint32BE(0);
const args = [
'-hide_banner',
...ffmpegInput.inputArguments,
'-vn',
'-acodec', codec.ffmpegCodec,
'-ar', match.sampleRate,
'-ac', match.channels || '1',
"-payload_type", match.payloadType,
"-ssrc", ssrc.toString(),
'-f', 'rtp',
`rtp://${ip}:${serverRtp}?localrtpport=${rtp}&localrtcpport=${rtcp}`,
];
safePrintFFmpegArguments(this.camera.console, args);
const cp = child_process.spawn(await mediaManager.getFFmpegPath(), args);
const payloadType = parseInt(match.payloadType);
ffmpegLogInitialOutput(this.camera.console, cp);
await this.intercomClient.play({
await intercomClient.play({
Require,
});
let pending: RtpPacket;
let seqNumber = 0;
const forwarder = await startRtpForwarderProcess(console, ffmpegInput, {
audio: {
onRtp: (rtp) => {
// if (true) {
// const p = RtpPacket.deSerialize(rtp);
// p.header.payloadType = payloadType;
// p.header.ssrc = ssrcUnsigned;
// p.header.marker = true;
// rtpServer.server.send(p.serialize(), serverRtp, ip);
// return;
// }
const p = RtpPacket.deSerialize(rtp);
if (!pending) {
pending = p;
return;
}
if (pending.payload.length + p.payload.length < 1024) {
pending.payload = Buffer.concat([pending.payload, p.payload]);
return;
}
pending.header.payloadType = payloadType;
pending.header.ssrc = ssrcUnsigned;
pending.header.sequenceNumber = seqNumber;
seqNumber = nextSequenceNumber(seqNumber);
pending.header.marker = true;
if (!tcp)
rtpServer.server.send(pending.serialize(), serverRtp, ip);
else
intercomClient.send(pending.serialize(), 0);
pending = p;
},
codecCopy: codec.ffmpegCodec,
payloadType,
ssrc,
packetSize: 1024,
encoderArguments: [
'-acodec', codec.ffmpegCodec,
'-ar', match.sampleRate,
'-ac', match.channels || '1',
],
}
});
intercomClient.client.on('close', () => forwarder.kill());
forwarder.killPromise.finally(() => intercomClient?.client.destroy());
this.camera.console.log('intercom playing');
}

View File

@@ -9,7 +9,7 @@ import imutils
import numpy as np
import scrypted_sdk
from PIL import Image
from scrypted_sdk.types import (ObjectDetectionGeneratorSession,
from scrypted_sdk.types import (ObjectDetectionGeneratorSession,ObjectDetectionSession,
ObjectDetectionResult, ObjectsDetected,
Setting, VideoFrame)
@@ -116,35 +116,37 @@ class OpenCVPlugin(DetectPlugin):
blur = int(settings.get('blur', blur))
return area, threshold, interval, blur
def detect(self, frame, settings: Any, detection_session: OpenCVDetectionSession, src_size, convert_to_src_size) -> ObjectsDetected:
def detect(self, frame, detection_session: ObjectDetectionSession, src_size, convert_to_src_size) -> ObjectsDetected:
session: OpenCVDetectionSession = detection_session['settings']['session']
settings = detection_session and detection_session.get('settings', None)
area, threshold, interval, blur = self.parse_settings(settings)
gray = frame
detection_session.curFrame = cv2.GaussianBlur(
gray, (blur, blur), 0, dst=detection_session.curFrame)
session.curFrame = cv2.GaussianBlur(
gray, (blur, blur), 0, dst=session.curFrame)
detections: List[ObjectDetectionResult] = []
detection_result: ObjectsDetected = {}
detection_result['detections'] = detections
detection_result['inputDimensions'] = src_size
if detection_session.previous_frame is None:
detection_session.previous_frame = detection_session.curFrame
detection_session.curFrame = None
if session.previous_frame is None:
session.previous_frame = session.curFrame
session.curFrame = None
return detection_result
detection_session.frameDelta = cv2.absdiff(
detection_session.previous_frame, detection_session.curFrame, dst=detection_session.frameDelta)
tmp = detection_session.curFrame
detection_session.curFrame = detection_session.previous_frame
detection_session.previous_frame = tmp
session.frameDelta = cv2.absdiff(
session.previous_frame, session.curFrame, dst=session.frameDelta)
tmp = session.curFrame
session.curFrame = session.previous_frame
session.previous_frame = tmp
_, detection_session.thresh = cv2.threshold(
detection_session.frameDelta, threshold, 255, cv2.THRESH_BINARY, dst=detection_session.thresh)
detection_session.dilated = cv2.dilate(
detection_session.thresh, None, iterations=2, dst=detection_session.dilated)
_, session.thresh = cv2.threshold(
session.frameDelta, threshold, 255, cv2.THRESH_BINARY, dst=session.thresh)
session.dilated = cv2.dilate(
session.thresh, None, iterations=2, dst=session.dilated)
fcontours = cv2.findContours(
detection_session.dilated, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
session.dilated, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
contours = imutils.grab_contours(fcontours)
@@ -205,24 +207,16 @@ class OpenCVPlugin(DetectPlugin):
detection_session.cap = None
return super().end_session(detection_session)
async def generateObjectDetections(self, videoFrames: Any, session: ObjectDetectionGeneratorSession = None) -> Any:
try:
ds = OpenCVDetectionSession()
videoFrames = await scrypted_sdk.sdk.connectRPCObject(videoFrames)
async for videoFrame in videoFrames:
detected = await self.run_detection_videoframe(videoFrame, session and session.get('settings'), ds)
yield {
'__json_copy_serialize_children': True,
'detected': detected,
'videoFrame': videoFrame,
}
finally:
try:
await videoFrames.aclose()
except:
pass
async def generateObjectDetections(self, videoFrames: Any, detection_session: ObjectDetectionGeneratorSession = None) -> Any:
if not detection_session:
detection_session = {}
if not detection_session.get('settings'):
detection_session['settings'] = {}
settings = detection_session['settings']
settings['session'] = OpenCVDetectionSession()
return super().generateObjectDetections(videoFrames, detection_session)
async def run_detection_videoframe(self, videoFrame: VideoFrame, settings: Any, detection_session: OpenCVDetectionSession) -> ObjectsDetected:
async def run_detection_videoframe(self, videoFrame: VideoFrame, detection_session: ObjectDetectionSession) -> ObjectsDetected:
width = videoFrame.width
height = videoFrame.height
@@ -267,5 +261,5 @@ class OpenCVPlugin(DetectPlugin):
def convert_to_src_size(point):
return point[0] * scale, point[1] * scale
mat = np.ndarray((height, width, 1), buffer=buffer, dtype=np.uint8)
detections = self.detect(mat, settings, detection_session, (videoFrame.width, videoFrame.height), convert_to_src_size)
detections = self.detect(mat, detection_session, (videoFrame.width, videoFrame.height), convert_to_src_size)
return detections

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/pam-diff",
"version": "0.0.20",
"version": "0.0.21",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/pam-diff",
"version": "0.0.20",
"version": "0.0.21",
"hasInstallScript": true,
"dependencies": {
"@types/node": "^16.6.1",

View File

@@ -43,5 +43,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.20"
"version": "0.0.21"
}

View File

@@ -1,54 +1,15 @@
import sdk, { FFmpegInput, MediaObject, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionGeneratorResult, ObjectDetectionGeneratorSession, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionSession, ObjectsDetected, ScryptedDeviceBase, ScryptedInterface, ScryptedMimeTypes, VideoFrame } from '@scrypted/sdk';
import child_process, { ChildProcess } from 'child_process';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from "../../../common/src/media-helpers";
import sdk, { MediaObject, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionGeneratorResult, ObjectDetectionGeneratorSession, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionSession, ObjectsDetected, ScryptedDeviceBase, VideoFrame } from '@scrypted/sdk';
import PD from 'pam-diff';
import P2P from 'pipe2pam';
import { PassThrough, Writable } from 'stream';
const { mediaManager } = sdk;
const defaultDifference = 9;
const defaultPercentage = 2;
interface PamDiffSession {
id: string;
timeout?: NodeJS.Timeout;
cp?: ChildProcess;
pamDiff?: any;
callbacks: ObjectDetectionCallbacks;
}
class PamDiff extends ScryptedDeviceBase implements ObjectDetection {
sessions = new Map<string, PamDiffSession>();
endSession(id: string) {
const pds = this.sessions.get(id);
if (!pds)
return;
this.sessions.delete(pds.id);
const event: ObjectsDetected = {
timestamp: Date.now(),
running: false,
detectionId: pds.id,
}
clearTimeout(pds.timeout);
safeKillFFmpeg(pds.cp);
if (pds.callbacks) {
pds.callbacks.onDetectionEnded(event);
}
else {
this.onDeviceEvent(ScryptedInterface.ObjectDetection, event);
}
}
reschedule(id: string, duration: number,) {
const pds = this.sessions.get(id);
if (!pds)
return;
clearTimeout(pds.timeout);
pds.timeout = setTimeout(() => this.endSession(id), duration);
}
async * generateObjectDetectionsInternal(videoFrames: AsyncGenerator<VideoFrame, any, unknown>, session: ObjectDetectionGeneratorSession): AsyncGenerator<ObjectDetectionGeneratorResult, any, unknown> {
videoFrames = await sdk.connectRPCObject(videoFrames);
@@ -92,7 +53,6 @@ class PamDiff extends ScryptedDeviceBase implements ObjectDetection {
}
const event: ObjectsDetected = {
timestamp: Date.now(),
running: true,
inputDimensions: [width, height],
detections,
}
@@ -111,10 +71,10 @@ ENDHDR
`;
const buffer = await videoFrame.toBuffer({
resize: {
resize: (videoFrame.width !== width || videoFrame.height !== height) ? {
width,
height,
},
} : undefined,
format: 'rgb',
});
pt.write(Buffer.from(header));
@@ -146,157 +106,7 @@ ENDHDR
}
async detectObjects(mediaObject: MediaObject, session?: ObjectDetectionSession, callbacks?: ObjectDetectionCallbacks): Promise<ObjectsDetected> {
if (mediaObject && mediaObject.mimeType?.startsWith('image/'))
throw new Error('can not run motion detection on image')
let { detectionId } = session;
let pds = this.sessions.get(detectionId);
if (pds)
pds.callbacks = callbacks;
if (!session?.duration) {
this.endSession(detectionId);
return {
detectionId,
running: false,
timestamp: Date.now(),
}
}
if (pds) {
this.reschedule(detectionId, session.duration);
pds.pamDiff.setDifference(session.settings?.difference || defaultDifference).setPercent(session.settings?.percent || defaultPercentage);
return {
detectionId,
running: true,
timestamp: Date.now(),
};
}
// unable to start/extend this session.
if (!mediaObject) {
this.endSession(detectionId);
return {
detectionId,
running: false,
timestamp: Date.now(),
}
}
const ffmpeg = await mediaManager.getFFmpegPath();
const ffmpegInput: FFmpegInput = JSON.parse((await mediaManager.convertMediaObjectToBuffer(
mediaObject,
ScryptedMimeTypes.FFmpegInput
)).toString());
pds = {
id: detectionId,
callbacks,
}
this.reschedule(detectionId, session.duration);
const args = ffmpegInput.inputArguments.slice();
args.unshift(
'-hide_banner',
...ffmpegInput.videoDecoderArguments || [],
)
args.push(
'-an', '-dn',
'-c:v',
'pam',
'-pix_fmt',
'rgb24',
'-f',
'image2pipe',
'-vf',
`fps=2,scale=640:360`,
'pipe:3',
);
const p2p = new P2P();
const pamDiff = new PD({
difference: session.settings?.difference || defaultDifference,
percent: session.settings?.percent || defaultPercentage,
response: session?.settings?.motionAsObjects ? 'blobs' : 'percent',
});
pamDiff.on('diff', async (data: any) => {
const trigger = data.trigger[0];
// console.log(trigger.blobs.length);
const { blobs } = trigger;
const detections: ObjectDetectionResult[] = [];
if (blobs?.length) {
for (const blob of blobs) {
detections.push(
{
className: 'motion',
score: 1,
boundingBox: [blob.minX, blob.minY, blob.maxX - blob.minX, blob.maxY - blob.minY],
}
)
}
}
else {
detections.push(
{
className: 'motion',
score: 1,
}
)
}
const event: ObjectsDetected = {
timestamp: Date.now(),
running: true,
detectionId: pds.id,
inputDimensions: [640, 360],
detections,
}
if (pds.callbacks) {
pds.callbacks.onDetection(event);
}
else {
this.onDeviceEvent(ScryptedInterface.ObjectDetection, event);
}
});
const console = sdk.deviceManager.getMixinConsole(mediaObject.sourceId, this.nativeId);
pds.pamDiff = pamDiff;
pds.pamDiff
.setDifference(session.settings?.difference || defaultDifference)
.setPercent(session.settings?.percent || defaultPercentage)
.setResponse(session?.settings?.motionAsObjects ? 'blobs' : 'percent');;
safePrintFFmpegArguments(console, args);
pds.cp = child_process.spawn(ffmpeg, args, {
stdio: ['inherit', 'pipe', 'pipe', 'pipe']
});
let pamTimeout: NodeJS.Timeout;
const resetTimeout = () => {
clearTimeout(pamTimeout);
pamTimeout = setTimeout(() => {
const check = this.sessions.get(detectionId);
if (check !== pds)
return;
console.error('PAM image stream timed out. Ending session.');
this.endSession(detectionId);
}, 60000);
}
p2p.on('data', () => {
resetTimeout();
})
resetTimeout();
pds.cp.stdio[3].pipe(p2p as any).pipe(pamDiff as any);
pds.cp.on('exit', () => this.endSession(detectionId));
ffmpegLogInitialOutput(console, pds.cp);
this.sessions.set(detectionId, pds);
return {
detectionId,
running: true,
timestamp: Date.now(),
}
throw new Error('can not run motion detection on image')
}
async getDetectionModel(): Promise<ObjectDetectionModel> {

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/python-codecs",
"version": "0.1.30",
"version": "0.1.35",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/python-codecs",
"version": "0.1.30",
"version": "0.1.35",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/python-codecs",
"version": "0.1.30",
"version": "0.1.35",
"description": "Python Codecs for Scrypted",
"keywords": [
"scrypted",

View File

@@ -153,9 +153,12 @@ class CodecFork:
raise
finally:
print('libav finished after %s' % (time.time() - start))
import os
os._exit(os.EX_OK)
pass
import sys
if sys.platform == 'win32':
sys.exit()
else:
import os
os._exit(os.EX_OK)
async def fork():

View File

@@ -34,6 +34,7 @@ class PILImage(scrypted_sdk.VideoFrame):
finally:
rgb.close()
return await to_thread(format)
# TODO: gray...
def save():
bytesArray = io.BytesIO()

View File

@@ -22,18 +22,30 @@ class VipsImage(scrypted_sdk.VideoFrame):
def format():
return memoryview(vipsImage.vipsImage.write_to_memory())
return await to_thread(format)
elif options['format'] == 'rgba':
def format():
if not vipsImage.vipsImage.hasalpha():
rgba = vipsImage.vipsImage.addalpha()
else:
rgba = vipsImage.vipsImage
return memoryview(rgba.write_to_memory())
return await to_thread(format)
elif options['format'] == 'rgb':
def format():
if vipsImage.vipsImage.hasalpha():
rgb = vipsImage.vipsImage.extract_band(0, n=vipsImage.vipsImage.bands - 1)
else:
rgb = vipsImage.vipsImage
mem = memoryview(rgb.write_to_memory())
return mem
return memoryview(rgb.write_to_memory())
return await to_thread(format)
elif options['format'] == 'gray':
def format():
return memoryview(vipsImage.vipsImage.write_to_memory())
if vipsImage.vipsImage.bands == 1:
def format():
return memoryview(vipsImage.vipsImage.write_to_memory())
else:
def format():
gray = vipsImage.vipsImage.colourspace("b-w")
return memoryview(gray.write_to_memory())
return await to_thread(format)
return await to_thread(lambda: vipsImage.vipsImage.write_to_buffer('.' + options['format']))

View File

@@ -1,5 +1,9 @@
# Reolink Plugin for Scrypted
<span style="color:red">Reolink cameras should use the ONVIF plugin. This plugin is for older Reolink cameras that do not have ONVIF support or the ONVIF implementation is buggy.</span>
<span style="color:red">Reolink doorbells MUST use the ONVIF plugin. This plugin does not support two way audio or the doorbell button event.</span>
Reolink Cameras offer both RTMP and RTSP streams. RTMP streams are more reliable than RTSP on Reolink Cameras, but Scrypted highly recommends using RTSP streams if they are stable on your Reolink hardware. RTMP streams will be preferred by default. The defaults can be changed in the camera's Rebroadcast `Stream Management` settings.
Reolink Two Way Audio is not supported. It is a proprietary and undocumented protocol.

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/reolink",
"version": "0.0.21",
"version": "0.0.22",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/reolink",
"version": "0.0.21",
"version": "0.0.22",
"license": "Apache",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/reolink",
"version": "0.0.21",
"version": "0.0.22",
"description": "Reolink Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",

View File

@@ -51,9 +51,11 @@ class ReolinkCamera extends RtspSmartCamera implements Camera {
(async () => {
while (!killed) {
try {
// const ai = await client.getAiState();
// ret.emit('data', JSON.stringify(ai));
const { value, data } = await client.getMotionState();
this.motionDetected = value;
ret.emit('data', data);
ret.emit('data', JSON.stringify(data));
}
catch (e) {
ret.emit('error', e);
@@ -116,14 +118,17 @@ class ReolinkCamera extends RtspSmartCamera implements Camera {
}
// rough guesses for rebroadcast stream selection.
ret[0].container = 'rtmp';
ret[0].video = {
width: 2560,
height: 1920,
}
ret[1].container = 'rtmp';
ret[1].video = {
width: 896,
height: 672,
}
ret[2].container = 'rtmp';
ret[2].video = {
width: 640,
height: 480,
@@ -147,6 +152,28 @@ class ReolinkCamera extends RtspSmartCamera implements Camera {
});
}
// rough guesses for h264
ret[3].container = 'rtsp';
ret[3].video = {
codec: 'h264',
width: 2560,
height: 1920,
}
ret[4].container = 'rtsp';
ret[4].video = {
codec: 'h264',
width: 896,
height: 672,
}
ret[5].container = 'rtsp';
ret[5].video = {
codec: 'h265',
width: 896,
height: 672,
}
return ret;
}

View File

@@ -6,7 +6,7 @@ export const reolinkHttpsAgent = new https.Agent({
});
export async function getMotionState(digestAuth: AxiosDigestAuth, username: string, password: string, address: string, channelId: number) {
const url = new URL(`http://${address}/cgi-bin/api.cgi`);
const url = new URL(`http://${address}/api.cgi`);
const params = url.searchParams;
params.set('cmd', 'GetMdState');
params.set('channel', channelId.toString());

View File

@@ -26,6 +26,23 @@ export class ReolinkCameraClient {
return getMotionState(this.digestAuth, this.username, this.password, this.host, this.channelId);
}
async getAiState() {
const url = new URL(`http://${this.host}/api.cgi`);
const params = url.searchParams;
params.set('cmd', 'GetAiState');
params.set('channel', this.channelId.toString());
params.set('user', this.username);
params.set('password', this.password);
const response = await this.digestAuth.request({
url: url.toString(),
httpsAgent: reolinkHttpsAgent,
});
return {
value: !!response.data?.[0]?.value?.state,
data: response.data,
};
}
async jpegSnapshot() {
const url = new URL(`http://${this.host}/cgi-bin/api.cgi`);
const params = url.searchParams;

View File

@@ -1,6 +0,0 @@
.DS_Store
out/
node_modules/
dist/
.venv
all_models*

View File

@@ -1,15 +0,0 @@
.DS_Store
out/
node_modules/
*.map
fs
src
.vscode
dist/*.js
dist/*.txt
__pycache__
all_models
sort_oh
download_models.sh
tsconfig.json
.venv

View File

@@ -1,30 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Scrypted Debugger",
"type": "python",
"request": "attach",
"connect": {
"host": "${config:scrypted.debugHost}",
"port": 10081
},
"justMyCode": false,
"preLaunchTask": "scrypted: deploy+debug",
"pathMappings": [
{
"localRoot": "/Volumes/Dev/scrypted/server/python/",
"remoteRoot": "/Volumes/Dev/scrypted/server/python/",
},
{
"localRoot": "${workspaceFolder}/src",
"remoteRoot": "${config:scrypted.pythonRemoteRoot}"
},
]
}
]
}

View File

@@ -1,21 +0,0 @@
{
// docker installation
// "scrypted.debugHost": "koushik-thin",
// "scrypted.serverRoot": "/server",
// pi local installation
// "scrypted.debugHost": "192.168.2.119",
// "scrypted.serverRoot": "/home/pi/.scrypted",
// local checkout
"scrypted.debugHost": "127.0.0.1",
"scrypted.serverRoot": "/Users/koush/.scrypted",
// "scrypted.debugHost": "koushik-windows",
// "scrypted.serverRoot": "C:\\Users\\koush\\.scrypted",
"scrypted.pythonRemoteRoot": "${config:scrypted.serverRoot}/volume/plugin.zip",
"python.analysis.extraPaths": [
"./node_modules/@scrypted/sdk/types/scrypted_python"
]
}

View File

@@ -1,20 +0,0 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "scrypted: deploy+debug",
"type": "shell",
"presentation": {
"echo": true,
"reveal": "silent",
"focus": false,
"panel": "shared",
"showReuseMessage": true,
"clear": false
},
"command": "npm run scrypted-vscode-launch ${config:scrypted.debugHost}",
},
]
}

View File

@@ -1,3 +0,0 @@
# SORT Object Tracker for Scrypted
This provides object tracking capabilities for the Video Analysis plugin.

View File

@@ -1,86 +0,0 @@
{
"name": "@scrypted/tensorflow-lite",
"version": "0.0.3",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tensorflow-lite",
"version": "0.0.3",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.68",
"dev": true,
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
"ncp": "^2.0.0",
"raw-loader": "^4.0.2",
"rimraf": "^3.0.2",
"tmp": "^0.2.1",
"ts-loader": "^9.4.2",
"typescript": "^4.9.4",
"webpack": "^5.75.0",
"webpack-bundle-analyzer": "^4.5.0"
},
"bin": {
"scrypted-changelog": "bin/scrypted-changelog.js",
"scrypted-debug": "bin/scrypted-debug.js",
"scrypted-deploy": "bin/scrypted-deploy.js",
"scrypted-deploy-debug": "bin/scrypted-deploy-debug.js",
"scrypted-package-json": "bin/scrypted-package-json.js",
"scrypted-setup-project": "bin/scrypted-setup-project.js",
"scrypted-webpack": "bin/scrypted-webpack.js"
},
"devDependencies": {
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"stringify-object": "^3.3.0",
"ts-node": "^10.4.0",
"typedoc": "^0.23.21"
}
},
"../sdk": {
"extraneous": true
},
"node_modules/@scrypted/sdk": {
"resolved": "../../sdk",
"link": true
}
},
"dependencies": {
"@scrypted/sdk": {
"version": "file:../../sdk",
"requires": {
"@babel/preset-typescript": "^7.18.6",
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
"ncp": "^2.0.0",
"raw-loader": "^4.0.2",
"rimraf": "^3.0.2",
"stringify-object": "^3.3.0",
"tmp": "^0.2.1",
"ts-loader": "^9.4.2",
"ts-node": "^10.4.0",
"typedoc": "^0.23.21",
"typescript": "^4.9.4",
"webpack": "^5.75.0",
"webpack-bundle-analyzer": "^4.5.0"
}
}
}
}

View File

@@ -1,43 +0,0 @@
{
"name": "@scrypted/sort-tracker",
"description": "Scrypted SORT Object Tracker",
"keywords": [
"scrypted",
"plugin",
"coral",
"tpu",
"edge",
"motion",
"object",
"detect",
"detection",
"people",
"person"
],
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",
"build": "scrypted-webpack",
"prepublishOnly": "NODE_ENV=production scrypted-webpack",
"prescrypted-vscode-launch": "scrypted-webpack",
"scrypted-vscode-launch": "scrypted-deploy-debug",
"scrypted-deploy-debug": "scrypted-deploy-debug",
"scrypted-debug": "scrypted-debug",
"scrypted-deploy": "scrypted-deploy",
"scrypted-readme": "scrypted-readme",
"scrypted-package-json": "scrypted-package-json"
},
"scrypted": {
"name": "SORT Object Tracker",
"runtime": "python",
"type": "API",
"interfaces": [
"Settings",
"ObjectTracker"
]
},
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.3"
}

View File

@@ -1,66 +0,0 @@
from __future__ import annotations
from sort_oh import tracker
import scrypted_sdk
from scrypted_sdk.types import (ObjectDetectionResult)
import numpy as np
from rectangle import Rectangle, intersect_area
def create_scrypted_plugin():
return SortOHTracker()
class SortOHTracker(scrypted_sdk.ObjectTracker):
def __init__(self) -> None:
super().__init__()
self.trackers = {}
def trackObjects(self, ret: scrypted_sdk.ObjectsDetected):
detections = ret['detections']
id = ret['detectionId']
detectionTracker = self.trackers.get(id)
iw, ih = ret['inputDimensions']
if not detectionTracker:
detectionTracker = tracker.Sort_OH(scene=np.array([iw, ih]))
# t.conf_three_frame_certainty = (settings.get('trackerCertainty') or .2) * 3
# t.conf_unmatched_history_size = settings.get('trackerWindow') or 3
self.trackers[id] = detectionTracker
sort_input = []
for d in detections:
r: ObjectDetectionResult = d
l, t, w, h = r['boundingBox']
sort_input.append([l, t, l + w, t + h, r['score']])
trackers, unmatched_trckr, unmatched_gts = detectionTracker.update(
np.array(sort_input), [])
for td in trackers:
x0, y0, x1, y1, trackID = td[0].item(), td[1].item(
), td[2].item(), td[3].item(), td[4].item()
slop = 0
obj: ObjectDetectionResult = None
ta = (x1 - x0) * (y1 - y0)
box = Rectangle(x0, y0, x1, y1)
for d in detections:
if d.get('id'):
continue
ob: ObjectDetectionResult = d
dx0, dy0, dw, dh = ob['boundingBox']
dx1 = dx0 + dw
dy1 = dy0 + dh
da = dw * dh
area = intersect_area(Rectangle(dx0, dy0, dx1, dy1), box)
if not area:
continue
# intersect area always gonna be smaller than
# the detection or tracker area.
# greater numbers, ie approaching 2, is better.
dslop = area / ta + area / da
if (dslop > slop):
slop = dslop
obj = ob
if obj:
obj['id'] = str(trackID)
return ret

View File

@@ -1 +0,0 @@
../../tensorflow-lite/src/predict/rectangle.py

View File

@@ -1,3 +0,0 @@
numpy>=1.16.2
scipy
filterpy

View File

@@ -1 +0,0 @@
../sort_oh/libs/

View File

@@ -1,13 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "ES2021",
"resolveJsonModule": true,
"moduleResolution": "Node16",
"esModuleInterop": true,
"sourceMap": true
},
"include": [
"src/**/*"
]
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/tensorflow-lite",
"version": "0.1.9",
"version": "0.1.12",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tensorflow-lite",
"version": "0.1.9",
"version": "0.1.12",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -44,5 +44,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.9"
"version": "0.1.12"
}

View File

@@ -55,6 +55,7 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
try:
videoFrames = await scrypted_sdk.sdk.connectRPCObject(videoFrames)
async for videoFrame in videoFrames:
videoFrame = await scrypted_sdk.sdk.connectRPCObject(videoFrame)
detected = await self.run_detection_videoframe(videoFrame, session)
yield {
'__json_copy_serialize_children': True,

View File

@@ -147,27 +147,9 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
pass
def getModelSettings(self, settings: Any = None) -> list[Setting]:
allowList: Setting = {
'title': 'Detections Types',
# 'subgroup': 'Advanced',
'description': 'The detections that will be reported. If none are specified, all detections will be reported. Select only detection types of interest for optimal performance.',
'choices': self.getClasses(),
'multiple': True,
'key': 'allowList',
'value': [
'person',
'dog',
'cat',
'car',
'truck',
'bus',
'motorcycle',
],
}
return []
return [allowList]
def create_detection_result(self, objs: List[Prediction], size, allowList, convert_to_src_size=None) -> ObjectsDetected:
def create_detection_result(self, objs: List[Prediction], size, convert_to_src_size=None) -> ObjectsDetected:
detections: List[ObjectDetectionResult] = []
detection_result: ObjectsDetected = {}
detection_result['detections'] = detections
@@ -175,8 +157,6 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
for obj in objs:
className = self.labels.get(obj.id, obj.id)
if allowList and len(allowList) and className not in allowList:
continue
detection: ObjectDetectionResult = {}
detection['boundingBox'] = (
obj.bbox.xmin, obj.bbox.ymin, obj.bbox.xmax - obj.bbox.xmin, obj.bbox.ymax - obj.bbox.ymin)

View File

@@ -133,6 +133,5 @@ class TensorFlowLitePlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted
objs = await asyncio.get_event_loop().run_in_executor(self.executor, predict)
allowList = settings.get('allowList', None) if settings else None
ret = self.create_detection_result(objs, src_size, allowList, cvss)
ret = self.create_detection_result(objs, src_size, cvss)
return ret

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/tensorflow-lite",
"version": "0.1.9",
"version": "0.1.12",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tensorflow-lite",
"version": "0.1.9",
"version": "0.1.12",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -41,5 +41,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.9"
"version": "0.1.12"
}

View File

@@ -86,6 +86,5 @@ class TensorFlowPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk
))
objs.append(obj)
allowList = settings.get('allowList', None) if settings else None
ret = self.create_detection_result(objs, src_size, allowList, cvss)
ret = self.create_detection_result(objs, src_size, cvss)
return ret

View File

@@ -10,7 +10,7 @@
"port": 10081,
"request": "attach",
"skipFiles": [
"**/plugin-remote-worker.*",
"**/plugin-console.*",
"<node_internals>/**"
],
"preLaunchTask": "scrypted: deploy+debug",

View File

@@ -1,4 +1,4 @@
{
"scrypted.debugHost": "koushik-ubuntu",
"scrypted.debugHost": "127.0.0.1",
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/webrtc",
"version": "0.1.41",
"version": "0.1.42",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/webrtc",
"version": "0.1.41",
"version": "0.1.42",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/webrtc",
"version": "0.1.41",
"version": "0.1.42",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -454,14 +454,12 @@ export async function startRtpForwarderProcess(console: Console, ffmpegInput: FF
if (!allowAudioTranscoderExit)
killDeferred.resolve(undefined);
});
killDeferred.promise.finally(() => safeKillFFmpeg(cp));
if (pipeSdp) {
const pipe = cp.stdio[3] as Writable;
pipe.write(pipeSdp);
pipe.end();
}
ffmpegLogInitialOutput(console, cp);
killDeferred.promise.finally(() => safeKillFFmpeg(cp));
if (useRtp) {
cp.stdio[4].on('data', data => {

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/zwave",
"version": "0.0.56",
"version": "0.1.2",
"description": "Z-Wave USB Controller for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -37,9 +37,9 @@
"lodash": "^4.17.21"
},
"devDependencies": {
"@types/node": "^16.7.1"
"@types/node": "^18.15.11"
},
"optionalDependencies": {
"zwave-js": "^10.3.0"
"zwave-js": "^10.14.1"
}
}

View File

@@ -1,25 +1,19 @@
import { EntrySensor } from "@scrypted/sdk";
import type { ValueID } from "@zwave-js/core";
import { BarrierState } from "zwave-js";
import { ZwaveDeviceBase } from "./ZwaveDeviceBase";
export class EntrySensorToBarriorOperator extends ZwaveDeviceBase implements EntrySensor {
static updateState(zwaveDevice: ZwaveDeviceBase, valueId: ValueID) {
let currentValue: BarrierState
let currentValue: number;
currentValue = zwaveDevice.getValue(valueId);
switch (currentValue) {
case BarrierState.Closed:
case 0:
zwaveDevice.entryOpen = false;
break;
case BarrierState.Closing:
case BarrierState.Opening:
case BarrierState.Open:
case BarrierState.Stopped:
case 100:
zwaveDevice.entryOpen = true;
break;
default:
zwaveDevice.entryOpen = false;
}
}
}

View File

@@ -15,7 +15,7 @@ export class EntryToBarrierOperator extends ZwaveDeviceBase implements Entry {
this.entryOpen = (await cc.get()).currentState !== BarrierState.Closed;
}
static updateState(zwaveDevice: ZwaveDeviceBase, valueId: ValueID) {
zwaveDevice.entryOpen = zwaveDevice.getValue(valueId) !== 'Closed';
zwaveDevice.entryOpen = zwaveDevice.getValue(valueId) !== BarrierState.Closed;
}
}

View File

@@ -341,6 +341,9 @@ export class ZwaveControllerProvider extends ScryptedDeviceBase implements Devic
return this.devices[nativeId];
}
async releaseDevice(id: string, nativeId: string): Promise<void> {
}
_addType(scryptedDevice: ZwaveDeviceBase, instance: Endpoint, type: CommandClassInfo, valueId: ValueID) {
var interfaces = type.getInterfaces(instance.getNodeUnsafe(), valueId);
if (!interfaces) {

4
sdk/package-lock.json generated
View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/sdk",
"version": "0.2.87",
"version": "0.2.97",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/sdk",
"version": "0.2.87",
"version": "0.2.97",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/sdk",
"version": "0.2.87",
"version": "0.2.97",
"description": "",
"main": "dist/src/index.js",
"exports": {

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/types",
"version": "0.2.79",
"version": "0.2.87",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/types",
"version": "0.2.79",
"version": "0.2.87",
"license": "ISC",
"devDependencies": {
"@types/rimraf": "^3.0.2",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/types",
"version": "0.2.79",
"version": "0.2.87",
"description": "",
"main": "dist/index.js",
"author": "",

View File

@@ -251,6 +251,7 @@ class ObjectDetectionResult(TypedDict):
className: str
history: ObjectDetectionHistory
id: str
movement: Any
name: str
resources: VideoResource
score: float
@@ -502,6 +503,7 @@ class ObjectDetectionModel(TypedDict):
inputFormat: Any | Any | Any
inputSize: list[float]
name: str
prebuffer: float
settings: list[Setting]
triggerClasses: list[str]
pass
@@ -520,10 +522,8 @@ class ObjectDetectionTypes(TypedDict):
class ObjectsDetected(TypedDict):
detectionId: str
detections: list[ObjectDetectionResult]
eventId: Any
inputDimensions: tuple[float, float]
resources: VideoResource
running: bool
timestamp: float
pass
@@ -670,7 +670,7 @@ class Setting(TypedDict):
readonly: bool
subgroup: str
title: str
type: Any | Any | Any | Any | Any | Any | Any | Any | Any | Any | Any
type: Any | Any | Any | Any | Any | Any | Any | Any | Any | Any | Any | Any | Any | Any
value: SettingValue
pass
@@ -1381,6 +1381,108 @@ class ScryptedInterfaceProperty(Enum):
fan = "fan"
applicationInfo = "applicationInfo"
class ScryptedInterfaceMethods(Enum):
listen = "listen"
probe = "probe"
setMixins = "setMixins"
setName = "setName"
setRoom = "setRoom"
setType = "setType"
getPluginJson = "getPluginJson"
turnOff = "turnOff"
turnOn = "turnOn"
setBrightness = "setBrightness"
getTemperatureMaxK = "getTemperatureMaxK"
getTemperatureMinK = "getTemperatureMinK"
setColorTemperature = "setColorTemperature"
setRgb = "setRgb"
setHsv = "setHsv"
sendNotification = "sendNotification"
start = "start"
stop = "stop"
pause = "pause"
resume = "resume"
dock = "dock"
setTemperature = "setTemperature"
setThermostatMode = "setThermostatMode"
setThermostatSetpoint = "setThermostatSetpoint"
setThermostatSetpointHigh = "setThermostatSetpointHigh"
setThermostatSetpointLow = "setThermostatSetpointLow"
setTemperatureUnit = "setTemperatureUnit"
getPictureOptions = "getPictureOptions"
takePicture = "takePicture"
getAudioStream = "getAudioStream"
startDisplay = "startDisplay"
stopDisplay = "stopDisplay"
getVideoStream = "getVideoStream"
getVideoStreamOptions = "getVideoStreamOptions"
getRecordingStream = "getRecordingStream"
getRecordingStreamCurrentTime = "getRecordingStreamCurrentTime"
getRecordingStreamOptions = "getRecordingStreamOptions"
getRecordingStreamThumbnail = "getRecordingStreamThumbnail"
ptzCommand = "ptzCommand"
getRecordedEvents = "getRecordedEvents"
getVideoClip = "getVideoClip"
getVideoClipThumbnail = "getVideoClipThumbnail"
getVideoClips = "getVideoClips"
removeVideoClips = "removeVideoClips"
setVideoStreamOptions = "setVideoStreamOptions"
startIntercom = "startIntercom"
stopIntercom = "stopIntercom"
lock = "lock"
unlock = "unlock"
addPassword = "addPassword"
getPasswords = "getPasswords"
removePassword = "removePassword"
activate = "activate"
deactivate = "deactivate"
isReversible = "isReversible"
closeEntry = "closeEntry"
openEntry = "openEntry"
getDevice = "getDevice"
releaseDevice = "releaseDevice"
adoptDevice = "adoptDevice"
discoverDevices = "discoverDevices"
createDevice = "createDevice"
getCreateDeviceSettings = "getCreateDeviceSettings"
getRefreshFrequency = "getRefreshFrequency"
refresh = "refresh"
getMediaStatus = "getMediaStatus"
load = "load"
seek = "seek"
skipNext = "skipNext"
skipPrevious = "skipPrevious"
convert = "convert"
getSettings = "getSettings"
putSetting = "putSetting"
armSecuritySystem = "armSecuritySystem"
disarmSecuritySystem = "disarmSecuritySystem"
getReadmeMarkdown = "getReadmeMarkdown"
getOauthUrl = "getOauthUrl"
onOauthCallback = "onOauthCallback"
canMixin = "canMixin"
getMixin = "getMixin"
releaseMixin = "releaseMixin"
onRequest = "onRequest"
onConnection = "onConnection"
onPush = "onPush"
run = "run"
eval = "eval"
loadScripts = "loadScripts"
saveScript = "saveScript"
trackObjects = "trackObjects"
getDetectionInput = "getDetectionInput"
getObjectTypes = "getObjectTypes"
detectObjects = "detectObjects"
generateObjectDetections = "generateObjectDetections"
getDetectionModel = "getDetectionModel"
setHumidity = "setHumidity"
setFan = "setFan"
startRTCSignalingSession = "startRTCSignalingSession"
createRTCSignalingSession = "createRTCSignalingSession"
getScryptedUserAccessControl = "getScryptedUserAccessControl"
generateVideoFrames = "generateVideoFrames"
class DeviceState:
def getScryptedProperty(self, property: str) -> Any:
pass

View File

@@ -195,6 +195,14 @@ for (const val of properties) {
`;
}
python += `
class ScryptedInterfaceMethods(Enum):
`
for (const val of methods) {
python += ` ${val} = "${val}"
`;
}
python += `
class DeviceState:
def getScryptedProperty(self, property: str) -> Any:

View File

@@ -1255,23 +1255,17 @@ export interface ObjectDetectionResult extends BoundingBoxResult {
name?: string;
score: number;
resources?: VideoResource;
/**
* Movement history will track the first/last time this object was moving.
*/
movement?: ObjectDetectionHistory & { moving?: boolean; };
}
export interface ObjectsDetected {
/**
* Object detection session state. Will be true if processing video, until
* the video ends or is timed out.
*/
running?: boolean;
detections?: ObjectDetectionResult[];
/**
* The id for the detection session.
*/
detectionId?: string;
/**
* The id for this specific event/frame within a detection video session.
* Will be undefined for single image detections.
*/
eventId?: any;
inputDimensions?: [number, number],
timestamp: number;
resources?: VideoResource;
@@ -1315,6 +1309,7 @@ export interface ObjectDetectionModel extends ObjectDetectionTypes {
inputFormat?: 'gray' | 'rgb' | 'rgba';
settings: Setting[];
triggerClasses?: string[];
prebuffer?: number;
}
export interface ObjectDetectionCallbacks {
onDetection(detection: ObjectsDetected, redetect?: (boundingBox: [number, number, number, number]) => Promise<ObjectDetectionResult[]>, mediaObject?: MediaObject): Promise<boolean>;
@@ -1687,6 +1682,16 @@ export interface SystemManager {
*/
getDeviceById<T>(id: string): ScryptedDevice & T;
/**
* Find a Scrypted device by pluginId and optionally the nativeId.
*/
getDeviceById(pluginId: string, nativeId?: ScryptedNativeId): ScryptedDevice;
/**
* Find a Scrypted device by pluginId and optionally the nativeId.
*/
getDeviceById<T>(pluginId: string, nativeId?: ScryptedNativeId): ScryptedDevice & T;
/**
* Find a Scrypted device by name.
*/
@@ -1836,7 +1841,7 @@ export interface Setting {
subgroup?: string;
description?: string;
placeholder?: string;
type?: 'string' | 'password' | 'number' | 'boolean' | 'device' | 'integer' | 'button' | 'clippath' | 'interface' | 'qrcode' | 'textarea';
type?: 'string' | 'password' | 'number' | 'boolean' | 'device' | 'integer' | 'button' | 'clippath' | 'interface' | 'qrcode' | 'textarea' | 'date' | 'time' | 'datetime';
/**
* The range of allowed numbers, if any, when the type is 'number'.
*/

View File

@@ -1,16 +1,16 @@
{
"name": "@scrypted/server",
"version": "0.7.53",
"version": "0.7.81",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/server",
"version": "0.7.53",
"version": "0.7.81",
"license": "ISC",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10",
"@scrypted/types": "^0.2.79",
"@scrypted/types": "^0.2.87",
"adm-zip": "^0.5.10",
"axios": "^0.21.4",
"body-parser": "^1.20.2",
@@ -128,9 +128,9 @@
}
},
"node_modules/@scrypted/types": {
"version": "0.2.79",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.79.tgz",
"integrity": "sha512-rlKQKCLGMEQlK8zdYmehd6s/B4NrKnbtjnJ5OszOk4vpNodfesIwx2g7r/+FJ5zPQJo0qZUjCaBhs0uzJoy/tA=="
"version": "0.2.87",
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.87.tgz",
"integrity": "sha512-sn6KsoY2PJCRhLK4ncnq7tbeZPXWR7lnUIRYs8bUipmt84eWBZo6Tave9OAZ0IoCGTAkR3kr3kFKet6L6cAWdQ=="
},
"node_modules/@tootallnate/once": {
"version": "1.1.2",

View File

@@ -1,10 +1,10 @@
{
"name": "@scrypted/server",
"version": "0.7.53",
"version": "0.7.81",
"description": "",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10",
"@scrypted/types": "^0.2.79",
"@scrypted/types": "^0.2.87",
"adm-zip": "^0.5.10",
"axios": "^0.21.4",
"body-parser": "^1.20.2",

Some files were not shown because too many files have changed in this diff Show More