Mirror of https://github.com/koush/scrypted.git, synced 2026-02-03 22:23:27 +00:00.
Compare commits: 185 commits.
Commit SHA1s:
5de67fca86 98dc0b1b6d a05595ecc7 87be4648f1 60e51adb41 ace7720fe1 b9eb74d403 fb7353383d bee119b486 0b6ffc2b87
3863527b4d 51c48f4a1c 4c138e9b4c e762c305a3 5bce335288 8201e9883a 74e5884285 9cffd9ffbe d8b617f2ae aeb564aa5d
45f672883a c0ff857a1b 64f7e31f54 6b55f8876e 718a31f2c5 c1e1d50fa5 75c4a1939f 0d703c2aff 0a6e4fda75 4c2de9e443
b8a4fedf1a 79d9f1d4a1 983213c578 7dd3d71ebd 493f8deeef b29f2d5ee1 96bda10123 3294700d31 0cf77d4c76 953841e3a5
393c1017df f50176d14a 7f2bf0b542 9e3990400c 95eed80735 be43d0c017 386ea9a98a 9b40978f61 f0ee435cd0 30748784ef
8310e33719 1d18697161 d500b3fd6c 95ae916b6c ec3e16f20f 30d28f543c e0cce24999 409b25f8b0 8f278abec8 d6179dab82
ed186e2142 3c021bb2c8 c522edc622 022a103bcb efd125b6e4 19f7688a65 7f18e4629c dfe2c937a1 47d7a23a3d 0ea609c80c
71ee5727f1 2383f16112 7d5defd736 cbf4cf0579 422dd94e5c 076f5e27f1 645de2e5fd dcf24a77d7 7065365a47 b82520776e
638c1f77fd 73a489ea37 77d69f025a 3bc14ad248 03e5a9dec1 57b790c332 ce2ea63be7 2dd4721b7f 667075dfad 7abdb06b66
43e5822c93 bc579514e7 825100f94e 803bfc1560 b2013a54ed f252407935 516f2a2a7b c1677ce691 5028fb812d 2db4e2579f
b339ca6cd2 f100999cb1 2863756bd6 cc408850a0 ed1ceeda51 df09d8e92a 298ac960d1 62d4d55aae a2121c0dc5 9b5ea27c0b
0b0e90fc04 d8aff609bf d8283c261a e3aca964be a96025c45f 6afd4b4579 f97669949d 0a0a31574f 90fb751a22 b8d06fada5
2cecb1686f db03775530 cccbc33f1a 5f23873366 e43accae67 b3a0cda6f9 58c3348282 a9e6d76e99 3b58936387 3a14ab81c8
291178a7b5 b65faf1a79 9d8a1353c0 b29d793178 d8e406d415 4529872fd6 fa86c31340 94ded75d40 887b61cd7a 48e3d30987
02dba3cd71 195769034d 39c08aa378 fa8056d38e 145f116c68 15b6f336e4 8b46f0a466 a20cc5cd89 3d068929fd 928f9b7579
c1c5a42645 12643cdde2 0bff96a6e6 4e7e67de54 65c4a30004 309a1dc11f b7904b73b2 9e9ddbc5f3 ceda54f91b 1d4052b839
6a5d6e6617 f55cc6066f 527714e434 8a1633ffa3 56b2ab9c4f d330e2eb9d b55e7cacb3 c70375db06 2c23021d40 84a4ef4539
7f3db0549b de0e1784a3 5a8798638e 14da49728c 55423b2d09 596106247b 5472d90368 fcf58413fc 0d03b91753 2fd088e4d6
c6933198b2 210e684a22 53cc4b6ef3 d58d138a68 c0199a2b76
.gitmodules (vendored), 3 changes
@@ -32,9 +32,6 @@
[submodule "plugins/sample-cameraprovider"]
    path = plugins/sample-cameraprovider
    url = ../../koush/scrypted-sample-cameraprovider
[submodule "plugins/tensorflow-lite/sort_oh"]
    path = plugins/sort-tracker/sort_oh
    url = ../../koush/sort_oh.git
[submodule "plugins/cloud/node-nat-upnp"]
    path = plugins/cloud/node-nat-upnp
    url = ../../koush/node-nat-upnp.git
@@ -1,15 +1,17 @@
export class Deferred<T> {
    finished = false;
    resolve!: (value: T|PromiseLike<T>) => void;
    reject!: (error: Error) => void;
    resolve!: (value: T|PromiseLike<T>) => this;
    reject!: (error: Error) => this;
    promise: Promise<T> = new Promise((resolve, reject) => {
        this.resolve = v => {
            this.finished = true;
            resolve(v);
            return this;
        };
        this.reject = e => {
            this.finished = true;
            reject(e);
            return this;
        };
    });
}
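The Deferred change above makes resolve and reject return the Deferred itself instead of void. A minimal usage sketch, assuming Deferred is imported from the common package (the import path is illustrative):

    import { Deferred } from './deferred'; // illustrative path

    async function example() {
        const done = new Deferred<string>();
        // resolve() and reject() now return the Deferred rather than void,
        // so a handler can settle it and hand it back in a single expression.
        setTimeout(() => done.resolve('finished'), 100);
        return done.promise;
    }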
@@ -361,8 +361,7 @@ export interface RebroadcasterOptions {
    },
}

export async function handleRebroadcasterClient(duplex: Promise<Duplex> | Duplex, options?: RebroadcasterOptions) {
    const socket = await duplex;
export function handleRebroadcasterClient(socket: Duplex, options?: RebroadcasterOptions) {
    const firstWriteData = (data: StreamChunk) => {
        if (data.startStream) {
            socket.write(data.startStream)
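handleRebroadcasterClient now takes a resolved Duplex rather than a Promise<Duplex>, so the await moves to the call site. A sketch of the adjusted caller, assuming clientPromise and options already exist in scope:

    // Previously the promise could be passed straight through:
    // handleRebroadcasterClient(clientPromise, options);
    // With the new signature, the caller resolves the socket first.
    const socket = await clientPromise;
    handleRebroadcasterClient(socket, options);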
@@ -62,4 +62,4 @@ export async function bind(server: dgram.Socket, port: number) {
    }
}

export { listenZero, listenZeroSingleClient } from "@scrypted/server/src/listen-zero";
export { listenZero, listenZeroSingleClient, ListenZeroSingleClientTimeoutError } from "@scrypted/server/src/listen-zero";
@@ -250,7 +250,8 @@ export class BrowserSignalingSession implements RTCSignalingSession {
function logSendCandidate(console: Console, type: string, session: RTCSignalingSession): RTCSignalingSendIceCandidate {
    return async (candidate) => {
        try {
            console.log(`${type} trickled candidate:`, candidate.sdpMLineIndex, candidate.candidate);
            if (localStorage.getItem('debugLog') === 'true')
                console.log(`${type} trickled candidate:`, candidate.sdpMLineIndex, candidate.candidate);
            await session.addIceCandidate(candidate);
        }
        catch (e) {
@@ -297,7 +298,7 @@ export async function connectRTCSignalingClients(
    if (offerOptions?.offer && answerOptions?.offer)
        throw new Error('Both RTC clients have offers and can not negotiate. Consider implementing this in @scrypted/webrtc.');

    if (offerOptions?.requiresOffer && answerOptions.requiresOffer)
    if (offerOptions?.requiresOffer && answerOptions?.requiresOffer)
        throw new Error('Both RTC clients require offers and can not negotiate.');

    offerSetup.type = 'offer';
@@ -308,11 +309,13 @@ export async function connectRTCSignalingClients(

    const offer = await offerClient.createLocalDescription('offer', offerSetup as RTCAVSignalingSetup,
        disableTrickle ? undefined : answerQueue.queueSendCandidate);
    console.log('offer sdp', offer.sdp);
    if (localStorage.getItem('debugLog') === 'true')
        console.log('offer sdp', offer.sdp);
    await answerClient.setRemoteDescription(offer, answerSetup as RTCAVSignalingSetup);
    const answer = await answerClient.createLocalDescription('answer', answerSetup as RTCAVSignalingSetup,
        disableTrickle ? undefined : offerQueue.queueSendCandidate);
    console.log('answer sdp', answer.sdp);
    if (localStorage.getItem('debugLog') === 'true')
        console.log('answer sdp', answer.sdp);
    await offerClient.setRemoteDescription(answer, offerSetup as RTCAVSignalingSetup);
    offerQueue.flush();
    answerQueue.flush();
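Both signaling hunks above gate the SDP and trickled-candidate logging behind a 'debugLog' flag read from localStorage. A sketch of toggling it from the browser console:

    // Enable verbose WebRTC signaling logs for this origin.
    localStorage.setItem('debugLog', 'true');
    // Turn them off again.
    localStorage.removeItem('debugLog');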
@@ -129,6 +129,16 @@ export function getNaluTypes(streamChunk: StreamChunk) {
    return getNaluTypesInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12))
}

export function getNaluFragmentInformation(nalu: Buffer) {
    const naluType = nalu[0] & 0x1f;
    const fua = naluType === H264_NAL_TYPE_FU_A;
    return {
        fua,
        fuaStart: fua && !!(nalu[1] & 0x80),
        fuaEnd: fua && !!(nalu[1] & 0x40),
    }
}

export function getNaluTypesInNalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
    const ret = new Set<number>();
    const naluType = nalu[0] & 0x1f;
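getNaluFragmentInformation above inspects the H.264 FU-A (fragmentation unit) indicator and the start/end bits of the FU header. A hedged usage sketch on an RTP packet, mirroring the 12-byte header offset used by getNaluTypes above; the rtp variable is illustrative:

    // rtp is a Buffer holding a full RTP packet; the H.264 payload starts after the 12-byte header.
    const { fua, fuaStart, fuaEnd } = getNaluFragmentInformation(rtp.subarray(12));
    if (!fua || fuaStart) {
        // Either a complete NALU or the first fragment of one, so a new NALU begins here.
    }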
@@ -671,7 +681,7 @@ export class RtspClient extends RtspBase {
        });
    }

    async setup(options: RtspClientTcpSetupOptions | RtspClientUdpSetupOptions) {
    async setup(options: RtspClientTcpSetupOptions | RtspClientUdpSetupOptions, headers?: Headers) {
        const protocol = options.type === 'udp' ? '' : '/TCP';
        const client = options.type === 'udp' ? 'client_port' : 'interleaved';
        let port: number;
@@ -687,9 +697,9 @@ export class RtspClient extends RtspBase {
            port = options.dgram.address().port;
            options.dgram.on('message', data => options.onRtp(undefined, data));
        }
        const headers: any = {
        headers = Object.assign({
            Transport: `RTP/AVP${protocol};unicast;${client}=${port}-${port + 1}`,
        };
        }, headers);
        const response = await this.request('SETUP', headers, options.path);
        let interleaved: {
            begin: number;
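setup now merges caller-supplied headers on top of the generated Transport header via Object.assign. A sketch of passing an extra SETUP header; setupOptions and the Require header shown are illustrative, not required by the client:

    // setupOptions is an RtspClientTcpSetupOptions or RtspClientUdpSetupOptions built elsewhere.
    await rtspClient.setup(setupOptions, {
        Require: 'onvif-replay', // illustrative extra header; Transport is still filled in by setup()
    });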
@@ -63,7 +63,7 @@ RUN apt-get -y install \
# which causes weird behavior in python which looks at the arch version
# which still reports 64bit, even if running in 32bit docker.
# this scenario is not supported and will be reported at runtime.
RUN if [ "$(uname -m)" = "armv7l" ]; \
RUN if [ "$(uname -m)" != "x86_64" ]; \
    then \
    apt-get -y install \
    python3-matplotlib \
@@ -95,7 +95,8 @@ ENV SCRYPTED_INSTALL_PATH="/server"

# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230322
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=full

################################################################
# End section generated from template/Dockerfile.full.footer
@@ -42,4 +42,5 @@ ENV SCRYPTED_INSTALL_PATH="/server"

# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230322
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=lite
@@ -21,4 +21,5 @@ ENV SCRYPTED_INSTALL_PATH="/server"

# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230322
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=thin
@@ -90,4 +90,4 @@ services:
# Must match the port in the auto update url above.
- 10444:8080
# check for updates once an hour (interval is in seconds)
command: --interval 3600 --cleanup
command: --interval 3600 --cleanup --scope scrypted
@@ -1,7 +1,7 @@
[server]
#host-name=
use-ipv4=yes
use-ipv6=no
use-ipv6=yes
enable-dbus=yes
ratelimit-interval-usec=1000000
ratelimit-burst=1000
@@ -14,4 +14,4 @@ rlimit-core=0
rlimit-data=4194304
rlimit-fsize=0
rlimit-nofile=768
rlimit-stack=4194304
rlimit-stack=4194304
@@ -44,51 +44,25 @@ RUN_IGNORE brew install node@18
RUN brew install libvips
# dlib
RUN brew install cmake
# gstreamer plugins
RUN_IGNORE brew install gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-plugins-ugly
# gst python bindings
RUN_IGNORE brew install gst-python
# python image library
# todo: consider removing this
RUN_IGNORE brew install pillow

### HACK WORKAROUND
### https://github.com/koush/scrypted/issues/544

brew unpin gstreamer
brew unpin gst-python
brew unpin gst-plugins-ugly
brew unpin gst-plugins-good
brew unpin gst-plugins-base
brew unpin gst-plugins-good
brew unpin gst-plugins-bad
brew unpin gst-plugins-ugly
brew unpin gst-libav

brew unlink gstreamer
brew unlink gst-python
brew unlink gst-plugins-ugly
brew unlink gst-plugins-good
brew unlink gst-plugins-base
brew unlink gst-plugins-bad
brew unlink gst-libav

curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gstreamer.rb && brew install ./gstreamer.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-python.rb && brew install ./gst-python.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-plugins-ugly.rb && brew install ./gst-plugins-ugly.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-plugins-good.rb && brew install ./gst-plugins-good.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-plugins-base.rb && brew install ./gst-plugins-base.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-plugins-bad.rb && brew install ./gst-plugins-bad.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-libav.rb && brew install ./gst-libav.rb

brew pin gstreamer
brew pin gst-python
brew pin gst-plugins-ugly
brew pin gst-plugins-good
brew pin gst-plugins-base
brew pin gst-plugins-bad
brew pin gst-libav
brew unpin gst-python

### END HACK WORKAROUND

# gstreamer plugins
RUN_IGNORE brew install gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-libav
# gst python bindings
RUN_IGNORE brew install gst-python

ARCH=$(arch)
if [ "$ARCH" = "arm64" ]
then
@@ -42,7 +42,7 @@ fi
WATCHTOWER_HTTP_API_TOKEN=$(echo $RANDOM | md5sum)
DOCKER_COMPOSE_YML=$SCRYPTED_HOME/docker-compose.yml
echo "Created $DOCKER_COMPOSE_YML"
curl -s https://raw.githubusercontent.com/koush/scrypted/main/docker/docker-compose.yml | sed s/SET_THIS_TO_SOME_RANDOM_TEXT/"$(echo $RANDOM | md5sum)"/g > $DOCKER_COMPOSE_YML
curl -s https://raw.githubusercontent.com/koush/scrypted/main/docker/docker-compose.yml | sed s/SET_THIS_TO_SOME_RANDOM_TEXT/"$(echo $RANDOM | md5sum | head -c 32)"/g > $DOCKER_COMPOSE_YML

echo "Setting permissions on $SCRYPTED_HOME"
chown -R $SERVICE_USER $SCRYPTED_HOME
@@ -10,7 +10,8 @@ ENV SCRYPTED_INSTALL_PATH="/server"

# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION=20230322
ENV SCRYPTED_BASE_VERSION=20230329
ENV SCRYPTED_DOCKER_FLAVOR=full

################################################################
# End section generated from template/Dockerfile.full.footer
@@ -60,7 +60,7 @@ RUN apt-get -y install \
# which causes weird behavior in python which looks at the arch version
# which still reports 64bit, even if running in 32bit docker.
# this scenario is not supported and will be reported at runtime.
RUN if [ "$(uname -m)" = "armv7l" ]; \
RUN if [ "$(uname -m)" != "x86_64" ]; \
    then \
    apt-get -y install \
    python3-matplotlib \
external/ring-client-api (vendored), 2 changes
Submodule external/ring-client-api updated: d571cdfc00...81f6570f59
@@ -27,13 +27,6 @@ echo "sdk > npm run build"
npm run build
popd

pushd external/HAP-NodeJS
echo "external/HAP-NodeJS > npm install"
npm install
echo "external/HAP-NodeJS > npm run build"
npm run build
popd

pushd external/werift
echo "external/werift > npm install"
npm install
packages/client/package-lock.json (generated), 12 changes
@@ -1,15 +1,15 @@
{
  "name": "@scrypted/client",
  "version": "1.1.43",
  "version": "1.1.51",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "@scrypted/client",
      "version": "1.1.43",
      "version": "1.1.51",
      "license": "ISC",
      "dependencies": {
        "@scrypted/types": "^0.2.76",
        "@scrypted/types": "^0.2.80",
        "axios": "^0.25.0",
        "engine.io-client": "^6.4.0",
        "rimraf": "^3.0.2"
@@ -21,9 +21,9 @@
      }
    },
    "node_modules/@scrypted/types": {
      "version": "0.2.76",
      "resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.76.tgz",
      "integrity": "sha512-/7n8ICkXj8TGba4cHvckLCgSNsOmOGQ8I+Jd8fX9sxkthgsZhF5At8PHhHdkCDS+yfSmfXHkcqluZZOfYPkpAg=="
      "version": "0.2.80",
      "resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.80.tgz",
      "integrity": "sha512-YVu7jcD5sYgjJLP7kH1K2FJzqrlcjdpDxzZoLXudZCKiujldbmLYcwglSgnN9bRqkKZcGOfru/WssvQj+0JioQ=="
    },
    "node_modules/@socket.io/component-emitter": {
      "version": "3.1.0",
@@ -1,6 +1,6 @@
{
  "name": "@scrypted/client",
  "version": "1.1.43",
  "version": "1.1.51",
  "description": "",
  "main": "dist/packages/client/src/index.js",
  "scripts": {
@@ -17,7 +17,7 @@
    "typescript": "^4.9.5"
  },
  "dependencies": {
    "@scrypted/types": "^0.2.76",
    "@scrypted/types": "^0.2.80",
    "axios": "^0.25.0",
    "engine.io-client": "^6.4.0",
    "rimraf": "^3.0.2"
@@ -7,6 +7,7 @@ import { timeoutPromise } from "../../../common/src/promise-utils";
import { BrowserSignalingSession, waitPeerConnectionIceConnected, waitPeerIceConnectionClosed } from "../../../common/src/rtc-signaling";
import { DataChannelDebouncer } from "../../../plugins/webrtc/src/datachannel-debouncer";
import type { IOSocket } from '../../../server/src/io';
import { MediaObject } from '../../../server/src/plugin/mediaobject';
import type { MediaObjectRemote } from '../../../server/src/plugin/plugin-api';
import { attachPluginRemote } from '../../../server/src/plugin/plugin-remote';
import { RpcPeer } from '../../../server/src/rpc';
@@ -505,22 +506,7 @@ export async function connectScryptedClient(options: ScryptedClientOptions): Pro
    console.log('api attached', Date.now() - start);

    mediaManager.createMediaObject = async<T extends MediaObjectOptions>(data: any, mimeType: string, options: T) => {
        const mo: MediaObjectRemote & {
            [RpcPeer.PROPERTY_PROXY_PROPERTIES]: any,
            [RpcPeer.PROPERTY_JSON_DISABLE_SERIALIZATION]: true,
        } = {
            [RpcPeer.PROPERTY_JSON_DISABLE_SERIALIZATION]: true,
            [RpcPeer.PROPERTY_PROXY_PROPERTIES]: {
                mimeType,
                sourceId: options?.sourceId,
            },
            mimeType,
            sourceId: options?.sourceId,
            async getData() {
                return data;
            },
        };
        return mo as any;
        return new MediaObject(mimeType, data, options) as any;
    }

    const { browserSignalingSession, connectionManagementId, updateSessionId } = rpcPeer.params;
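The hunk above swaps the hand-rolled MediaObjectRemote literal for the shared MediaObject class; the createMediaObject call shape is unchanged. A sketch of client-side usage, assuming mediaManager is the media manager exposed by the connected client and that the payload and sourceId values are illustrative:

    // data, mimeType, options: same parameters as before the change.
    const mo = await mediaManager.createMediaObject(Buffer.from(jpegBytes), 'image/jpeg', {
        sourceId, // illustrative optional metadata
    });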
@@ -1,6 +1,6 @@
{
  "name": "@scrypted/alexa",
  "version": "0.2.3",
  "version": "0.2.4",
  "scripts": {
    "scrypted-setup-project": "scrypted-setup-project",
    "prescrypted-setup-project": "scrypted-package-json",
@@ -15,6 +15,11 @@ const includeToken = 4;

export let DEBUG = false;

function debug(...args: any[]) {
    if (DEBUG)
        console.debug(...args);
}

class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, MixinProvider, Settings {
    storageSettings = new StorageSettings(this, {
        tokenInfo: {
@@ -34,6 +39,14 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
            description: 'This is the endpoint Alexa will use to send events to. This is set after you login.',
            type: 'string',
            readonly: true
        },
        debug: {
            title: 'Debug Events',
            description: 'Log all events to the console. This will be very noisy and should not be left enabled.',
            type: 'boolean',
            onPut(oldValue: boolean, newValue: boolean) {
                DEBUG = newValue;
            }
        }
    });
@@ -44,6 +57,8 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
    constructor(nativeId?: string) {
        super(nativeId);

        DEBUG = this.storageSettings.values.debug ?? false;

        alexaHandlers.set('Alexa.Authorization/AcceptGrant', this.onAlexaAuthorization);
        alexaHandlers.set('Alexa.Discovery/Discover', this.onDiscoverEndpoints);
@@ -141,12 +156,23 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
        if (!supportedType)
            return;

        const report = await supportedType.sendEvent(eventSource, eventDetails, eventData);
        let report = await supportedType.sendEvent(eventSource, eventDetails, eventData);

        if (!report && eventDetails.eventInterface === ScryptedInterface.Online) {
            report = {};
        }

        if (!report && eventDetails.eventInterface === ScryptedInterface.Battery) {
            report = {};
        }

        if (!report) {
            this.console.warn(`${eventDetails.eventInterface}.${eventDetails.property} not supported for device ${eventSource.type}`);
            return;
        }

        debug("event", eventDetails.eventInterface, eventDetails.property, eventSource.type);

        let data = {
            "event": {
                "header": {
@@ -234,7 +260,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
        const endpoint = await this.getAlexaEndpoint();
        const self = this;

        this.console.assert(!DEBUG, `event:`, data);
        debug("send event to alexa", data);

        return axios.post(`https://${endpoint}/v3/events`, data, {
            headers: {
@@ -570,6 +596,8 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
        const { authorization } = request.headers;
        if (!this.validAuths.has(authorization)) {
            try {
                debug("making authorization request to Scrypted");

                await axios.get('https://home.scrypted.app/_punch/getcookie', {
                    headers: {
                        'Authorization': authorization,
@@ -590,11 +618,11 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
        const { directive } = body;
        const { namespace, name } = directive.header;

        this.console.assert(!DEBUG, `request: ${namespace}/${name}`);

        const mapName = `${namespace}/${name}`;
        const handler = alexaHandlers.get(mapName);

        debug("received directive from alexa", mapName, body);

        const handler = alexaHandlers.get(mapName);
        if (handler)
            return handler.apply(this, [request, response, directive]);

@@ -641,7 +669,7 @@ class HttpResponseLoggingImpl implements AlexaHttpResponse {
        if (options.code !== 200)
            this.console.error(`response error ${options.code}:`, body);
        else
            this.console.assert(!DEBUG, `response ${options.code}:`, body);
            debug("response to alexa directive", options.code, body);

        if (typeof body === 'object')
            body = JSON.stringify(body);
plugins/amcrest/package-lock.json (generated), 20 changes
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/amcrest",
|
||||
"version": "0.0.119",
|
||||
"version": "0.0.121",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/amcrest",
|
||||
"version": "0.0.119",
|
||||
"version": "0.0.121",
|
||||
"license": "Apache",
|
||||
"dependencies": {
|
||||
"@koush/axios-digest-auth": "^0.8.5",
|
||||
@@ -16,7 +16,7 @@
|
||||
"multiparty": "^4.2.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^16.11.0"
|
||||
"@types/node": "^18.15.11"
|
||||
}
|
||||
},
|
||||
"../../common": {
|
||||
@@ -36,7 +36,7 @@
|
||||
},
|
||||
"../../sdk": {
|
||||
"name": "@scrypted/sdk",
|
||||
"version": "0.2.68",
|
||||
"version": "0.2.87",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@babel/preset-typescript": "^7.18.6",
|
||||
@@ -100,9 +100,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "16.11.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.0.tgz",
|
||||
"integrity": "sha512-8MLkBIYQMuhRBQzGN9875bYsOhPnf/0rgXGo66S2FemHkhbn9qtsz9ywV1iCG+vbjigE4WUNVvw37Dx+L0qsPg=="
|
||||
"version": "18.15.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
|
||||
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
|
||||
},
|
||||
"node_modules/auth-header": {
|
||||
"version": "1.0.0",
|
||||
@@ -291,9 +291,9 @@
|
||||
}
|
||||
},
|
||||
"@types/node": {
|
||||
"version": "16.11.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.0.tgz",
|
||||
"integrity": "sha512-8MLkBIYQMuhRBQzGN9875bYsOhPnf/0rgXGo66S2FemHkhbn9qtsz9ywV1iCG+vbjigE4WUNVvw37Dx+L0qsPg=="
|
||||
"version": "18.15.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
|
||||
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
|
||||
},
|
||||
"auth-header": {
|
||||
"version": "1.0.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/amcrest",
|
||||
"version": "0.0.119",
|
||||
"version": "0.0.121",
|
||||
"description": "Amcrest Plugin for Scrypted",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache",
|
||||
@@ -36,12 +36,12 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@koush/axios-digest-auth": "^0.8.5",
|
||||
"@scrypted/sdk": "file:../../sdk",
|
||||
"@scrypted/common": "file:../../common",
|
||||
"@scrypted/sdk": "file:../../sdk",
|
||||
"@types/multiparty": "^0.0.33",
|
||||
"multiparty": "^4.2.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^16.11.0"
|
||||
"@types/node": "^18.15.11"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -616,7 +616,7 @@ class AmcrestProvider extends RtspProvider {
|
||||
this.console.warn('Error probing two way audio', e);
|
||||
}
|
||||
}
|
||||
settings.newCamera ||= 'Hikvision Camera';
|
||||
settings.newCamera ||= 'Amcrest Camera';
|
||||
|
||||
nativeId = await super.createDevice(settings, nativeId);
|
||||
|
||||
|
||||
plugins/arlo/package-lock.json (generated), 6 changes
@@ -1,19 +1,19 @@
|
||||
{
|
||||
"name": "@scrypted/arlo",
|
||||
"version": "0.7.4",
|
||||
"version": "0.7.13",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/arlo",
|
||||
"version": "0.7.4",
|
||||
"version": "0.7.13",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
},
|
||||
"../../sdk": {
|
||||
"name": "@scrypted/sdk",
|
||||
"version": "0.2.85",
|
||||
"version": "0.2.87",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/arlo",
|
||||
"version": "0.7.4",
|
||||
"version": "0.7.13",
|
||||
"description": "Arlo Plugin for Scrypted",
|
||||
"keywords": [
|
||||
"scrypted",
|
||||
|
||||
@@ -383,6 +383,33 @@ class Arlo(object):
|
||||
self.HandleEvents(basestation, resource, [('is', 'motionDetected')], callbackwrapper)
|
||||
)
|
||||
|
||||
def SubscribeToAudioEvents(self, basestation, camera, callback):
|
||||
"""
|
||||
Use this method to subscribe to audio events. You must provide a callback function which will get called once per audio event.
|
||||
|
||||
The callback function should have the following signature:
|
||||
def callback(self, event)
|
||||
|
||||
This is an example of handling a specific event, in reality, you'd probably want to write a callback for HandleEvents()
|
||||
that has a big switch statement in it to handle all the various events Arlo produces.
|
||||
|
||||
Returns the Task object that contains the subscription loop.
|
||||
"""
|
||||
resource = f"cameras/{camera.get('deviceId')}"
|
||||
|
||||
def callbackwrapper(self, event):
|
||||
properties = event.get('properties', {})
|
||||
stop = None
|
||||
if 'audioDetected' in properties:
|
||||
stop = callback(properties['audioDetected'])
|
||||
if not stop:
|
||||
return None
|
||||
return stop
|
||||
|
||||
return asyncio.get_event_loop().create_task(
|
||||
self.HandleEvents(basestation, resource, [('is', 'audioDetected')], callbackwrapper)
|
||||
)
|
||||
|
||||
def SubscribeToBatteryEvents(self, basestation, camera, callback):
|
||||
"""
|
||||
Use this method to subscribe to battery events. You must provide a callback function which will get called once per battery event.
|
||||
@@ -711,7 +738,20 @@ class Arlo(object):
|
||||
callback,
|
||||
)
|
||||
|
||||
def SirenOn(self, basestation):
|
||||
def SirenOn(self, basestation, camera=None):
|
||||
if camera is not None:
|
||||
resource = f"siren/{camera.get('deviceId')}"
|
||||
return self.Notify(basestation, {
|
||||
"action": "set",
|
||||
"resource": resource,
|
||||
"publishResponse": True,
|
||||
"properties": {
|
||||
"sirenState": "on",
|
||||
"duration": 300,
|
||||
"volume": 8,
|
||||
"pattern": "alarm"
|
||||
}
|
||||
})
|
||||
return self.Notify(basestation, {
|
||||
"action": "set",
|
||||
"resource": "siren",
|
||||
@@ -724,7 +764,20 @@ class Arlo(object):
|
||||
}
|
||||
})
|
||||
|
||||
def SirenOff(self, basestation):
|
||||
def SirenOff(self, basestation, camera=None):
|
||||
if camera is not None:
|
||||
resource = f"siren/{camera.get('deviceId')}"
|
||||
return self.Notify(basestation, {
|
||||
"action": "set",
|
||||
"resource": resource,
|
||||
"publishResponse": True,
|
||||
"properties": {
|
||||
"sirenState": "off",
|
||||
"duration": 300,
|
||||
"volume": 8,
|
||||
"pattern": "alarm"
|
||||
}
|
||||
})
|
||||
return self.Notify(basestation, {
|
||||
"action": "set",
|
||||
"resource": "siren",
|
||||
@@ -737,6 +790,58 @@ class Arlo(object):
|
||||
}
|
||||
})
|
||||
|
||||
def SpotlightOn(self, basestation, camera):
|
||||
resource = f"cameras/{camera.get('deviceId')}"
|
||||
return self.Notify(basestation, {
|
||||
"action": "set",
|
||||
"resource": resource,
|
||||
"publishResponse": True,
|
||||
"properties": {
|
||||
"spotlight": {
|
||||
"enabled": True,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
def SpotlightOff(self, basestation, camera):
|
||||
resource = f"cameras/{camera.get('deviceId')}"
|
||||
return self.Notify(basestation, {
|
||||
"action": "set",
|
||||
"resource": resource,
|
||||
"publishResponse": True,
|
||||
"properties": {
|
||||
"spotlight": {
|
||||
"enabled": False,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
def FloodlightOn(self, basestation, camera):
|
||||
resource = f"cameras/{camera.get('deviceId')}"
|
||||
return self.Notify(basestation, {
|
||||
"action": "set",
|
||||
"resource": resource,
|
||||
"publishResponse": True,
|
||||
"properties": {
|
||||
"floodlight": {
|
||||
"on": True,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
def FloodlightOff(self, basestation, camera):
|
||||
resource = f"cameras/{camera.get('deviceId')}"
|
||||
return self.Notify(basestation, {
|
||||
"action": "set",
|
||||
"resource": resource,
|
||||
"publishResponse": True,
|
||||
"properties": {
|
||||
"floodlight": {
|
||||
"on": False,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
def GetLibrary(self, device, from_date: datetime, to_date: datetime):
|
||||
"""
|
||||
This call returns the following:
|
||||
@@ -784,4 +889,13 @@ class Arlo(object):
|
||||
'dateFrom': from_date,
|
||||
'dateTo': to_date
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
def GetSmartFeatures(self, device) -> dict:
|
||||
smart_features = self._getSmartFeaturesCached()
|
||||
key = f"{device['owner']['ownerId']}_{device['deviceId']}"
|
||||
return smart_features["features"].get(key, {})
|
||||
|
||||
@cached(cache=TTLCache(maxsize=1, ttl=60))
|
||||
def _getSmartFeaturesCached(self) -> dict:
|
||||
return self.request.get(f'https://{self.BASE_URL}/hmsweb/users/subscription/smart/features')
|
||||
@@ -66,15 +66,4 @@ class ArloDeviceBase(ScryptedDeviceBase, ScryptedDeviceLoggerMixin, BackgroundTa
|
||||
|
||||
def get_builtin_child_device_manifests(self) -> List[Device]:
|
||||
"""Returns the list of child device manifests representing hardware features built into this device."""
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
def async_print_exception_guard(self, fn):
|
||||
"""Decorator to print an exception's stack trace before re-raising the exception."""
|
||||
async def wrapped(*args, **kwargs):
|
||||
try:
|
||||
return await fn(*args, **kwargs)
|
||||
except Exception:
|
||||
traceback.print_exc()
|
||||
raise
|
||||
return wrapped
|
||||
return []
|
||||
@@ -14,11 +14,20 @@ if TYPE_CHECKING:
|
||||
|
||||
|
||||
class ArloBasestation(ArloDeviceBase, DeviceProvider):
|
||||
MODELS_WITH_SIRENS = [
|
||||
"vmb4000",
|
||||
"vmb4500"
|
||||
]
|
||||
|
||||
vss: ArloSirenVirtualSecuritySystem = None
|
||||
|
||||
def __init__(self, nativeId: str, arlo_basestation: dict, provider: ArloProvider) -> None:
|
||||
super().__init__(nativeId=nativeId, arlo_device=arlo_basestation, arlo_basestation=arlo_basestation, provider=provider)
|
||||
|
||||
@property
|
||||
def has_siren(self) -> bool:
|
||||
return any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloBasestation.MODELS_WITH_SIRENS])
|
||||
|
||||
def get_applicable_interfaces(self) -> List[str]:
|
||||
return [ScryptedInterface.DeviceProvider.value]
|
||||
|
||||
@@ -26,8 +35,11 @@ class ArloBasestation(ArloDeviceBase, DeviceProvider):
|
||||
return ScryptedDeviceType.DeviceProvider.value
|
||||
|
||||
def get_builtin_child_device_manifests(self) -> List[Device]:
|
||||
vss_id = f'{self.arlo_device["deviceId"]}.vss'
|
||||
vss = self.get_or_create_vss(vss_id)
|
||||
if not self.has_siren:
|
||||
# this basestation has no builtin siren, so no manifests to return
|
||||
return []
|
||||
|
||||
vss = self.get_or_create_vss()
|
||||
return [
|
||||
{
|
||||
"info": {
|
||||
@@ -36,7 +48,7 @@ class ArloBasestation(ArloDeviceBase, DeviceProvider):
|
||||
"firmware": self.arlo_device.get("firmwareVersion"),
|
||||
"serialNumber": self.arlo_device["deviceId"],
|
||||
},
|
||||
"nativeId": vss_id,
|
||||
"nativeId": vss.nativeId,
|
||||
"name": f'{self.arlo_device["deviceName"]} Siren Virtual Security System',
|
||||
"interfaces": vss.get_applicable_interfaces(),
|
||||
"type": vss.get_device_type(),
|
||||
@@ -48,11 +60,12 @@ class ArloBasestation(ArloDeviceBase, DeviceProvider):
|
||||
if not nativeId.startswith(self.nativeId):
|
||||
# must be a camera, so get it from the provider
|
||||
return await self.provider.getDevice(nativeId)
|
||||
return self.get_or_create_vss(nativeId)
|
||||
|
||||
def get_or_create_vss(self, nativeId: str) -> ArloSirenVirtualSecuritySystem:
|
||||
if not nativeId.endswith("vss"):
|
||||
return None
|
||||
return self.get_or_create_vss()
|
||||
|
||||
def get_or_create_vss(self) -> ArloSirenVirtualSecuritySystem:
|
||||
vss_id = f'{self.arlo_device["deviceId"]}.vss'
|
||||
if not self.vss:
|
||||
self.vss = ArloSirenVirtualSecuritySystem(nativeId, self.arlo_device, self.arlo_basestation, self.provider)
|
||||
self.vss = ArloSirenVirtualSecuritySystem(vss_id, self.arlo_device, self.arlo_basestation, self.provider, self)
|
||||
return self.vss
|
||||
@@ -10,24 +10,76 @@ from typing import List, TYPE_CHECKING
|
||||
import scrypted_arlo_go
|
||||
|
||||
import scrypted_sdk
|
||||
from scrypted_sdk.types import Setting, Settings, Camera, VideoCamera, VideoClips, VideoClip, VideoClipOptions, MotionSensor, Battery, MediaObject, ResponsePictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
|
||||
from scrypted_sdk.types import Setting, Settings, Device, Camera, VideoCamera, VideoClips, VideoClip, VideoClipOptions, MotionSensor, AudioSensor, Battery, DeviceProvider, MediaObject, ResponsePictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
|
||||
|
||||
from .base import ArloDeviceBase
|
||||
from .spotlight import ArloSpotlight, ArloFloodlight
|
||||
from .vss import ArloSirenVirtualSecuritySystem
|
||||
from .child_process import HeartbeatChildProcess
|
||||
from .util import BackgroundTaskMixin
|
||||
from .util import BackgroundTaskMixin, async_print_exception_guard
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
|
||||
from .provider import ArloProvider
|
||||
|
||||
|
||||
class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, MotionSensor, Battery):
|
||||
class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider, VideoClips, MotionSensor, AudioSensor, Battery):
|
||||
MODELS_WITH_SPOTLIGHTS = [
|
||||
"vmc4040p",
|
||||
"vmc2030",
|
||||
"vmc2032",
|
||||
"vmc4041p",
|
||||
"vmc4050p",
|
||||
"vmc5040",
|
||||
"vml2030",
|
||||
"vml4030",
|
||||
]
|
||||
|
||||
MODELS_WITH_FLOODLIGHTS = ["fb1001"]
|
||||
|
||||
MODELS_WITH_SIRENS = [
|
||||
"vmc4040p",
|
||||
"fb1001",
|
||||
"vmc2030",
|
||||
"vmc2020",
|
||||
"vmc2032",
|
||||
"vmc4041p",
|
||||
"vmc4050p",
|
||||
"vmc5040",
|
||||
"vml2030",
|
||||
"vmc4030",
|
||||
"vml4030",
|
||||
"vmc4030p",
|
||||
]
|
||||
|
||||
MODELS_WITH_AUDIO_SENSORS = [
|
||||
"vmc4040p",
|
||||
"fb1001",
|
||||
"vmc4041p",
|
||||
"vmc4050p",
|
||||
"vmc5040",
|
||||
"vmc3040",
|
||||
"vmc3040s",
|
||||
"vmc4030",
|
||||
"vml4030",
|
||||
"vmc4030p",
|
||||
]
|
||||
|
||||
MODELS_WITHOUT_BATTERY = [
|
||||
"avd1001",
|
||||
"vmc3040",
|
||||
"vmc3040s",
|
||||
]
|
||||
|
||||
timeout: int = 30
|
||||
intercom_session = None
|
||||
light: ArloSpotlight = None
|
||||
vss: ArloSirenVirtualSecuritySystem = None
|
||||
|
||||
def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider) -> None:
|
||||
super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_basestation, provider=provider)
|
||||
self.start_motion_subscription()
|
||||
self.start_audio_subscription()
|
||||
self.start_battery_subscription()
|
||||
|
||||
def start_motion_subscription(self) -> None:
|
||||
@@ -39,7 +91,22 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, Moti
|
||||
self.provider.arlo.SubscribeToMotionEvents(self.arlo_basestation, self.arlo_device, callback)
|
||||
)
|
||||
|
||||
def start_audio_subscription(self) -> None:
|
||||
if not self.has_audio_sensor:
|
||||
return
|
||||
|
||||
def callback(audioDetected):
|
||||
self.audioDetected = audioDetected
|
||||
return self.stop_subscriptions
|
||||
|
||||
self.register_task(
|
||||
self.provider.arlo.SubscribeToAudioEvents(self.arlo_basestation, self.arlo_device, callback)
|
||||
)
|
||||
|
||||
def start_battery_subscription(self) -> None:
|
||||
if self.wired_to_power:
|
||||
return
|
||||
|
||||
def callback(batteryLevel):
|
||||
self.batteryLevel = batteryLevel
|
||||
return self.stop_subscriptions
|
||||
@@ -53,9 +120,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, Moti
|
||||
ScryptedInterface.VideoCamera.value,
|
||||
ScryptedInterface.Camera.value,
|
||||
ScryptedInterface.MotionSensor.value,
|
||||
ScryptedInterface.Battery.value,
|
||||
ScryptedInterface.Settings.value,
|
||||
ScryptedInterface.VideoClips.value,
|
||||
])
|
||||
|
||||
if self.two_way_audio:
|
||||
@@ -66,6 +131,21 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, Moti
|
||||
results.add(ScryptedInterface.RTCSignalingChannel.value)
|
||||
results.discard(ScryptedInterface.Intercom.value)
|
||||
|
||||
if self.has_battery:
|
||||
results.add(ScryptedInterface.Battery.value)
|
||||
|
||||
if self.wired_to_power:
|
||||
results.discard(ScryptedInterface.Battery.value)
|
||||
|
||||
if self.has_siren or self.has_spotlight or self.has_floodlight:
|
||||
results.add(ScryptedInterface.DeviceProvider.value)
|
||||
|
||||
if self.has_audio_sensor:
|
||||
results.add(ScryptedInterface.AudioSensor.value)
|
||||
|
||||
if self.has_cloud_recording:
|
||||
results.add(ScryptedInterface.VideoClips.value)
|
||||
|
||||
if not self._can_push_to_talk():
|
||||
results.discard(ScryptedInterface.RTCSignalingChannel.value)
|
||||
results.discard(ScryptedInterface.Intercom.value)
|
||||
@@ -75,6 +155,42 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, Moti
|
||||
def get_device_type(self) -> str:
|
||||
return ScryptedDeviceType.Camera.value
|
||||
|
||||
def get_builtin_child_device_manifests(self) -> List[Device]:
|
||||
results = []
|
||||
if self.has_spotlight or self.has_floodlight:
|
||||
light = self.get_or_create_spotlight_or_floodlight()
|
||||
results.append({
|
||||
"info": {
|
||||
"model": f"{self.arlo_device['modelId']} {self.arlo_device['properties'].get('hwVersion', '')}".strip(),
|
||||
"manufacturer": "Arlo",
|
||||
"firmware": self.arlo_device.get("firmwareVersion"),
|
||||
"serialNumber": self.arlo_device["deviceId"],
|
||||
},
|
||||
"nativeId": light.nativeId,
|
||||
"name": f'{self.arlo_device["deviceName"]} {"Spotlight" if self.has_spotlight else "Floodlight"}',
|
||||
"interfaces": light.get_applicable_interfaces(),
|
||||
"type": light.get_device_type(),
|
||||
"providerNativeId": self.nativeId,
|
||||
})
|
||||
if self.has_siren:
|
||||
vss = self.get_or_create_vss()
|
||||
results.extend([
|
||||
{
|
||||
"info": {
|
||||
"model": f"{self.arlo_device['modelId']} {self.arlo_device['properties'].get('hwVersion', '')}".strip(),
|
||||
"manufacturer": "Arlo",
|
||||
"firmware": self.arlo_device.get("firmwareVersion"),
|
||||
"serialNumber": self.arlo_device["deviceId"],
|
||||
},
|
||||
"nativeId": vss.nativeId,
|
||||
"name": f'{self.arlo_device["deviceName"]} Siren Virtual Security System',
|
||||
"interfaces": vss.get_applicable_interfaces(),
|
||||
"type": vss.get_device_type(),
|
||||
"providerNativeId": self.nativeId,
|
||||
},
|
||||
] + vss.get_builtin_child_device_manifests())
|
||||
return results
|
||||
|
||||
@property
|
||||
def webrtc_emulation(self) -> bool:
|
||||
if self.storage:
|
||||
@@ -92,9 +208,53 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, Moti
|
||||
else:
|
||||
return True
|
||||
|
||||
@property
|
||||
def wired_to_power(self) -> bool:
|
||||
if self.storage:
|
||||
return True if self.storage.getItem("wired_to_power") else False
|
||||
else:
|
||||
return False
|
||||
|
||||
@property
|
||||
def has_cloud_recording(self) -> bool:
|
||||
return self.provider.arlo.GetSmartFeatures(self.arlo_device).get("planFeatures", {}).get("eventRecording", False)
|
||||
|
||||
@property
|
||||
def has_spotlight(self) -> bool:
|
||||
return any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITH_SPOTLIGHTS])
|
||||
|
||||
@property
|
||||
def has_floodlight(self) -> bool:
|
||||
return any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITH_FLOODLIGHTS])
|
||||
|
||||
@property
|
||||
def has_siren(self) -> bool:
|
||||
return any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITH_SIRENS])
|
||||
|
||||
@property
|
||||
def has_audio_sensor(self) -> bool:
|
||||
return any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITH_AUDIO_SENSORS])
|
||||
|
||||
@property
|
||||
def has_battery(self) -> bool:
|
||||
return not any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITHOUT_BATTERY])
|
||||
|
||||
async def getSettings(self) -> List[Setting]:
|
||||
result = []
|
||||
if self.has_battery:
|
||||
result.append(
|
||||
{
|
||||
"key": "wired_to_power",
|
||||
"title": "Plugged In to External Power",
|
||||
"value": self.wired_to_power,
|
||||
"description": "Informs Scrypted that this device is plugged in to an external power source. " + \
|
||||
"Will allow features like persistent prebuffer to work, however will no longer report this device's battery percentage. " + \
|
||||
"Note that a persistent prebuffer may cause excess battery drain if the external power is not able to charge faster than the battery consumption rate.",
|
||||
"type": "boolean",
|
||||
},
|
||||
)
|
||||
if self._can_push_to_talk():
|
||||
return [
|
||||
result.extend([
|
||||
{
|
||||
"key": "two_way_audio",
|
||||
"title": "(Experimental) Enable native two-way audio",
|
||||
@@ -110,17 +270,19 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, Moti
|
||||
"If enabled, takes precedence over native two-way audio. May use increased system resources.",
|
||||
"type": "boolean",
|
||||
},
|
||||
]
|
||||
return []
|
||||
])
|
||||
return result
|
||||
|
||||
@async_print_exception_guard
|
||||
async def putSetting(self, key, value) -> None:
|
||||
if key in ["webrtc_emulation", "two_way_audio"]:
|
||||
self.storage.setItem(key, value == "true")
|
||||
await self.provider.discoverDevices()
|
||||
if key in ["webrtc_emulation", "two_way_audio", "wired_to_power"]:
|
||||
self.storage.setItem(key, value == "true" or value == True)
|
||||
await self.provider.discover_devices()
|
||||
|
||||
async def getPictureOptions(self) -> List[ResponsePictureOptions]:
|
||||
return []
|
||||
|
||||
@async_print_exception_guard
|
||||
async def takePicture(self, options: dict = None) -> MediaObject:
|
||||
self.logger.info("Taking picture")
|
||||
|
||||
@@ -128,7 +290,11 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, Moti
|
||||
msos = await real_device.getVideoStreamOptions()
|
||||
if any(["prebuffer" in m for m in msos]):
|
||||
self.logger.info("Getting snapshot from prebuffer")
|
||||
return await real_device.getVideoStream({"refresh": False})
|
||||
try:
|
||||
return await real_device.getVideoStream({"refresh": False})
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Could not fetch from prebuffer due to: {e}")
|
||||
self.logger.warning("Will try to fetch snapshot from Arlo cloud")
|
||||
|
||||
pic_url = await asyncio.wait_for(self.provider.arlo.TriggerFullFrameSnapshot(self.arlo_basestation, self.arlo_device), timeout=self.timeout)
|
||||
self.logger.debug(f"Got snapshot URL for at {pic_url}")
|
||||
@@ -180,32 +346,30 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, Moti
|
||||
}
|
||||
return await scrypted_sdk.mediaManager.createFFmpegMediaObject(ffmpeg_input)
|
||||
|
||||
@async_print_exception_guard
|
||||
async def startRTCSignalingSession(self, scrypted_session):
|
||||
try:
|
||||
plugin_session = ArloCameraRTCSignalingSession(self)
|
||||
await plugin_session.initialize()
|
||||
plugin_session = ArloCameraRTCSignalingSession(self)
|
||||
await plugin_session.initialize()
|
||||
|
||||
scrypted_setup = {
|
||||
"type": "offer",
|
||||
"audio": {
|
||||
"direction": "sendrecv" if self._can_push_to_talk() else "recvonly",
|
||||
},
|
||||
"video": {
|
||||
"direction": "recvonly",
|
||||
}
|
||||
scrypted_setup = {
|
||||
"type": "offer",
|
||||
"audio": {
|
||||
"direction": "sendrecv" if self._can_push_to_talk() else "recvonly",
|
||||
},
|
||||
"video": {
|
||||
"direction": "recvonly",
|
||||
}
|
||||
plugin_setup = {}
|
||||
}
|
||||
plugin_setup = {}
|
||||
|
||||
scrypted_offer = await scrypted_session.createLocalDescription("offer", scrypted_setup, sendIceCandidate=plugin_session.addIceCandidate)
|
||||
self.logger.info(f"Scrypted offer sdp:\n{scrypted_offer['sdp']}")
|
||||
await plugin_session.setRemoteDescription(scrypted_offer, plugin_setup)
|
||||
plugin_answer = await plugin_session.createLocalDescription("answer", plugin_setup, scrypted_session.sendIceCandidate)
|
||||
self.logger.info(f"Scrypted answer sdp:\n{plugin_answer['sdp']}")
|
||||
await scrypted_session.setRemoteDescription(plugin_answer, scrypted_setup)
|
||||
scrypted_offer = await scrypted_session.createLocalDescription("offer", scrypted_setup, sendIceCandidate=plugin_session.addIceCandidate)
|
||||
self.logger.info(f"Scrypted offer sdp:\n{scrypted_offer['sdp']}")
|
||||
await plugin_session.setRemoteDescription(scrypted_offer, plugin_setup)
|
||||
plugin_answer = await plugin_session.createLocalDescription("answer", plugin_setup, scrypted_session.sendIceCandidate)
|
||||
self.logger.info(f"Scrypted answer sdp:\n{plugin_answer['sdp']}")
|
||||
await scrypted_session.setRemoteDescription(plugin_answer, scrypted_setup)
|
||||
|
||||
return ArloCameraRTCSessionControl(plugin_session)
|
||||
except Exception as e:
|
||||
self.logger.error(e)
|
||||
return ArloCameraRTCSessionControl(plugin_session)
|
||||
|
||||
async def startIntercom(self, media) -> None:
|
||||
self.logger.info("Starting intercom")
|
||||
@@ -281,11 +445,36 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, Moti
|
||||
clips.reverse()
|
||||
return clips
|
||||
|
||||
@async_print_exception_guard
|
||||
async def removeVideoClips(self, videoClipIds: List[str]) -> None:
|
||||
# Arlo does support deleting, but let's be safe and disable that
|
||||
self.logger.error("deleting Arlo video clips is not implemented by this plugin")
|
||||
raise Exception("deleting Arlo video clips is not implemented by this plugin")
|
||||
|
||||
async def getDevice(self, nativeId: str) -> ArloDeviceBase:
|
||||
if (nativeId.endswith("spotlight") and self.has_spotlight) or (nativeId.endswith("floodlight") and self.has_floodlight):
|
||||
return self.get_or_create_spotlight_or_floodlight()
|
||||
if nativeId.endswith("vss") and self.has_siren:
|
||||
return self.get_or_create_vss()
|
||||
return None
|
||||
|
||||
def get_or_create_spotlight_or_floodlight(self) -> ArloSpotlight:
|
||||
if self.has_spotlight:
|
||||
light_id = f'{self.arlo_device["deviceId"]}.spotlight'
|
||||
if not self.light:
|
||||
self.light = ArloSpotlight(light_id, self.arlo_device, self.arlo_basestation, self.provider, self)
|
||||
elif self.has_floodlight:
|
||||
light_id = f'{self.arlo_device["deviceId"]}.floodlight'
|
||||
if not self.light:
|
||||
self.light = ArloFloodlight(light_id, self.arlo_device, self.arlo_basestation, self.provider, self)
|
||||
return self.light
|
||||
|
||||
def get_or_create_vss(self) -> ArloSirenVirtualSecuritySystem:
|
||||
if self.has_siren:
|
||||
vss_id = f'{self.arlo_device["deviceId"]}.vss'
|
||||
if not self.vss:
|
||||
self.vss = ArloSirenVirtualSecuritySystem(vss_id, self.arlo_device, self.arlo_basestation, self.provider, self)
|
||||
return self.vss
|
||||
|
||||
|
||||
class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
|
||||
def __init__(self, camera):
|
||||
|
||||
@@ -31,8 +31,4 @@ class ArloDoorbell(ArloCamera, BinarySensor):
|
||||
def get_applicable_interfaces(self) -> List[str]:
|
||||
camera_interfaces = super().get_applicable_interfaces()
|
||||
camera_interfaces.append(ScryptedInterface.BinarySensor.value)
|
||||
|
||||
model_id = self.arlo_device['modelId'].lower()
|
||||
if model_id.startswith("avd1001"):
|
||||
camera_interfaces.remove(ScryptedInterface.Battery.value)
|
||||
return camera_interfaces
|
||||
|
||||
@@ -10,26 +10,27 @@ from typing import List
|
||||
|
||||
import scrypted_sdk
|
||||
from scrypted_sdk import ScryptedDeviceBase
|
||||
from scrypted_sdk.types import Setting, SettingValue, Settings, DeviceProvider, DeviceDiscovery, ScryptedInterface
|
||||
from scrypted_sdk.types import Setting, SettingValue, Settings, DeviceProvider, ScryptedInterface
|
||||
|
||||
from .arlo import Arlo
|
||||
from .arlo.arlo_async import change_stream_class
|
||||
from .arlo.logging import logger as arlo_lib_logger
|
||||
from .logging import ScryptedDeviceLoggerMixin
|
||||
from .util import BackgroundTaskMixin
|
||||
from .util import BackgroundTaskMixin, async_print_exception_guard
|
||||
from .camera import ArloCamera
|
||||
from .doorbell import ArloDoorbell
|
||||
from .basestation import ArloBasestation
|
||||
from .base import ArloDeviceBase
|
||||
|
||||
|
||||
class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery, ScryptedDeviceLoggerMixin, BackgroundTaskMixin):
|
||||
class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceLoggerMixin, BackgroundTaskMixin):
|
||||
arlo_cameras = None
|
||||
arlo_basestations = None
|
||||
_arlo_mfa_code = None
|
||||
scrypted_devices = None
|
||||
_arlo = None
|
||||
_arlo_mfa_complete_auth = None
|
||||
device_discovery_lock: asyncio.Lock = None
|
||||
|
||||
plugin_verbosity_choices = {
|
||||
"Normal": logging.INFO,
|
||||
@@ -50,6 +51,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
|
||||
self.imap = None
|
||||
self.imap_signal = None
|
||||
self.imap_skip_emails = None
|
||||
self.device_discovery_lock = asyncio.Lock()
|
||||
|
||||
self.propagate_verbosity()
|
||||
self.propagate_transport()
|
||||
@@ -188,14 +190,11 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
|
||||
|
||||
async def do_arlo_setup(self) -> None:
|
||||
try:
|
||||
await self.discoverDevices()
|
||||
await self.discover_devices()
|
||||
await self.arlo.Subscribe([
|
||||
(self.arlo_basestations[camera["parentId"]], camera) for camera in self.arlo_cameras.values()
|
||||
])
|
||||
|
||||
for nativeId in self.arlo_cameras.keys():
|
||||
await self.getDevice(nativeId)
|
||||
|
||||
self.arlo.event_stream.set_refresh_interval(self.refresh_interval)
|
||||
except requests.exceptions.HTTPError as e:
|
||||
traceback.print_exc()
|
||||
@@ -472,16 +471,16 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
|
||||
{
|
||||
"group": "General",
|
||||
"key": "plugin_verbosity",
|
||||
"title": "Plugin Verbosity",
|
||||
"description": "Select the verbosity of this plugin. 'Verbose' will show debugging messages, "
|
||||
"including events received from connected Arlo cameras.",
|
||||
"value": self.plugin_verbosity,
|
||||
"choices": sorted(self.plugin_verbosity_choices.keys()),
|
||||
"title": "Verbose Logging",
|
||||
"description": "Enable this option to show debug messages, including events received from connected Arlo cameras.",
|
||||
"value": self.plugin_verbosity == "Verbose",
|
||||
"type": "boolean",
|
||||
},
|
||||
])
|
||||
|
||||
return results
|
||||
|
||||
@async_print_exception_guard
|
||||
async def putSetting(self, key: str, value: SettingValue) -> None:
|
||||
if not self.validate_setting(key, value):
|
||||
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)
|
||||
@@ -493,13 +492,14 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
|
||||
elif key == "force_reauth":
|
||||
# force arlo client to be invalidated and reloaded
|
||||
self.invalidate_arlo_client()
|
||||
elif key == "plugin_verbosity":
|
||||
self.storage.setItem(key, "Verbose" if value == "true" or value == True else "Normal")
|
||||
self.propagate_verbosity()
|
||||
skip_arlo_client = True
|
||||
else:
|
||||
self.storage.setItem(key, value)
|
||||
|
||||
if key == "plugin_verbosity":
|
||||
self.propagate_verbosity()
|
||||
skip_arlo_client = True
|
||||
elif key == "arlo_transport":
|
||||
if key == "arlo_transport":
|
||||
self.propagate_transport()
|
||||
# force arlo client to be invalidated and reloaded, but
|
||||
# keep any mfa codes
|
||||
@@ -558,7 +558,12 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
            return False
        return True

    async def discoverDevices(self, duration: int = 0) -> None:
    @async_print_exception_guard
    async def discover_devices(self) -> None:
        async with self.device_discovery_lock:
            return await self.discover_devices_impl()

    async def discover_devices_impl(self) -> None:
        if not self.arlo:
            raise Exception("Arlo client not connected, cannot discover devices")

@@ -573,16 +578,17 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
        basestations = self.arlo.GetDevices(['basestation', 'siren'])
        for basestation in basestations:
            nativeId = basestation["deviceId"]
            self.logger.debug(f"Adding {nativeId}")

            if nativeId in self.arlo_basestations:
                self.logger.info(f"Skipping basestation {nativeId} ({basestation['modelId']}) as it has already been added")
                continue
            self.arlo_basestations[nativeId] = basestation

            device = await self.getDevice(nativeId)
            device = await self.getDevice_impl(nativeId)
            scrypted_interfaces = device.get_applicable_interfaces()
            manifest = device.get_device_manifest()
            self.logger.info(f"Interfaces for {nativeId} ({basestation['modelId']}): {scrypted_interfaces}")
            self.logger.debug(f"Interfaces for {nativeId} ({basestation['modelId']}): {scrypted_interfaces}")

            # for basestations, we want to add them to the top level DeviceProvider
            provider_to_device_map.setdefault(None, []).append(manifest)
@@ -601,11 +607,13 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery

        cameras = self.arlo.GetDevices(['camera', "arloq", "arloqs", "doorbell"])
        for camera in cameras:
            nativeId = camera["deviceId"]
            self.logger.debug(f"Adding {nativeId}")

            if camera["deviceId"] != camera["parentId"] and camera["parentId"] not in self.arlo_basestations:
                self.logger.info(f"Skipping camera {camera['deviceId']} ({camera['modelId']}) because its basestation was not found")
                continue

            nativeId = camera["deviceId"]
            if nativeId in self.arlo_cameras:
                self.logger.info(f"Skipping camera {nativeId} ({camera['modelId']}) as it has already been added")
                continue
@@ -616,10 +624,10 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
                # own basestation
                self.arlo_basestations[camera["deviceId"]] = camera

            device: ArloDeviceBase = await self.getDevice(nativeId)
            device = await self.getDevice_impl(nativeId)
            scrypted_interfaces = device.get_applicable_interfaces()
            manifest = device.get_device_manifest()
            self.logger.info(f"Interfaces for {nativeId} ({camera['modelId']}): {scrypted_interfaces}")
            self.logger.debug(f"Interfaces for {nativeId} ({camera['modelId']}): {scrypted_interfaces}")

            if camera["deviceId"] == camera["parentId"]:
                provider_to_device_map.setdefault(None, []).append(manifest)
@@ -656,6 +664,10 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
        })

    async def getDevice(self, nativeId: str) -> ArloDeviceBase:
        async with self.device_discovery_lock:
            return await self.getDevice_impl(nativeId)

    async def getDevice_impl(self, nativeId: str) -> ArloDeviceBase:
        ret = self.scrypted_devices.get(nativeId, None)
        if ret is None:
            ret = self.create_device(nativeId)

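Both `discover_devices` and `getDevice` now funnel through the shared `device_discovery_lock`, so a device lookup cannot race an in-progress discovery pass, and each public entry point delegates to an unlocked `_impl` variant. Presumably the split exists because `asyncio.Lock` is not reentrant: code that already holds the lock (discovery) must call the implementation directly. A minimal standalone sketch of the pattern, with a trivial stand-in factory instead of the plugin's real `create_device`:

```python
import asyncio

class LockedDeviceCache:
    """Serialize discovery and lookups behind one asyncio.Lock (illustrative sketch)."""

    def __init__(self):
        self._lock = asyncio.Lock()
        self._devices = {}

    async def get_device(self, native_id: str):
        # Public entry point: take the lock, then delegate.
        async with self._lock:
            return await self._get_device_impl(native_id)

    async def _get_device_impl(self, native_id: str):
        # Callers that already hold the lock (e.g. a discovery pass) call this
        # directly, avoiding a deadlock on the non-reentrant lock.
        device = self._devices.get(native_id)
        if device is None:
            device = self._devices[native_id] = object()  # stand-in for create_device(native_id)
        return device

async def main():
    cache = LockedDeviceCache()
    print(await cache.get_device("camera-1"))

asyncio.run(main())
```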
@@ -5,6 +5,7 @@ from typing import List, TYPE_CHECKING
from scrypted_sdk.types import OnOff, SecuritySystemMode, ScryptedInterface, ScryptedDeviceType

from .base import ArloDeviceBase
from .util import async_print_exception_guard

if TYPE_CHECKING:
    # https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
@@ -25,8 +26,9 @@ class ArloSiren(ArloDeviceBase, OnOff):
    def get_device_type(self) -> str:
        return ScryptedDeviceType.Siren.value

    @ArloDeviceBase.async_print_exception_guard
    @async_print_exception_guard
    async def turnOn(self) -> None:
        from .basestation import ArloBasestation
        self.logger.info("Turning on")

        if self.vss.securitySystemState["mode"] == SecuritySystemMode.Disarmed.value:
@@ -42,7 +44,12 @@ class ArloSiren(ArloDeviceBase, OnOff):
            }
            return

        self.provider.arlo.SirenOn(self.arlo_device)
        if isinstance(self.vss.parent, ArloBasestation):
            self.logger.debug("Parent device is a basestation")
            self.provider.arlo.SirenOn(self.arlo_basestation)
        else:
            self.logger.debug("Parent device is a camera")
            self.provider.arlo.SirenOn(self.arlo_basestation, self.arlo_device)

        self.on = True
        self.vss.securitySystemState = {
@@ -50,10 +57,14 @@ class ArloSiren(ArloDeviceBase, OnOff):
            "triggered": True,
        }

    @ArloDeviceBase.async_print_exception_guard
    @async_print_exception_guard
    async def turnOff(self) -> None:
        from .basestation import ArloBasestation
        self.logger.info("Turning off")
        self.provider.arlo.SirenOff(self.arlo_device)
        if isinstance(self.vss.parent, ArloBasestation):
            self.provider.arlo.SirenOff(self.arlo_basestation)
        else:
            self.provider.arlo.SirenOff(self.arlo_basestation, self.arlo_device)
        self.on = False
        self.vss.securitySystemState = {
            **self.vss.securitySystemState,

54
plugins/arlo/src/arlo_plugin/spotlight.py
Normal file
@@ -0,0 +1,54 @@
from __future__ import annotations

from typing import List, TYPE_CHECKING

from scrypted_sdk.types import OnOff, ScryptedInterface, ScryptedDeviceType

from .base import ArloDeviceBase
from .util import async_print_exception_guard

if TYPE_CHECKING:
    # https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
    from .provider import ArloProvider
    from .camera import ArloCamera


class ArloSpotlight(ArloDeviceBase, OnOff):
    camera: ArloCamera = None

    def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider, camera: ArloCamera) -> None:
        super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_basestation, provider=provider)
        self.camera = camera

    def get_applicable_interfaces(self) -> List[str]:
        return [ScryptedInterface.OnOff.value]

    def get_device_type(self) -> str:
        return ScryptedDeviceType.Light.value

    @async_print_exception_guard
    async def turnOn(self) -> None:
        self.logger.info("Turning on")
        self.provider.arlo.SpotlightOn(self.arlo_basestation, self.arlo_device)
        self.on = True

    @async_print_exception_guard
    async def turnOff(self) -> None:
        self.logger.info("Turning off")
        self.provider.arlo.SpotlightOff(self.arlo_basestation, self.arlo_device)
        self.on = False


class ArloFloodlight(ArloSpotlight):

    @async_print_exception_guard
    async def turnOn(self) -> None:
        self.logger.info("Turning on")
        self.provider.arlo.FloodlightOn(self.arlo_basestation, self.arlo_device)
        self.on = True

    @async_print_exception_guard
    async def turnOff(self) -> None:
        self.logger.info("Turning off")
        self.provider.arlo.FloodlightOff(self.arlo_basestation, self.arlo_device)
        self.on = False
@@ -1,4 +1,5 @@
import asyncio
import traceback


class BackgroundTaskMixin:
@@ -25,4 +26,14 @@ class BackgroundTaskMixin:
        if not hasattr(self, "background_tasks"):
            return
        for task in self.background_tasks:
            task.cancel()
            task.cancel()

def async_print_exception_guard(fn):
    """Decorator to print an exception's stack trace before re-raising the exception."""
    async def wrapped(*args, **kwargs):
        try:
            return await fn(*args, **kwargs)
        except Exception:
            traceback.print_exc()
            raise
    return wrapped
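The module-level `async_print_exception_guard` replaces the old `ArloDeviceBase.async_print_exception_guard` decorator seen elsewhere in this diff. A self-contained usage sketch (the failing coroutine is made up purely for illustration):

```python
import asyncio
import traceback

def async_print_exception_guard(fn):
    """Decorator to print an exception's stack trace before re-raising the exception."""
    async def wrapped(*args, **kwargs):
        try:
            return await fn(*args, **kwargs)
        except Exception:
            traceback.print_exc()
            raise
    return wrapped

@async_print_exception_guard
async def flaky_turn_on():
    # Hypothetical failing device operation.
    raise RuntimeError("siren unreachable")

if __name__ == "__main__":
    try:
        asyncio.run(flaky_turn_on())
    except RuntimeError:
        pass  # the traceback has already been printed by the guard before re-raising
```

The guard keeps the original exception flowing to the caller, so Scrypted still sees the failure while the plugin console gets a readable stack trace.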
@@ -7,21 +7,26 @@ from scrypted_sdk.types import Device, DeviceProvider, Setting, Settings, Settin

from .base import ArloDeviceBase
from .siren import ArloSiren
from .util import async_print_exception_guard

if TYPE_CHECKING:
    # https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
    from .provider import ArloProvider
    from .basestation import ArloBasestation
    from .camera import ArloCamera


class ArloSirenVirtualSecuritySystem(ArloDeviceBase, SecuritySystem, DeviceProvider):
class ArloSirenVirtualSecuritySystem(ArloDeviceBase, SecuritySystem, Settings, Readme, DeviceProvider):
    """A virtual, emulated security system that controls when scrypted events can trip the real physical siren."""

    SUPPORTED_MODES = [SecuritySystemMode.AwayArmed.value, SecuritySystemMode.HomeArmed.value, SecuritySystemMode.Disarmed.value]

    siren: ArloSiren = None
    parent: ArloBasestation | ArloCamera = None

    def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider) -> None:
    def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider, parent: ArloBasestation | ArloCamera) -> None:
        super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_basestation, provider=provider)
        self.parent = parent
        self.create_task(self.delayed_init())

    @property
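The `parent: ArloBasestation | ArloCamera` annotation uses PEP 604 union syntax on names that are only imported under `TYPE_CHECKING`; that only works when annotation evaluation is postponed, presumably via `from __future__ import annotations` as the new spotlight.py in this same diff does. A minimal sketch of the pattern with placeholder names (the imported module is hypothetical):

```python
from __future__ import annotations  # annotations become strings; the union is never evaluated at runtime

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only type checkers see this import, so no circular import at runtime.
    from some_hub_module import Basestation, Camera  # hypothetical


class VirtualSiren:
    parent: Basestation | Camera = None

    def __init__(self, parent: Basestation | Camera) -> None:
        self.parent = parent
```

Without the `__future__` import (or Python 3.10+ plus real runtime imports), the class-level union annotation would raise a `NameError` when the module loads.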
@@ -56,7 +61,7 @@ class ArloSirenVirtualSecuritySystem(ArloDeviceBase, SecuritySystem, DeviceProvi
                }
                return
            except Exception as e:
                self.logger.info(f"Delayed init failed, will try again: {e}")
                self.logger.debug(f"Delayed init failed, will try again: {e}")
                await asyncio.sleep(0.1)
                iterations += 1

@@ -129,6 +134,7 @@ If this virtual security system is synced to Homekit, the siren device will be m
        self.siren = ArloSiren(siren_id, self.arlo_device, self.arlo_basestation, self.provider, self)
        return self.siren

    @async_print_exception_guard
    async def armSecuritySystem(self, mode: SecuritySystemMode) -> None:
        self.logger.info(f"Arming {mode}")
        self.mode = mode
@@ -139,7 +145,7 @@ If this virtual security system is synced to Homekit, the siren device will be m
        if mode == SecuritySystemMode.Disarmed.value:
            await self.get_or_create_siren().turnOff()

    @ArloDeviceBase.async_print_exception_guard
    @async_print_exception_guard
    async def disarmSecuritySystem(self) -> None:
        self.logger.info(f"Disarming")
        self.mode = SecuritySystemMode.Disarmed.value

@@ -1,7 +1,7 @@
paho-mqtt==1.6.1
sseclient==0.0.22
requests
cachetools
requests==2.28.2
cachetools==5.3.0
scrypted-arlo-go==0.0.1
--extra-index-url=https://www.piwheels.org/simple/
--extra-index-url=https://bjia56.github.io/scrypted-arlo-go/

4
plugins/core/package-lock.json
generated
@@ -1,12 +1,12 @@
{
  "name": "@scrypted/core",
  "version": "0.1.103",
  "version": "0.1.110",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "": {
      "name": "@scrypted/core",
      "version": "0.1.103",
      "version": "0.1.110",
      "license": "Apache-2.0",
      "dependencies": {
        "@scrypted/common": "file:../../common",

@@ -1,6 +1,6 @@
{
  "name": "@scrypted/core",
  "version": "0.1.103",
  "version": "0.1.110",
  "description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
  "author": "Scrypted",
  "license": "Apache-2.0",

@@ -27,25 +27,8 @@ export class Scheduler {
        ];

        const date = new Date();
        if (schedule.clockType === 'AM' || schedule.clockType === 'PM') {
            let hour = schedule.hour;
            if (schedule.clockType === 'AM') {
                if (hour === 12)
                    hour -= 12;
            }
            else {
                if (hour != 12)
                    hour += 12;
            }
            date.setHours(hour);
            date.setMinutes(schedule.minute, 0, 0);
        }
        else if (schedule.clockType === '24HourClock') {
            date.setHours(schedule.hour, schedule.minute, 0, 0);
        }
        else {
            throw new Error('sunrise/sunset clock not supported');
        }
        date.setHours(schedule.hour);
        date.setMinutes(schedule.minute);

        const ret: ScryptedDevice = {
            async setName() { },
@@ -65,7 +48,7 @@ export class Scheduler {
            if (!days[day])
                continue;

            source.log.i(`event will fire at ${future}`);
            source.log.i(`event will fire at ${future.toLocaleString()}`);
            return future;
        }
        source.log.w('event will never fire');
@@ -80,6 +63,7 @@ export class Scheduler {
        }

        const delay = when.getTime() - Date.now();
        source.log.i(`event will fire in ${Math.round(delay / 60 / 1000)} minutes.`);

        let timeout = setTimeout(() => {
            reschedule();

@@ -92,6 +92,17 @@ class ScryptedCore extends ScryptedDeviceBase implements HttpRequestHandler, Eng
            this.automationCore = new AutomationCore();
        })();

        deviceManager.onDeviceDiscovered({
            name: 'Add to Launcher',
            nativeId: 'launcher',
            interfaces: [
                '@scrypted/launcher-ignore',
                ScryptedInterface.MixinProvider,
                ScryptedInterface.Readme,
            ],
            type: ScryptedDeviceType.Builtin,
        });

        (async () => {
            await deviceManager.onDeviceDiscovered(
                {

@@ -23,6 +23,15 @@ export class User extends ScryptedDeviceBase implements Settings, ScryptedUser {
    })

    async getScryptedUserAccessControl(): Promise<ScryptedUserAccessControl> {
        const usersService = await sdk.systemManager.getComponent('users');
        const users: DBUser[] = await usersService.getAllUsers();
        const user = users.find(user => user.username === this.username);
        if (!user)
            throw new Error("user not found");

        if (user.admin)
            return;

        const self = sdk.deviceManager.getDeviceState(this.nativeId);

        const ret: ScryptedUserAccessControl = {

118
plugins/core/ui/package-lock.json
generated
@@ -13,7 +13,6 @@
|
||||
"@fortawesome/free-solid-svg-icons": "^6.3.0",
|
||||
"@fortawesome/vue-fontawesome": "^2.0.8",
|
||||
"@radial-color-picker/vue-color-picker": "^2.3.0",
|
||||
"@scrypted/client": "file:../../../packages/client",
|
||||
"@scrypted/common": "file:../../../common",
|
||||
"@scrypted/sdk": "file:../../../sdk",
|
||||
"@scrypted/types": "file:../../../sdk/types",
|
||||
@@ -32,6 +31,7 @@
|
||||
"register-service-worker": "^1.7.2",
|
||||
"router": "^1.3.6",
|
||||
"semver": "^6.3.0",
|
||||
"v-calendar": "^2.4.1",
|
||||
"vue": "^2.7.14",
|
||||
"vue-apexcharts": "^1.6.2",
|
||||
"vue-async-computed": "^3.9.0",
|
||||
@@ -118,27 +118,24 @@
|
||||
},
|
||||
"../../../packages/client": {
|
||||
"name": "@scrypted/client",
|
||||
"version": "1.1.37",
|
||||
"version": "1.1.48",
|
||||
"extraneous": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@scrypted/types": "^0.2.64",
|
||||
"adm-zip": "^0.5.9",
|
||||
"@scrypted/types": "^0.2.78",
|
||||
"axios": "^0.25.0",
|
||||
"engine.io-client": "^6.2.2",
|
||||
"linkfs": "^2.1.0",
|
||||
"memfs": "^3.4.1",
|
||||
"engine.io-client": "^6.4.0",
|
||||
"rimraf": "^3.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/adm-zip": "^0.4.34",
|
||||
"@types/ip": "^1.1.0",
|
||||
"@types/node": "^17.0.17",
|
||||
"typescript": "^4.7.4"
|
||||
"@types/node": "^18.14.2",
|
||||
"typescript": "^4.9.5"
|
||||
}
|
||||
},
|
||||
"../../../sdk": {
|
||||
"name": "@scrypted/sdk",
|
||||
"version": "0.2.68",
|
||||
"version": "0.2.87",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@babel/preset-typescript": "^7.18.6",
|
||||
@@ -175,7 +172,7 @@
|
||||
},
|
||||
"../../../sdk/types": {
|
||||
"name": "@scrypted/types",
|
||||
"version": "0.2.63",
|
||||
"version": "0.2.79",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@types/rimraf": "^3.0.2",
|
||||
@@ -2265,6 +2262,16 @@
|
||||
"integrity": "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@popperjs/core": {
|
||||
"version": "2.11.7",
|
||||
"resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.7.tgz",
|
||||
"integrity": "sha512-Cr4OjIkipTtcXKjAsm8agyleBuDHvxzeBoa1v543lbv1YaIwQjESsVcmjiWiPEbC1FIeHOG/Op9kdCmAmiS3Kw==",
|
||||
"peer": true,
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/popperjs"
|
||||
}
|
||||
},
|
||||
"node_modules/@radial-color-picker/color-wheel": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@radial-color-picker/color-wheel/-/color-wheel-2.2.0.tgz",
|
||||
@@ -2287,10 +2294,6 @@
|
||||
"vue": "^2.5.21"
|
||||
}
|
||||
},
|
||||
"node_modules/@scrypted/client": {
|
||||
"resolved": "../../../packages/client",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@scrypted/common": {
|
||||
"resolved": "../../../common",
|
||||
"link": true
|
||||
@@ -7819,7 +7822,6 @@
|
||||
"version": "2.24.0",
|
||||
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.24.0.tgz",
|
||||
"integrity": "sha512-6ujwvwgPID6zbI0o7UbURi2vlLDR9uP26+tW6Lg+Ji3w7dd0i3DOcjcClLjLPranT60SSEFBwdSyYwn/ZkPIuw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=0.11"
|
||||
},
|
||||
@@ -7828,6 +7830,14 @@
|
||||
"url": "https://opencollective.com/date-fns"
|
||||
}
|
||||
},
|
||||
"node_modules/date-fns-tz": {
|
||||
"version": "1.3.8",
|
||||
"resolved": "https://registry.npmjs.org/date-fns-tz/-/date-fns-tz-1.3.8.tgz",
|
||||
"integrity": "sha512-qwNXUFtMHTTU6CFSFjoJ80W8Fzzp24LntbjFFBgL/faqds4e5mo9mftoRLgr3Vi1trISsg4awSpYVsOQCRnapQ==",
|
||||
"peerDependencies": {
|
||||
"date-fns": ">=2.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/de-indent": {
|
||||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/de-indent/-/de-indent-1.0.2.tgz",
|
||||
@@ -18977,6 +18987,31 @@
|
||||
"uuid": "bin/uuid"
|
||||
}
|
||||
},
|
||||
"node_modules/v-calendar": {
|
||||
"version": "2.4.1",
|
||||
"resolved": "https://registry.npmjs.org/v-calendar/-/v-calendar-2.4.1.tgz",
|
||||
"integrity": "sha512-nhzOlHM2cinv+8jIcnAx+nTo63U40szv3Ig41uLMpGK1U5sApgCP6ggigprsnlMOM5VRq1G/1B8rNHkRrLbGjw==",
|
||||
"dependencies": {
|
||||
"core-js": "^3.15.2",
|
||||
"date-fns": "^2.22.1",
|
||||
"date-fns-tz": "^1.1.4",
|
||||
"lodash": "^4.17.21"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@popperjs/core": "^2.4.0",
|
||||
"vue": "^2.5.18"
|
||||
}
|
||||
},
|
||||
"node_modules/v-calendar/node_modules/core-js": {
|
||||
"version": "3.30.1",
|
||||
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.30.1.tgz",
|
||||
"integrity": "sha512-ZNS5nbiSwDTq4hFosEDqm65izl2CWmLz0hARJMyNQBgkUZMIF51cQiMvIQKA6hvuaeWxQDP3hEedM1JZIgTldQ==",
|
||||
"hasInstallScript": true,
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/core-js"
|
||||
}
|
||||
},
|
||||
"node_modules/v8-compile-cache": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz",
|
||||
@@ -22773,6 +22808,12 @@
|
||||
"integrity": "sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g==",
|
||||
"dev": true
|
||||
},
|
||||
"@popperjs/core": {
|
||||
"version": "2.11.7",
|
||||
"resolved": "https://registry.npmjs.org/@popperjs/core/-/core-2.11.7.tgz",
|
||||
"integrity": "sha512-Cr4OjIkipTtcXKjAsm8agyleBuDHvxzeBoa1v543lbv1YaIwQjESsVcmjiWiPEbC1FIeHOG/Op9kdCmAmiS3Kw==",
|
||||
"peer": true
|
||||
},
|
||||
"@radial-color-picker/color-wheel": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@radial-color-picker/color-wheel/-/color-wheel-2.2.0.tgz",
|
||||
@@ -22792,22 +22833,6 @@
|
||||
"@radial-color-picker/rotator": "2.1.0"
|
||||
}
|
||||
},
|
||||
"@scrypted/client": {
|
||||
"version": "file:../../../packages/client",
|
||||
"requires": {
|
||||
"@scrypted/types": "^0.2.64",
|
||||
"@types/adm-zip": "^0.4.34",
|
||||
"@types/ip": "^1.1.0",
|
||||
"@types/node": "^17.0.17",
|
||||
"adm-zip": "^0.5.9",
|
||||
"axios": "^0.25.0",
|
||||
"engine.io-client": "^6.2.2",
|
||||
"linkfs": "^2.1.0",
|
||||
"memfs": "^3.4.1",
|
||||
"rimraf": "^3.0.2",
|
||||
"typescript": "^4.7.4"
|
||||
}
|
||||
},
|
||||
"@scrypted/common": {
|
||||
"version": "file:../../../common",
|
||||
"requires": {
|
||||
@@ -27308,8 +27333,13 @@
|
||||
"date-fns": {
|
||||
"version": "2.24.0",
|
||||
"resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.24.0.tgz",
|
||||
"integrity": "sha512-6ujwvwgPID6zbI0o7UbURi2vlLDR9uP26+tW6Lg+Ji3w7dd0i3DOcjcClLjLPranT60SSEFBwdSyYwn/ZkPIuw==",
|
||||
"dev": true
|
||||
"integrity": "sha512-6ujwvwgPID6zbI0o7UbURi2vlLDR9uP26+tW6Lg+Ji3w7dd0i3DOcjcClLjLPranT60SSEFBwdSyYwn/ZkPIuw=="
|
||||
},
|
||||
"date-fns-tz": {
|
||||
"version": "1.3.8",
|
||||
"resolved": "https://registry.npmjs.org/date-fns-tz/-/date-fns-tz-1.3.8.tgz",
|
||||
"integrity": "sha512-qwNXUFtMHTTU6CFSFjoJ80W8Fzzp24LntbjFFBgL/faqds4e5mo9mftoRLgr3Vi1trISsg4awSpYVsOQCRnapQ==",
|
||||
"requires": {}
|
||||
},
|
||||
"de-indent": {
|
||||
"version": "1.0.2",
|
||||
@@ -36063,6 +36093,24 @@
|
||||
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==",
|
||||
"dev": true
|
||||
},
|
||||
"v-calendar": {
|
||||
"version": "2.4.1",
|
||||
"resolved": "https://registry.npmjs.org/v-calendar/-/v-calendar-2.4.1.tgz",
|
||||
"integrity": "sha512-nhzOlHM2cinv+8jIcnAx+nTo63U40szv3Ig41uLMpGK1U5sApgCP6ggigprsnlMOM5VRq1G/1B8rNHkRrLbGjw==",
|
||||
"requires": {
|
||||
"core-js": "^3.15.2",
|
||||
"date-fns": "^2.22.1",
|
||||
"date-fns-tz": "^1.1.4",
|
||||
"lodash": "^4.17.21"
|
||||
},
|
||||
"dependencies": {
|
||||
"core-js": {
|
||||
"version": "3.30.1",
|
||||
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.30.1.tgz",
|
||||
"integrity": "sha512-ZNS5nbiSwDTq4hFosEDqm65izl2CWmLz0hARJMyNQBgkUZMIF51cQiMvIQKA6hvuaeWxQDP3hEedM1JZIgTldQ=="
|
||||
}
|
||||
}
|
||||
},
|
||||
"v8-compile-cache": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz",
|
||||
|
||||
@@ -34,6 +34,7 @@
    "register-service-worker": "^1.7.2",
    "router": "^1.3.6",
    "semver": "^6.3.0",
    "v-calendar": "^2.4.1",
    "vue": "^2.7.14",
    "vue-apexcharts": "^1.6.2",
    "vue-async-computed": "^3.9.0",

@@ -1,3 +1,4 @@
import { timeoutPromise } from "@scrypted/common/src/promise-utils";
import { MixinProvider, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, SystemManager } from "@scrypted/types";

export async function setMixin(systemManager: SystemManager, device: ScryptedDevice, mixinId: string, enabled: boolean) {
@@ -14,19 +15,21 @@ export async function setMixin(systemManager: SystemManager, device: ScryptedDev
    plugins.setMixins(device.id, mixins);
}

export function getAllDevices(systemManager: SystemManager) {
    return Object.keys(systemManager.getSystemState()).map(id => systemManager.getDeviceById(id)).filter(device => !!device);
export function getAllDevices<T>(systemManager: SystemManager) {
    return Object.keys(systemManager.getSystemState()).map(id => systemManager.getDeviceById(id) as T & ScryptedDevice).filter(device => !!device);
}

export async function getDeviceAvailableMixins(systemManager: SystemManager, device: ScryptedDevice): Promise<(ScryptedDevice & MixinProvider)[]> {
    const results = await Promise.all(getAllDevices(systemManager).map(async (check) => {
    const results = await Promise.all(getAllDevices<MixinProvider>(systemManager).map(async (check) => {
        try {
            if (check.interfaces.includes(ScryptedInterface.MixinProvider)) {
                if (await (check as any as MixinProvider).canMixin(device.type, device.interfaces))
                    return check as MixinProvider & ScryptedDevice;
                const canMixin = await timeoutPromise(5000, check.canMixin(device.type, device.interfaces));
                if (canMixin)
                    return check;
            }
        }
        catch (e) {
            console.warn(check.name, 'canMixin error', e)
        }
    }));

@@ -47,7 +50,7 @@ export async function getMixinProviderAvailableDevices(systemManager: SystemMana
        devices.map(async (device) => {
            try {
                if (device.mixins?.includes(mixinProvider.id) || (await mixinProvider.canMixin(device.type, device.interfaces)))
                    return device;
                return device;
            }
            catch (e) {
            }

@@ -215,6 +215,7 @@ import Notifier from "../interfaces/Notifier.vue";
import OnOff from "../interfaces/OnOff.vue";
import Brightness from "../interfaces/Brightness.vue";
import Battery from "../interfaces/Battery.vue";
import Charger from "../interfaces/Charger.vue";
import Lock from "../interfaces/Lock.vue";
import ColorSettingHsv from "../interfaces/ColorSettingHsv.vue";
import ColorSettingRgb from "../interfaces/ColorSettingRgb.vue";
@@ -263,6 +264,7 @@ const cardHeaderInterfaces = [
  ScryptedInterface.AudioSensor,
  ScryptedInterface.HumiditySensor,
  ScryptedInterface.Thermometer,
  ScryptedInterface.Charger,
  ScryptedInterface.Battery,
  ScryptedInterface.Lock,
  ScryptedInterface.OnOff,
@@ -362,6 +364,7 @@ export default {

    Lock,
    OnOff,
    Charger,
    Battery,
    Thermometer,
    HumiditySensor,

@@ -40,7 +40,7 @@
          <v-btn :dark="!isLive" v-on="on" small :color="isLive ? 'white' : 'blue'" :outlined="isLive">
            <v-icon small color="white" :outlined="isLive">fa fa-calendar-alt</v-icon> {{ monthDay }}</v-btn>
        </template>
        <v-date-picker @input="datePicked"></v-date-picker>
        <vc-date-picker mode="date" :value="startTime" @input="datePicked"></vc-date-picker>
      </v-dialog>

      <v-btn v-if="showNvr" :dark="!isLive" small :color="isLive ? 'white' : adjustingTime ? 'green' : 'blue'"
@@ -181,8 +181,8 @@ export default {
  methods: {
    datePicked(value) {
      this.dateDialog = false;
      const dt = datePickerLocalTimeToUTC(value);
      this.streamRecorder(dt);
      if (value && value.getTime)
        this.streamRecorder(value.getTime());
    },
    doTimeScroll(e) {
      if (!this.device.interfaces.includes(ScryptedInterface.VideoRecorder))

52
plugins/core/ui/src/interfaces/Charger.vue
Normal file
@@ -0,0 +1,52 @@
<template>
  <v-tooltip left>
    <template v-slot:activator="{ on }">
      <v-icon
        v-on="on"
        v-if="lazyValue.chargeState === Charging"
        class="mr-1 mr-1"
        small
      >fa-plug</v-icon>
      <v-icon
        v-on="on"
        v-else-if="lazyValue.chargeState == Trickle"
        class="mr-1 mr-1"
        small
      >fa-plug-circle-minus</v-icon>
      <v-icon
        v-on="on"
        v-else
        class="mr-1 mr-1"
        small
      >fa-plug-circle-xmark</v-icon>
    </template>
    <span>{{ chargeText }}</span>
  </v-tooltip>
</template>

<script>
import { ChargeState } from '@scrypted/types';
import RPCInterface from "./RPCInterface.vue";

export default {
  mixins: [RPCInterface],
  data() {
    return {
      Charging: ChargeState.Charging,
      Trickle: ChargeState.Trickle,
      NotCharging: ChargeState.NotCharging,
    };
  },
  computed: {
    chargeText() {
      if (this.lazyValue.chargeState === "trickle") {
        return "Trickle Charging";
      }
      if (this.lazyValue.chargeState === "charging") {
        return "Charging";
      }
      return "Not Charging";
    },
  },
};
</script>
@@ -81,6 +81,7 @@ export default {
      const mediaManager = this.$scrypted.mediaManager;
      const mo = await mediaManager.createMediaObject(buffer, 'image/*');
      const detected = await this.rpc().detectObjects(mo);
      console.log(detected);
      this.lastDetection = detected;
    },
    allowDrop(ev) {

@@ -22,6 +22,7 @@ export default {
  watch: {
    device() {
      this.watchDevice();
      this.refresh();
    },
  },
  methods: {

@@ -17,10 +17,21 @@ export default {
    VueMarkdown,
    CardTitle,
  },
  data() {
    return {
      token: 0,
    }
  },
  methods: {
    refresh() {
      this.token++;
    }
  },
  asyncComputed: {
    readme: {
      async get() {
        return this.device.getReadmeMarkdown();;
        await this.token;
        return this.device.getReadmeMarkdown();
      },
      default: undefined,
    }

||||
@@ -1,6 +1,10 @@
|
||||
<template>
|
||||
<div>
|
||||
<v-checkbox v-if="lazyValue.type === 'boolean'" dense :readonly="lazyValue.readonly" v-model="booleanValue"
|
||||
<vc-date-picker v-if="lazyValue.type === 'date'" mode="date" v-model="dateValue" :is-range="lazyValue.combobox"></vc-date-picker>
|
||||
<vc-date-picker v-else-if="lazyValue.type === 'time'" mode="time" v-model="dateValue"
|
||||
class="hide-header" :is-range="lazyValue.combobox"></vc-date-picker>
|
||||
<vc-date-picker v-else-if="lazyValue.type === 'datetime'" mode="datetime" v-model="dateValue" :is-range="lazyValue.combobox"></vc-date-picker>
|
||||
<v-checkbox v-else-if="lazyValue.type === 'boolean'" dense :readonly="lazyValue.readonly" v-model="booleanValue"
|
||||
:label="lazyValue.title" :hint="lazyValue.description" :placeholder="lazyValue.placeholder" persistent-hint
|
||||
@change="save" :class="lazyValue.description ? 'mb-2' : ''"></v-checkbox>
|
||||
<div v-else-if="lazyValue.type === 'qrcode'">
|
||||
@@ -41,8 +45,7 @@
|
||||
</template>
|
||||
</DevicePicker>
|
||||
<DevicePicker v-else-if="lazyValue.type === 'interface'" v-model="lazyValue.value" :multiple="lazyValue.multiple"
|
||||
:readonly="lazyValue.readonly" :devices="interfaces" :title="lazyValue.title"
|
||||
:description="lazyValue.description">
|
||||
:readonly="lazyValue.readonly" :devices="interfaces" :title="lazyValue.title" :description="lazyValue.description">
|
||||
<template v-slot:append-outer>
|
||||
<v-btn v-if="dirty && device" color="success" @click="save" class="shift-up">
|
||||
<v-icon>send</v-icon>
|
||||
@@ -52,7 +55,7 @@
|
||||
<div v-else-if="lazyValue.type === 'clippath'" class="mb-2">
|
||||
<v-btn small block @click="editZone">{{ lazyValue.title }} </v-btn>
|
||||
<Camera :value="device" :device="device" :clipPathValue="sanitizedClipPathValue" :showDialog="editingZone"
|
||||
:hidePreview="true" @dialog="editingZoneChanged" @clipPath="lazyValue.value = $event"></Camera>
|
||||
:hidePreview="true" @dialog="editingZoneChanged" @clipPath="updateClipPath"></Camera>
|
||||
</div>
|
||||
<v-textarea v-else-if="lazyValue.type === 'textarea'" v-model="lazyValue.value" outlined persistent-hint
|
||||
:hint="lazyValue.description" :label="lazyValue.title">
|
||||
@@ -88,6 +91,7 @@ export default {
|
||||
data() {
|
||||
return {
|
||||
editingZone: false,
|
||||
clipPathThrottle: null,
|
||||
};
|
||||
},
|
||||
watch: {
|
||||
@@ -134,6 +138,25 @@ export default {
|
||||
return [];
|
||||
}
|
||||
},
|
||||
dateValue: {
|
||||
get() {
|
||||
if (this.lazyValue.combobox) {
|
||||
return {
|
||||
start: new Date(parseInt(this.lazyValue.value?.[0]) || Date.now()),
|
||||
end: new Date(parseInt(this.lazyValue.value?.[1]) || Date.now()),
|
||||
};
|
||||
}
|
||||
return new Date(parseInt(this.lazyValue.value) || Date.now());
|
||||
},
|
||||
set(val) {
|
||||
if (this.lazyValue.combobox) {
|
||||
this.lazyValue.value = [val.start.getTime(), val.end.getTime()];
|
||||
}
|
||||
else {
|
||||
this.lazyValue.value = val.getTime();
|
||||
}
|
||||
}
|
||||
},
|
||||
booleanValue: {
|
||||
get() {
|
||||
return (
|
||||
@@ -142,7 +165,7 @@ export default {
|
||||
);
|
||||
},
|
||||
set(val) {
|
||||
this.lazyValue.value = val.toString();
|
||||
this.lazyValue.value = !!val;
|
||||
},
|
||||
},
|
||||
dirty() {
|
||||
@@ -228,6 +251,17 @@ export default {
|
||||
},
|
||||
methods: {
|
||||
onChange() { },
|
||||
updateClipPath(e) {
|
||||
clearTimeout(this.clipPathThrottle);
|
||||
this.clipPathThrottle = setTimeout(() => {
|
||||
this.lazyValue.value = e;
|
||||
this.rpc().putSetting(
|
||||
this.lazyValue.key,
|
||||
this.createInputValue().value
|
||||
);
|
||||
this.onInput();
|
||||
}, 500)
|
||||
},
|
||||
editingZoneChanged(value) {
|
||||
this.editingZone = value;
|
||||
if (!value) {
|
||||
@@ -240,6 +274,7 @@ export default {
|
||||
},
|
||||
createLazyValue() {
|
||||
var type = this.value.type || "";
|
||||
|
||||
if (type.indexOf("[]") == -1 && type !== "clippath") {
|
||||
return cloneDeep(this.value);
|
||||
}
|
||||
@@ -254,6 +289,7 @@ export default {
|
||||
},
|
||||
createInputValue() {
|
||||
var type = this.lazyValue.type || "";
|
||||
|
||||
if (type.indexOf("[]") == -1 && type !== "clippath") {
|
||||
return this.lazyValue;
|
||||
}
|
||||
@@ -276,4 +312,8 @@ export default {
|
||||
.shift-up {
|
||||
margin-top: -8px;
|
||||
}
|
||||
</style>
|
||||
|
||||
.hide-header .vc-date {
|
||||
display: none !important;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -40,11 +40,11 @@
|
||||
<v-btn v-on="on" small>
|
||||
<v-icon x-small>fa fa-calendar-alt</v-icon>
|
||||
|
||||
{{ year }}-{{ month }}-{{ date }}
|
||||
{{ new Date(date).getFullYear() }}-{{ new Date(date).getMonth() }}-{{ new Date(date).getDate() }}
|
||||
</v-btn>
|
||||
</template>
|
||||
<v-card>
|
||||
<v-date-picker @input="onDate"> </v-date-picker>
|
||||
<vc-date-picker mode="date" @input="onDate" v-model="date"> </vc-date-picker>
|
||||
</v-card>
|
||||
</v-dialog>
|
||||
<v-btn text small disabled v-if="pages">{{ pageRange }}</v-btn>
|
||||
@@ -70,7 +70,6 @@
|
||||
</div>
|
||||
</template>
|
||||
<script>
|
||||
import { datePickerLocalTimeToUTC } from "../common/date";
|
||||
import { fetchClipThumbnail, fetchClipUrl } from "../common/videoclip";
|
||||
import RPCInterface from "./RPCInterface.vue";
|
||||
import Vue from "vue";
|
||||
@@ -129,14 +128,11 @@ export default {
|
||||
clips: {
|
||||
async get() {
|
||||
await this.refreshNonce;
|
||||
const date = new Date();
|
||||
const date = new Date(this.date);
|
||||
date.setMilliseconds(0);
|
||||
date.setSeconds(0);
|
||||
date.setMinutes(0);
|
||||
date.setHours(0);
|
||||
date.setFullYear(this.year);
|
||||
date.setMonth(this.month - 1);
|
||||
date.setDate(this.date);
|
||||
console.log(date);
|
||||
const dt = date.getTime();
|
||||
const ret = await this.device.getVideoClips({
|
||||
@@ -165,9 +161,7 @@ export default {
|
||||
fetchingImages: [],
|
||||
page: 1,
|
||||
dialog: false,
|
||||
date: new Date().getDate(),
|
||||
month: new Date().getMonth() + 1,
|
||||
year: new Date().getFullYear(),
|
||||
date: Date.now(),
|
||||
};
|
||||
},
|
||||
methods: {
|
||||
@@ -202,11 +196,8 @@ export default {
|
||||
onDate(value) {
|
||||
this.page = 1;
|
||||
this.dialog = false;
|
||||
const dt = datePickerLocalTimeToUTC(value);
|
||||
const d = new Date(dt);
|
||||
this.month = d.getMonth() + 1;
|
||||
this.date = d.getDate();
|
||||
this.year = d.getFullYear();
|
||||
console.log(value);
|
||||
this.date = value;
|
||||
this.refresh();
|
||||
},
|
||||
},
|
||||
|
||||
@@ -1,33 +1,12 @@
|
||||
<template>
|
||||
<v-layout row wrap justify-center align-center>
|
||||
<v-flex xs3 md2 lg2 xl1 v-for="day of days" :key="day">
|
||||
<v-btn
|
||||
block
|
||||
class="white--text"
|
||||
@click="toggleDay(day)"
|
||||
color="info"
|
||||
small
|
||||
:text="!lazyValue[day]"
|
||||
>{{ day.substring(0, 3) }}</v-btn>
|
||||
<v-btn block class="white--text" @click="toggleDay(day)" color="info" small :text="!lazyValue[day]">{{
|
||||
day.substring(0, 3) }}</v-btn>
|
||||
</v-flex>
|
||||
<v-flex xs12>
|
||||
<v-layout justify-center align-center>
|
||||
<v-time-picker v-model="time" format="24hr" @input="onChange"></v-time-picker>
|
||||
</v-layout>
|
||||
</v-flex>
|
||||
<v-flex xs12>
|
||||
<v-layout justify-center align-center>
|
||||
<v-flex xs12 md8 lg6 xl4>
|
||||
<v-select
|
||||
xs3
|
||||
reverse
|
||||
:items="clockTypes"
|
||||
solo
|
||||
item-value="id"
|
||||
v-model="lazyValue.clockType"
|
||||
@input="onChange"
|
||||
></v-select>
|
||||
</v-flex>
|
||||
<vc-date-picker v-model="time" class="hide-header" @input="onChange" mode="time"></vc-date-picker>
|
||||
</v-layout>
|
||||
</v-flex>
|
||||
</v-layout>
|
||||
@@ -52,62 +31,37 @@ function zeroPrefix(arr, len) {
|
||||
arr.push(i >= 10 ? i.toString() : "0" + i);
|
||||
}
|
||||
}
|
||||
const clockTypes = [
|
||||
{
|
||||
id: "AM",
|
||||
text: "AM"
|
||||
},
|
||||
{
|
||||
id: "PM",
|
||||
text: "PM"
|
||||
},
|
||||
{
|
||||
text: "24 Hour Clock",
|
||||
id: "TwentyFourHourClock"
|
||||
},
|
||||
{
|
||||
text: "Before Sunrise",
|
||||
id: "BeforeSunrise"
|
||||
},
|
||||
{
|
||||
text: "After Sunrise",
|
||||
id: "AfterSunrise"
|
||||
},
|
||||
{
|
||||
text: "Before Sunset",
|
||||
id: "BeforeSunset"
|
||||
},
|
||||
{
|
||||
text: "After Sunset",
|
||||
id: "AfterSunset"
|
||||
}
|
||||
];
|
||||
|
||||
|
||||
zeroPrefix(hours, 24);
|
||||
zeroPrefix(minutes, 59);
|
||||
|
||||
export default {
|
||||
mixins: [RPCInterface],
|
||||
data: function() {
|
||||
data: function () {
|
||||
return {
|
||||
clockTypes,
|
||||
days,
|
||||
};
|
||||
},
|
||||
computed: {
|
||||
time: {
|
||||
get() {
|
||||
return `${this.lazyValue.hour}:${this.lazyValue.minute}`;
|
||||
const date = new Date();
|
||||
date.setMilliseconds(0);
|
||||
date.setSeconds(0);
|
||||
date.setMinutes(this.lazyValue.minute);
|
||||
date.setHours(this.lazyValue.hour);
|
||||
return date;
|
||||
},
|
||||
set(value) {
|
||||
this.lazyValue.hour = value.split(":")[0];
|
||||
this.lazyValue.minute = value.split(":")[1];
|
||||
this.lazyValue.hour = value.getHours();
|
||||
this.lazyValue.minute = value.getMinutes();
|
||||
this.onChange();
|
||||
}
|
||||
}
|
||||
},
|
||||
methods: {
|
||||
toggleDay: function(day) {
|
||||
toggleDay: function (day) {
|
||||
this.lazyValue[day] = !this.lazyValue[day];
|
||||
this.onChange();
|
||||
},
|
||||
@@ -117,11 +71,10 @@ export default {
|
||||
ret.minute = ret.minute || 0;
|
||||
return ret;
|
||||
},
|
||||
onChange: function() {
|
||||
onChange: function () {
|
||||
const schedule = {
|
||||
hour: parseInt(this.lazyValue.hour) || 0,
|
||||
minute: parseInt(this.lazyValue.minute) || 0,
|
||||
clockType: this.lazyValue.clockType || "AM",
|
||||
};
|
||||
days.forEach(day => {
|
||||
schedule[day] = this.lazyValue[day] || false;
|
||||
@@ -139,9 +92,15 @@ export default {
|
||||
-webkit-appearance: none;
|
||||
appearance: none;
|
||||
}
|
||||
|
||||
.semicolon-pad {
|
||||
margin-left: 2px;
|
||||
margin-right: 2px;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
|
||||
.hide-header .vc-date {
|
||||
display: none !important;
|
||||
}
|
||||
</style>
|
||||
@@ -10,6 +10,13 @@ import './plugins/is-mobile';
import Launcher from './Launcher.vue'
import './registerServiceWorker'

import VCalendar from 'v-calendar';

// Use v-calendar & v-date-picker components
Vue.use(VCalendar, {
  componentPrefix: 'vc', // Use <vc-calendar /> instead of <v-calendar />
});

// STYLES
// Main Theme SCSS
// import './assets/scss/theme.scss'

@@ -58,6 +58,8 @@ import {
  faLightbulb,
  faToggleOn,
  faPlug,
  faPlugCircleMinus,
  faPlugCircleXmark,
  faExclamationTriangle,
  faSun,
  faCode,
@@ -150,6 +152,8 @@ const icons: IconDefinition[] =[
  faLightbulb,
  faToggleOn,
  faPlug,
  faPlugCircleMinus,
  faPlugCircleXmark,
  faExclamationTriangle,
  faSun,
  faCode,
|
||||
|
||||
4
plugins/coreml/package-lock.json
generated
@@ -1,12 +1,12 @@
{
  "name": "@scrypted/coreml",
  "version": "0.1.5",
  "version": "0.1.12",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "": {
      "name": "@scrypted/coreml",
      "version": "0.1.5",
      "version": "0.1.12",
      "devDependencies": {
        "@scrypted/sdk": "file:../../sdk"
      }

@@ -41,5 +41,5 @@
  "devDependencies": {
    "@scrypted/sdk": "file:../../sdk"
  },
  "version": "0.1.5"
  "version": "0.1.12"
}

@@ -9,7 +9,7 @@ from PIL import Image
import asyncio
import concurrent.futures

predictExecutor = concurrent.futures.ThreadPoolExecutor(2, "CoreML-Predict")
predictExecutor = concurrent.futures.ThreadPoolExecutor(8, "CoreML-Predict")

def parse_label_contents(contents: str):
    lines = contents.splitlines()
@@ -42,6 +42,7 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
        labels_contents = open(labelsFile, 'r').read()
        self.labels = parse_label_contents(labels_contents)
        self.loop = asyncio.get_event_loop()
        self.minThreshold = .2

    # width, height, channels
    def get_input_details(self) -> Tuple[int, int, int]:
@@ -53,9 +54,9 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
    async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
        # run in executor if this is the plugin loop
        if asyncio.get_event_loop() is self.loop:
            out_dict = await asyncio.get_event_loop().run_in_executor(predictExecutor, lambda: self.model.predict({'image': input, 'confidenceThreshold': .2 }))
            out_dict = await asyncio.get_event_loop().run_in_executor(predictExecutor, lambda: self.model.predict({'image': input, 'confidenceThreshold': self.minThreshold }))
        else:
            out_dict = self.model.predict({'image': input, 'confidenceThreshold': .2 })
            out_dict = self.model.predict({'image': input, 'confidenceThreshold': self.minThreshold })

        coordinatesList = out_dict['coordinates']

@@ -65,7 +66,7 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
            values = confidenceList
            maxConfidenceIndex = max(range(len(values)), key=values.__getitem__)
            maxConfidence = confidenceList[maxConfidenceIndex]
            if maxConfidence < .2:
            if maxConfidence < self.minThreshold:
                continue

            coordinates = coordinatesList[index]
@@ -90,6 +91,5 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
            ))
            objs.append(obj)

        allowList = settings.get('allowList', None) if settings else None
        ret = self.create_detection_result(objs, src_size, allowList, cvss)
        ret = self.create_detection_result(objs, src_size, cvss)
        return ret

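`detect_once` only hops onto the `predictExecutor` thread pool when it is already running on the plugin's own event loop; otherwise it calls the Core ML model synchronously. A generic sketch of that dispatch, with a stand-in blocking function in place of the real `model.predict`:

```python
import asyncio
import concurrent.futures

predict_executor = concurrent.futures.ThreadPoolExecutor(8, "Predict")

def blocking_predict(payload):
    # Stand-in for a CPU-bound model.predict(...) call.
    return {"echo": payload}

class Detector:
    def __init__(self, loop: asyncio.AbstractEventLoop):
        self.loop = loop

    async def detect_once(self, payload):
        if asyncio.get_event_loop() is self.loop:
            # On the plugin loop: offload so the event loop stays responsive.
            return await asyncio.get_event_loop().run_in_executor(
                predict_executor, lambda: blocking_predict(payload))
        # Already off the plugin loop: call it directly.
        return blocking_predict(payload)

async def main():
    detector = Detector(asyncio.get_event_loop())
    print(await detector.detect_once({"image": "..."}))

if __name__ == "__main__":
    asyncio.run(main())
```

Raising the pool from 2 to 8 workers simply allows more concurrent predictions to be queued onto background threads without blocking the loop.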
@@ -1 +0,0 @@
../../tensorflow-lite/src/pipeline
@@ -1,10 +1,5 @@
# plugin
Pillow>=5.4.1
PyGObject>=3.30.4
coremltools~=6.1
av>=10.0.0; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'
coremltools

# sort_oh
scipy
filterpy
numpy
# pillow for anything not intel linux, pillow-simd is available on x64 linux
Pillow>=5.4.1; sys_platform != 'linux' or platform_machine != 'x86_64'
pillow-simd; sys_platform == 'linux' and platform_machine == 'x86_64'

plugins/ffmpeg-camera/package-lock.json
generated
6
plugins/ffmpeg-camera/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/ffmpeg-camera",
|
||||
"version": "0.0.20",
|
||||
"version": "0.0.21",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/ffmpeg-camera",
|
||||
"version": "0.0.20",
|
||||
"version": "0.0.21",
|
||||
"license": "Apache",
|
||||
"dependencies": {
|
||||
"@koush/axios-digest-auth": "^0.8.5",
|
||||
@@ -36,7 +36,7 @@
|
||||
},
|
||||
"../../sdk": {
|
||||
"name": "@scrypted/sdk",
|
||||
"version": "0.2.68",
|
||||
"version": "0.2.86",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/ffmpeg-camera",
|
||||
"version": "0.0.20",
|
||||
"version": "0.0.21",
|
||||
"description": "FFmpeg Camera Plugin for Scrypted",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache",
|
||||
|
||||
@@ -144,7 +144,7 @@ export abstract class CameraBase<T extends ResponseMediaStreamOptions> extends S
        if (key === 'defaultStream') {
            const vsos = await this.getVideoStreamOptions();
            const stream = vsos.find(vso => vso.name === value);
            this.storage.setItem('defaultStream', stream?.id);
            this.storage.setItem('defaultStream', stream?.id || '');
        }
        else {
            this.storage.setItem(key, value.toString());

||||
53
plugins/gstreamer-camera/package-lock.json
generated
53
plugins/gstreamer-camera/package-lock.json
generated
@@ -1,13 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/gstreamer-camera",
|
||||
"version": "0.0.3",
|
||||
"version": "0.0.5",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/gstreamer-camera",
|
||||
"version": "0.0.3",
|
||||
"hasInstallScript": true,
|
||||
"version": "0.0.5",
|
||||
"license": "Apache",
|
||||
"dependencies": {
|
||||
"@koush/axios-digest-auth": "^0.8.5",
|
||||
@@ -37,39 +36,40 @@
|
||||
},
|
||||
"../../sdk": {
|
||||
"name": "@scrypted/sdk",
|
||||
"version": "0.0.199",
|
||||
"version": "0.2.86",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@babel/preset-typescript": "^7.16.7",
|
||||
"@babel/preset-typescript": "^7.18.6",
|
||||
"adm-zip": "^0.4.13",
|
||||
"axios": "^0.21.4",
|
||||
"babel-loader": "^8.2.3",
|
||||
"babel-loader": "^9.1.0",
|
||||
"babel-plugin-const-enum": "^1.1.0",
|
||||
"esbuild": "^0.13.8",
|
||||
"esbuild": "^0.15.9",
|
||||
"ncp": "^2.0.0",
|
||||
"raw-loader": "^4.0.2",
|
||||
"rimraf": "^3.0.2",
|
||||
"tmp": "^0.2.1",
|
||||
"webpack": "^5.59.0"
|
||||
"ts-loader": "^9.4.2",
|
||||
"typescript": "^4.9.4",
|
||||
"webpack": "^5.75.0",
|
||||
"webpack-bundle-analyzer": "^4.5.0"
|
||||
},
|
||||
"bin": {
|
||||
"scrypted-changelog": "bin/scrypted-changelog.js",
|
||||
"scrypted-debug": "bin/scrypted-debug.js",
|
||||
"scrypted-deploy": "bin/scrypted-deploy.js",
|
||||
"scrypted-deploy-debug": "bin/scrypted-deploy-debug.js",
|
||||
"scrypted-package-json": "bin/scrypted-package-json.js",
|
||||
"scrypted-readme": "bin/scrypted-readme.js",
|
||||
"scrypted-setup-project": "bin/scrypted-setup-project.js",
|
||||
"scrypted-webpack": "bin/scrypted-webpack.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^16.11.1",
|
||||
"@types/node": "^18.11.18",
|
||||
"@types/stringify-object": "^4.0.0",
|
||||
"stringify-object": "^3.3.0",
|
||||
"ts-node": "^10.4.0",
|
||||
"typedoc": "^0.22.8",
|
||||
"typescript-json-schema": "^0.50.1",
|
||||
"webpack-bundle-analyzer": "^4.5.0"
|
||||
"typedoc": "^0.23.21"
|
||||
}
|
||||
},
|
||||
"../sdk": {
|
||||
@@ -141,9 +141,9 @@
|
||||
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
|
||||
},
|
||||
"node_modules/url-parse": {
|
||||
"version": "1.5.3",
|
||||
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz",
|
||||
"integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==",
|
||||
"version": "1.5.10",
|
||||
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
|
||||
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
|
||||
"dependencies": {
|
||||
"querystringify": "^2.1.1",
|
||||
"requires-port": "^1.0.0"
|
||||
@@ -174,23 +174,24 @@
|
||||
"@scrypted/sdk": {
|
||||
"version": "file:../../sdk",
|
||||
"requires": {
|
||||
"@babel/preset-typescript": "^7.16.7",
|
||||
"@types/node": "^16.11.1",
|
||||
"@babel/preset-typescript": "^7.18.6",
|
||||
"@types/node": "^18.11.18",
|
||||
"@types/stringify-object": "^4.0.0",
|
||||
"adm-zip": "^0.4.13",
|
||||
"axios": "^0.21.4",
|
||||
"babel-loader": "^8.2.3",
|
||||
"babel-loader": "^9.1.0",
|
||||
"babel-plugin-const-enum": "^1.1.0",
|
||||
"esbuild": "^0.13.8",
|
||||
"esbuild": "^0.15.9",
|
||||
"ncp": "^2.0.0",
|
||||
"raw-loader": "^4.0.2",
|
||||
"rimraf": "^3.0.2",
|
||||
"stringify-object": "^3.3.0",
|
||||
"tmp": "^0.2.1",
|
||||
"ts-loader": "^9.4.2",
|
||||
"ts-node": "^10.4.0",
|
||||
"typedoc": "^0.22.8",
|
||||
"typescript-json-schema": "^0.50.1",
|
||||
"webpack": "^5.59.0",
|
||||
"typedoc": "^0.23.21",
|
||||
"typescript": "^4.9.4",
|
||||
"webpack": "^5.75.0",
|
||||
"webpack-bundle-analyzer": "^4.5.0"
|
||||
}
|
||||
},
|
||||
@@ -229,9 +230,9 @@
|
||||
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
|
||||
},
|
||||
"url-parse": {
|
||||
"version": "1.5.3",
|
||||
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.3.tgz",
|
||||
"integrity": "sha512-IIORyIQD9rvj0A4CLWsHkBBJuNqWpFQe224b6j9t/ABmquIS0qDU2pY6kl6AuOrL5OkCXHMCFNe1jBcuAggjvQ==",
|
||||
"version": "1.5.10",
|
||||
"resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
|
||||
"integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
|
||||
"requires": {
|
||||
"querystringify": "^2.1.1",
|
||||
"requires-port": "^1.0.0"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/gstreamer-camera",
|
||||
"version": "0.0.3",
|
||||
"version": "0.0.5",
|
||||
"description": "GStreamer Camera Plugin for Scrypted",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import sdk, { ScryptedDeviceBase, DeviceProvider, Settings, Setting, ScryptedDeviceType, VideoCamera, MediaObject, MediaStreamOptions, ScryptedInterface, FFmpegInput, Camera, PictureOptions, SettingValue, DeviceCreator, DeviceCreatorSettings } from "@scrypted/sdk";
|
||||
import sdk, { ScryptedDeviceBase, DeviceProvider, Settings, Setting, ScryptedDeviceType, VideoCamera, MediaObject, MediaStreamOptions, ScryptedInterface, FFmpegInput, Camera, PictureOptions, SettingValue, DeviceCreator, DeviceCreatorSettings, ResponseMediaStreamOptions } from "@scrypted/sdk";
|
||||
import { recommendRebroadcast } from "./recommend";
|
||||
import AxiosDigestAuth from '@koush/axios-digest-auth';
|
||||
import https from 'https';
|
||||
@@ -14,7 +14,7 @@ export interface UrlMediaStreamOptions extends MediaStreamOptions {
|
||||
url: string;
|
||||
}
|
||||
|
||||
export abstract class CameraBase<T extends MediaStreamOptions> extends ScryptedDeviceBase implements Camera, VideoCamera, Settings {
|
||||
export abstract class CameraBase<T extends ResponseMediaStreamOptions> extends ScryptedDeviceBase implements Camera, VideoCamera, Settings {
|
||||
snapshotAuth: AxiosDigestAuth;
|
||||
pendingPicture: Promise<MediaObject>;
|
||||
|
||||
@@ -194,7 +194,7 @@ export abstract class CameraBase<T extends MediaStreamOptions> extends ScryptedD
|
||||
if (key === 'defaultStream') {
|
||||
const vsos = await this.getVideoStreamOptions();
|
||||
const stream = vsos.find(vso => vso.name === value);
|
||||
this.storage.setItem('defaultStream', stream?.id);
|
||||
this.storage.setItem('defaultStream', stream?.id || '');
|
||||
}
|
||||
else {
|
||||
this.storage.setItem(key, value.toString());
|
||||
@@ -220,7 +220,7 @@ export abstract class CameraBase<T extends MediaStreamOptions> extends ScryptedD
|
||||
}
|
||||
}
|
||||
|
||||
export abstract class CameraProviderBase<T extends MediaStreamOptions> extends ScryptedDeviceBase implements DeviceProvider, DeviceCreator {
|
||||
export abstract class CameraProviderBase<T extends ResponseMediaStreamOptions> extends ScryptedDeviceBase implements DeviceProvider, DeviceCreator {
|
||||
devices = new Map<string, any>();
|
||||
|
||||
constructor(nativeId?: string) {
|
||||
@@ -234,6 +234,9 @@ export abstract class CameraProviderBase<T extends MediaStreamOptions> extends S
|
||||
recommendRebroadcast();
|
||||
}
|
||||
|
||||
async releaseDevice(id: string, nativeId: string): Promise<void> {
|
||||
}
|
||||
|
||||
async createDevice(settings: DeviceCreatorSettings): Promise<string> {
|
||||
const nativeId = randomBytes(4).toString('hex');
|
||||
const name = settings.newCamera.toString();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import sdk, { FFmpegInput, MediaObject, MediaStreamOptions, Setting, SettingValue } from "@scrypted/sdk";
|
||||
import sdk, { FFmpegInput, MediaObject, MediaStreamOptions, ResponseMediaStreamOptions, Setting, SettingValue } from "@scrypted/sdk";
|
||||
import child_process, { ChildProcess } from "child_process";
|
||||
import { CameraProviderBase, CameraBase, UrlMediaStreamOptions } from "./common";
|
||||
// import {} from "../../../common/src/stream-parser"
|
||||
@@ -8,10 +8,10 @@ import { listenZero } from "../../../common/src/listen-cluster"
|
||||
|
||||
const { log, deviceManager, mediaManager } = sdk;
|
||||
|
||||
class GStreamerCamera extends CameraBase<MediaStreamOptions> {
|
||||
class GStreamerCamera extends CameraBase<ResponseMediaStreamOptions> {
|
||||
currentProcess: ChildProcess;
|
||||
|
||||
createGStreamerMediaStreamOptions(gstreamerInput: string, index: number): MediaStreamOptions {
|
||||
createGStreamerMediaStreamOptions(gstreamerInput: string, index: number): ResponseMediaStreamOptions {
|
||||
return {
|
||||
id: `channel${index}`,
|
||||
name: `Stream ${index + 1}`,
|
||||
@@ -32,7 +32,7 @@ class GStreamerCamera extends CameraBase<MediaStreamOptions> {
|
||||
return gstreamerInputs;
|
||||
}
|
||||
|
||||
getRawVideoStreamOptions(): MediaStreamOptions[] {
|
||||
getRawVideoStreamOptions(): ResponseMediaStreamOptions[] {
|
||||
const gstreamerInputs = this.getGStreamerInputs();
|
||||
|
||||
// filter out empty strings.
|
||||
@@ -86,7 +86,7 @@ class GStreamerCamera extends CameraBase<MediaStreamOptions> {
|
||||
];
|
||||
}
|
||||
|
||||
async createVideoStream(options?: MediaStreamOptions): Promise<MediaObject> {
|
||||
async createVideoStream(options?: ResponseMediaStreamOptions): Promise<MediaObject> {
|
||||
const index = this.getRawVideoStreamOptions()?.findIndex(vso => vso.id === options.id);
|
||||
const gstreamerInputs = this.getGStreamerInputs();
|
||||
const gstreamerInput = gstreamerInputs[index];
|
||||
@@ -147,7 +147,7 @@ class GStreamerCamera extends CameraBase<MediaStreamOptions> {
|
||||
|
||||
}
|
||||
|
||||
class GStreamerProvider extends CameraProviderBase<MediaStreamOptions> {
|
||||
class GStreamerProvider extends CameraProviderBase<ResponseMediaStreamOptions> {
|
||||
createCamera(nativeId: string): GStreamerCamera {
|
||||
return new GStreamerCamera(nativeId, this);
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@ The Channel number is the hundreds digit and (sub-)stream is ones digit:

# Troubleshooting
## General
* Not receiving motion alerts in the device's Scrypted event log? Check all of the following: **(1)** device has a motion detection grid drawn and enabled, **(2)** user or group access permissions of account used for device **(3)** do not use self-signed certs for HTTPS on the device, and **(4)** `CGI` and `ISAPI` integration protocol/service on device is enabled.
* Not receiving motion alerts in the device's Scrypted event log? Check all of the following: **(1)** device has a motion detection grid drawn and enabled, **(2)** user or group access permissions of account used for device **(3)** do not use self-signed certs for HTTPS on the device, **(4)** `CGI` and `ISAPI` integration protocol/service on device is enabled, and **(5)** that the authentication method on the device is set to "digest".
* If device has HTTPS enabled, try disabling HTTPS on the device to see if that resolves issue (do not use self-signed certs).
* If device has enabled user lockout, max connections, concurrent requests, etc., try disabling and/or increasing to max allowed for troubleshooting.
* Does your account (`Username`) have proper user and/or group permissions? Try granting all permissions for testing.

||||
503 plugins/hikvision/package-lock.json (generated)
(Generated lockfile diff, abridged: "@scrypted/hikvision" 0.0.124 -> 0.0.126; linked "../../sdk" 0.2.68 -> 0.2.87; "@types/node" 16.9.1 -> 18.15.11 and moved to devDependencies; lockfile entries for highland, multiparty, net-keepalive, ffi-napi, ref-napi, their transitive dependencies, and the related @types packages removed along with the dropped dependencies.)
@@ -1,6 +1,6 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.124",
"version": "0.0.126",
"description": "Hikvision Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -38,10 +38,12 @@
"@koush/axios-digest-auth": "^0.8.5",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^16.9.1",
"@types/xml2js": "^0.4.9",
"axios": "^0.23.0",
"lodash": "^4.17.21",
"xml2js": "^0.4.23"
},
"devDependencies": {
"@types/node": "^18.15.11"
}
}
@@ -8,6 +8,8 @@ import { OnvifIntercom } from "../../onvif/src/onvif-intercom";
import { RtspProvider, RtspSmartCamera, UrlMediaStreamOptions } from "../../rtsp/src/rtsp";
import { HikvisionCameraAPI, HikvisionCameraEvent } from "./hikvision-camera-api";
import { hikvisionHttpsAgent } from './probe';
import { startRtpForwarderProcess } from '../../webrtc/src/rtp-forwarders';
import { RtpPacket } from '../../../external/werift/packages/rtp/src/rtp/rtp';

const { mediaManager } = sdk;

@@ -21,8 +23,8 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom {
detectedChannels: Promise<Map<string, MediaStreamOptions>>;
client: HikvisionCameraAPI;
onvifIntercom = new OnvifIntercom(this);
cp: ChildProcess;

activeIntercom: Awaited<ReturnType<typeof startRtpForwarderProcess>>;

constructor(nativeId: string, provider: RtspProvider) {
super(nativeId, provider);

@@ -360,13 +362,11 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom {

async startIntercom(media: MediaObject): Promise<void> {
if (this.storage.getItem('twoWayAudio') === 'ONVIF') {
this.activeIntercom?.kill();
this.activeIntercom = undefined;
const options = await this.getConstructedVideoStreamOptions();
const stream = options[0];
const url = new URL(stream.url);
// amcrest onvif requires this proto query parameter, or onvif two way
// will not activate.
url.searchParams.set('proto', 'Onvif');
this.onvifIntercom.url = url.toString();
this.onvifIntercom.url = stream.url;
return this.onvifIntercom.startIntercom(media);
}

@@ -390,7 +390,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom {
}
}
catch (e) {
this.console.error('Fialure while determining two way audio codec', e);
this.console.error('Failure while determining two way audio codec', e);
}

if (codec === 'G.711ulaw') {
@@ -415,76 +415,64 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom {
const buffer = await mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput);
const ffmpegInput = JSON.parse(buffer.toString()) as FFmpegInput;

const args = ffmpegInput.inputArguments.slice();
args.unshift('-hide_banner');

args.push(
"-vn",
'-ar', '8000',
'-ac', '1',
'-acodec', codec,
'-f', format,
'pipe:3',
);

this.console.log('ffmpeg intercom', args);

const ffmpeg = await mediaManager.getFFmpegPath();
this.cp = child_process.spawn(ffmpeg, args, {
stdio: ['pipe', 'pipe', 'pipe', 'pipe'],
const passthrough = new PassThrough();
const open = `http://${this.getHttpAddress()}/ISAPI/System/TwoWayAudio/channels/${channel}/open`;
const { data } = await this.getClient().digestAuth.request({
httpsAgent: hikvisionHttpsAgent,
method: 'PUT',
url: open,
});
this.cp.on('exit', () => this.cp = undefined);
ffmpegLogInitialOutput(this.console, this.cp);
const socket = this.cp.stdio[3] as Readable;
this.console.log('two way audio opened', data);

(async () => {
const passthrough = new PassThrough();
const url = `http://${this.getHttpAddress()}/ISAPI/System/TwoWayAudio/channels/${channel}/audioData`;
this.console.log('posting audio data to', url);

try {
const open = `http://${this.getHttpAddress()}/ISAPI/System/TwoWayAudio/channels/${channel}/open`;
const { data } = await this.getClient().digestAuth.request({
httpsAgent: hikvisionHttpsAgent,
method: 'PUT',
url: open,
});
this.console.log('two way audio opened', data);
const put = this.getClient().digestAuth.request({
httpsAgent: hikvisionHttpsAgent,
method: 'PUT',
url,
headers: {
'Content-Type': 'application/octet-stream',
// 'Connection': 'close',
'Content-Length': '0'
},
data: passthrough,
});

const url = `http://${this.getHttpAddress()}/ISAPI/System/TwoWayAudio/channels/${channel}/audioData`;
this.console.log('posting audio data to', url);

// seems the dahua doorbells preferred 1024 chunks. should investigate adts
// parsing and sending multipart chunks instead.
this.getClient().digestAuth.request({
httpsAgent: hikvisionHttpsAgent,
method: 'PUT',
url,
headers: {
'Content-Type': 'application/octet-stream',
// 'Connection': 'close',
'Content-Length': '0'
},
data: passthrough,
});


while (true) {
const data = await readLength(socket, 1024);
passthrough.push(data);
}
}
catch (e) {
}
finally {
this.console.log('audio finished');
passthrough.end();
let available = Buffer.alloc(0);
this.activeIntercom?.kill();
const forwarder = this.activeIntercom = await startRtpForwarderProcess(this.console, ffmpegInput, {
audio: {
onRtp: rtp => {
const parsed = RtpPacket.deSerialize(rtp);
available = Buffer.concat([available, parsed.payload]);
if (available.length > 1024) {
passthrough.push(available.subarray(0, 1024));
available = available.subarray(1024);
}
},
codecCopy: codec,
encoderArguments: [
'-ar', '8000',
'-ac', '1',
'-acodec', codec,
]
}
});

forwarder.killPromise.finally(() => {
this.console.log('audio finished');
passthrough.end();
this.stopIntercom();
})();
});

put.finally(() => forwarder.kill());
}


async stopIntercom(): Promise<void> {
this.activeIntercom?.kill();
this.activeIntercom = undefined;

if (this.storage.getItem('twoWayAudio') === 'ONVIF') {
return this.onvifIntercom.stopIntercom();
}
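The rewritten two-way audio path above no longer reads a raw ffmpeg pipe in a loop; instead the RTP forwarder's onRtp callback buffers decoded payloads and pushes fixed 1024-byte chunks into the PassThrough that is PUT to the camera's audioData endpoint. A minimal sketch of just that chunking step, using only Node's stream and Buffer APIs (the ChunkedAudioWriter name and its default chunk size are illustrative, not part of the plugin):

    import { PassThrough } from "stream";

    // Illustrative helper: buffer arbitrary-sized RTP payloads and emit
    // fixed-size chunks, mirroring the 1024-byte chunking in the diff above.
    class ChunkedAudioWriter {
        private pending = Buffer.alloc(0);

        constructor(private readonly out: PassThrough, private readonly chunkSize = 1024) {
        }

        // Called once per decoded RTP payload.
        push(payload: Buffer) {
            this.pending = Buffer.concat([this.pending, payload]);
            // Emit as many full chunks as are available; keep the remainder buffered.
            while (this.pending.length >= this.chunkSize) {
                this.out.push(this.pending.subarray(0, this.chunkSize));
                this.pending = this.pending.subarray(this.chunkSize);
            }
        }

        // Flush any trailing partial chunk and end the stream.
        end() {
            if (this.pending.length)
                this.out.push(this.pending);
            this.out.end();
        }
    }

The plugin keeps the same idea inline in its onRtp handler; per the comment in the diff, 1024-byte chunks were chosen because the Dahua doorbells preferred them, and ADTS-aware multipart chunking is noted as a possible follow-up.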
48 plugins/homekit/package-lock.json (generated)
(Generated lockfile diff, abridged: "@scrypted/homekit" 1.2.20 -> 1.2.23; linked "../../sdk" 0.2.85 -> 0.2.86; "@types/lodash" 4.14.191 -> 4.14.192; "@types/node" 18.15.5 -> 18.15.11; "mkdirp" 2.1.5 -> 2.1.6.)
@@ -1,6 +1,6 @@
{
"name": "@scrypted/homekit",
"version": "1.2.20",
"version": "1.2.23",
"description": "HomeKit Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
@@ -38,14 +38,14 @@
"check-disk-space": "^3.3.1",
"hap-nodejs": "^0.11.0",
"lodash": "^4.17.21",
"mkdirp": "^2.1.5"
"mkdirp": "^2.1.6"
},
"devDependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/debug": "^4.1.7",
"@types/lodash": "^4.14.191",
"@types/node": "^18.15.5",
"@types/lodash": "^4.14.192",
"@types/node": "^18.15.11",
"@types/url-parse": "^1.4.8"
}
}
@@ -192,7 +192,7 @@ The latest troubleshooting guide for all known streaming or recording issues can
this.storage.setItem(key, JSON.stringify(value));
}
else {
this.storage.setItem(key, value?.toString());
this.storage.setItem(key, value?.toString() || '');
}

if (key === 'detectAudio' || key === 'linkedMotionSensor' || key === 'objectDetectionContactSensors') {

@@ -303,13 +303,19 @@ addSupportedType({
}
}

// if the camera is a device provider, merge in child devices and
// ensure the devices are skipped by the rest of homekit by
// reporting that they've been merged
if (device.interfaces.includes(ScryptedInterface.DeviceProvider)) {
// merge in lights
const { devices } = mergeOnOffDevicesByType(device as ScryptedDevice as ScryptedDevice & DeviceProvider, accessory, ScryptedDeviceType.Light);
mergeOnOffDevicesByType(device as ScryptedDevice as ScryptedDevice & DeviceProvider, accessory, ScryptedDeviceType.Light).devices.forEach(device => {
homekitPlugin.mergedDevices.add(device.id)
});

// ensure child devices are skipped by the rest of homekit by
// reporting that they've been merged
devices.map(device => homekitPlugin.mergedDevices.add(device.id));
// merge in sirens
mergeOnOffDevicesByType(device as ScryptedDevice as ScryptedDevice & DeviceProvider, accessory, ScryptedDeviceType.Siren).devices.forEach(device => {
homekitPlugin.mergedDevices.add(device.id)
});
}

return accessory;

@@ -15,9 +15,9 @@ import os from 'os';
import { getAddressOverride } from '../../address-override';
import { AudioStreamingCodecType, CameraController, CameraStreamingDelegate, PrepareStreamCallback, PrepareStreamRequest, PrepareStreamResponse, StartStreamRequest, StreamingRequest, StreamRequestCallback, StreamRequestTypes } from '../../hap';
import type { HomeKitPlugin } from "../../main";
import { createReturnAudioSdp } from './camera-return-audio';
import { createSnapshotHandler } from '../camera/camera-snapshot';
import { getDebugMode } from './camera-debug-mode-storage';
import { createReturnAudioSdp } from './camera-return-audio';
import { startCameraStreamFfmpeg } from './camera-streaming-ffmpeg';
import { CameraStreamingSession } from './camera-streaming-session';
import { getStreamingConfiguration } from './camera-utils';
@@ -375,6 +375,12 @@ export function createCameraStreamingDelegate(device: ScryptedDevice & VideoCame
let playing = false;
session.audioReturn.once('message', async buffer => {
try {
const decrypted = srtpSession.decrypt(buffer);
const rtp = RtpPacket.deSerialize(decrypted);

if (rtp.header.payloadType !== session.startRequest.audio.pt)
return;

const { clientPromise, url } = await listenZeroSingleClient();
const rtspUrl = url.replace('tcp', 'rtsp');
let sdp = createReturnAudioSdp(session.startRequest.audio);

@@ -64,6 +64,9 @@ export class H264Repacketizer {
extraPackets = 0;
fuaMax: number;
pendingFuA: RtpPacket[];
// log whether a stapa sps/pps has been seen.
// resets on every idr frame, to trigger codec information
// to be resent.
seenStapASps = false;
fuaMin: number;

@@ -402,8 +405,12 @@ export class H264Repacketizer {
// if this is an idr frame, but no sps has been sent via a stapa, dummy one up.
// the stream may not contain codec information in stapa or may be sending it
// in separate sps/pps packets which is not supported by homekit.
if (originalNalType === NAL_TYPE_IDR && !this.seenStapASps)
this.maybeSendSpsPps(packet, ret);
if (originalNalType === NAL_TYPE_IDR) {
if (!this.seenStapASps)
this.maybeSendSpsPps(packet, ret);
this.seenStapASps = false;
}

}
else {
if (this.pendingFuA) {
@@ -452,6 +459,10 @@ export class H264Repacketizer {
if (this.shouldFilter(nalType)) {
return false;
}
if (nalType === NAL_TYPE_SPS)
this.updateSps(payload);
if (nalType === NAL_TYPE_PPS)
this.updatePps(payload);
return true;
});
if (depacketized.length === 0) {
@@ -486,10 +497,12 @@ export class H264Repacketizer {
return;
}

if (nalType === NAL_TYPE_IDR && !this.seenStapASps) {
if (nalType === NAL_TYPE_IDR) {
// if this is an idr frame, but no sps has been sent, dummy one up.
// the stream may not contain sps.
this.maybeSendSpsPps(packet, ret);
if (!this.seenStapASps)
this.maybeSendSpsPps(packet, ret);
this.seenStapASps = false;
}

this.fragment(packet, ret);
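The repacketizer changes above make seenStapASps a per-keyframe check: SPS/PPS is injected whenever an IDR frame arrives without codec information having been seen since the previous IDR, and the flag is cleared on every IDR so the check repeats for the next group of pictures. A trimmed-down model of that state handling, assuming standard H.264 NAL type values and a sendSpsPps callback standing in for maybeSendSpsPps (both are stand-ins, not the class's real members):

    // Hypothetical, reduced model of the seenStapASps handling in the diff above.
    const NAL_TYPE_STAP_A = 24;
    const NAL_TYPE_IDR = 5;

    class SpsPpsTracker {
        seenStapASps = false;

        constructor(private readonly sendSpsPps: () => void) {
        }

        onNal(nalType: number) {
            if (nalType === NAL_TYPE_STAP_A) {
                // Assumed: a STAP-A carrying SPS/PPS satisfies the requirement until the next IDR.
                this.seenStapASps = true;
                return;
            }
            if (nalType === NAL_TYPE_IDR) {
                // Key frame with no codec info seen since the last IDR: inject SPS/PPS.
                if (!this.seenStapASps)
                    this.sendSpsPps();
                // Reset so the next IDR re-checks, matching the change above.
                this.seenStapASps = false;
            }
        }
    }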
@@ -9,7 +9,7 @@ export function probe(device: DummyDevice): boolean {
}

export function getService(device: ScryptedDevice & OnOff, accessory: Accessory, serviceType: any): Service {
const service = accessory.addService(serviceType, device.name);
const service = accessory.addService(serviceType, device.name, device.nativeId);
service.getCharacteristic(Characteristic.On)
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
callback();
4 plugins/objectdetector/package-lock.json (generated)
(Generated lockfile diff, abridged: "@scrypted/objectdetector" 0.0.116 -> 0.0.132.)
@@ -1,6 +1,6 @@
{
"name": "@scrypted/objectdetector",
"version": "0.0.116",
"version": "0.0.132",
"description": "Scrypted Video Analysis Plugin. Installed alongside a detection service like OpenCV or TensorFlow.",
"author": "Scrypted",
"license": "Apache-2.0",
@@ -35,18 +35,16 @@
"name": "Video Analysis Plugin",
"type": "API",
"interfaces": [
"DeviceProvider",
"Settings",
"MixinProvider",
"DeviceProvider"
"MixinProvider"
],
"realfs": true,
"pluginDependencies": [
"@scrypted/python-codecs"
]
},
"optionalDependencies": {
"sharp": "^0.31.3"
},
"optionalDependencies": {},
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
@@ -58,7 +56,6 @@
"devDependencies": {
"@types/lodash": "^4.14.175",
"@types/node": "^14.17.11",
"@types/semver": "^7.3.13",
"@types/sharp": "^0.31.1"
"@types/semver": "^7.3.13"
}
}
@@ -1,92 +0,0 @@
|
||||
export class DenoisedDetectionEntry<T> {
|
||||
id?: string;
|
||||
boundingBox?: [number, number, number, number];
|
||||
name: string;
|
||||
score: number;
|
||||
detection: T;
|
||||
|
||||
firstSeen?: number;
|
||||
firstBox?: [number, number, number, number];
|
||||
lastSeen?: number;
|
||||
lastBox?: [number, number, number, number];
|
||||
durationGone?: number;
|
||||
}
|
||||
|
||||
export interface DenoisedDetectionOptions<T> {
|
||||
added?: (detection: DenoisedDetectionEntry<T>) => void;
|
||||
removed?: (detection: DenoisedDetectionEntry<T>) => void;
|
||||
retained?: (detection: DenoisedDetectionEntry<T>, previous: DenoisedDetectionEntry<T>) => void;
|
||||
untracked?: (detection: DenoisedDetectionOptions<T>) => void,
|
||||
expiring?: (previous: DenoisedDetectionEntry<T>) => void;
|
||||
timeout?: number;
|
||||
now?: number;
|
||||
}
|
||||
|
||||
export interface DenoisedDetectionState<T> {
|
||||
previousDetections?: DenoisedDetectionEntry<T>[];
|
||||
frameCount?: number;
|
||||
lastDetection?: number;
|
||||
// id to time
|
||||
externallyTracked?: Map<string, DenoisedDetectionEntry<T>>;
|
||||
}
|
||||
|
||||
export function denoiseDetections<T>(state: DenoisedDetectionState<T>,
|
||||
currentDetections: DenoisedDetectionEntry<T>[],
|
||||
options?: DenoisedDetectionOptions<T>
|
||||
) {
|
||||
if (!state.previousDetections)
|
||||
state.previousDetections = [];
|
||||
|
||||
const now = options.now || Date.now();
|
||||
const lastDetection = state.lastDetection || now;
|
||||
const sinceLastDetection = now - lastDetection;
|
||||
|
||||
if (!state.externallyTracked)
|
||||
state.externallyTracked = new Map();
|
||||
|
||||
for (const tracked of currentDetections) {
|
||||
tracked.durationGone = 0;
|
||||
tracked.lastSeen = now;
|
||||
tracked.lastBox = tracked.boundingBox;
|
||||
|
||||
if (!tracked.id) {
|
||||
const id = tracked.id = `untracked-${tracked.name}`;
|
||||
if (!state.externallyTracked.get(id)) {
|
||||
// crappy track untracked objects for 1 minute.
|
||||
setTimeout(() => state.externallyTracked.delete(id), 60000);
|
||||
}
|
||||
}
|
||||
|
||||
let previous = state.externallyTracked.get(tracked.id);
|
||||
if (previous) {
|
||||
state.externallyTracked.delete(tracked.id);
|
||||
tracked.firstSeen = previous.firstSeen;
|
||||
tracked.firstBox = previous.firstBox;
|
||||
|
||||
previous.durationGone = 0;
|
||||
previous.lastSeen = now;
|
||||
previous.lastBox = tracked.boundingBox;
|
||||
options?.retained(tracked, previous);
|
||||
}
|
||||
else {
|
||||
tracked.firstSeen = now;
|
||||
tracked.firstBox = tracked.lastBox = tracked.boundingBox;
|
||||
options?.added(tracked);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
for (const previous of state.externallyTracked.values()) {
|
||||
if (now - previous.lastSeen) {
|
||||
previous.durationGone += sinceLastDetection;
|
||||
if (previous.durationGone >= options.timeout) {
|
||||
options?.expiring(previous);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const tracked of currentDetections) {
|
||||
state.externallyTracked.set(tracked.id, tracked);
|
||||
}
|
||||
|
||||
}
|
||||
169 plugins/objectdetector/src/ffmpeg-videoframes-no-sharp.ts (new file)
@@ -0,0 +1,169 @@
|
||||
import { Deferred } from "@scrypted/common/src/deferred";
|
||||
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from "@scrypted/common/src/media-helpers";
|
||||
import { readLength, readLine } from "@scrypted/common/src/read-stream";
|
||||
import sdk, { FFmpegInput, Image, ImageFormat, ImageOptions, MediaObject, ScryptedDeviceBase, ScryptedMimeTypes, VideoFrame, VideoFrameGenerator, VideoFrameGeneratorOptions } from "@scrypted/sdk";
|
||||
import child_process from 'child_process';
|
||||
import { Readable } from 'stream';
|
||||
|
||||
|
||||
interface RawFrame {
|
||||
width: number;
|
||||
height: number;
|
||||
data: Buffer;
|
||||
}
|
||||
|
||||
async function createRawImageMediaObject(image: RawImage): Promise<VideoFrame & MediaObject> {
|
||||
const ret = await sdk.mediaManager.createMediaObject(image, ScryptedMimeTypes.Image, {
|
||||
format: null,
|
||||
timestamp: 0,
|
||||
width: image.width,
|
||||
height: image.height,
|
||||
queued: 0,
|
||||
toBuffer: (options: ImageOptions) => image.toBuffer(options),
|
||||
toImage: (options: ImageOptions) => image.toImage(options),
|
||||
flush: async () => { },
|
||||
});
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
class RawImage implements Image, RawFrame {
|
||||
constructor(public data: Buffer, public width: number, public height: number, public format: ImageFormat) {
|
||||
}
|
||||
|
||||
checkOptions(options: ImageOptions) {
|
||||
if (options?.resize || options?.crop)
|
||||
throw new Error('resize and crop are not supported');
|
||||
if (options?.format && options?.format !== this.format)
|
||||
throw new Error('format not supported');
|
||||
}
|
||||
|
||||
async toBuffer(options: ImageOptions) {
|
||||
this.checkOptions(options);
|
||||
return this.data;
|
||||
}
|
||||
|
||||
async toImage(options: ImageOptions) {
|
||||
this.checkOptions(options);
|
||||
return createRawImageMediaObject(this);
|
||||
}
|
||||
}
|
||||
|
||||
export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements VideoFrameGenerator {
|
||||
async *generateVideoFramesInternal(mediaObject: MediaObject, options?: VideoFrameGeneratorOptions, filter?: (videoFrame: VideoFrame & MediaObject) => Promise<boolean>): AsyncGenerator<VideoFrame & MediaObject, any, unknown> {
|
||||
const ffmpegInput = await sdk.mediaManager.convertMediaObjectToJSON<FFmpegInput>(mediaObject, ScryptedMimeTypes.FFmpegInput);
|
||||
const gray = options?.format === 'gray';
|
||||
const channels = gray ? 1 : 3;
|
||||
const format: ImageFormat = gray ? 'gray' : 'rgb';
|
||||
const vf: string[] = [];
|
||||
if (options?.fps)
|
||||
vf.push(`fps=${options.fps}`);
|
||||
if (options.resize)
|
||||
vf.push(`scale=${options.resize.width}:${options.resize.height}`);
|
||||
const args = [
|
||||
'-hide_banner',
|
||||
//'-hwaccel', 'auto',
|
||||
...ffmpegInput.inputArguments,
|
||||
'-vcodec', 'pam',
|
||||
'-pix_fmt', gray ? 'gray' : 'rgb24',
|
||||
...vf.length ? [
|
||||
'-vf',
|
||||
vf.join(','),
|
||||
] : [],
|
||||
'-f', 'image2pipe',
|
||||
'pipe:3',
|
||||
];
|
||||
|
||||
// this seems to reduce latency.
|
||||
// addVideoFilterArguments(args, 'fps=10', 'fps');
|
||||
|
||||
const cp = child_process.spawn(await sdk.mediaManager.getFFmpegPath(), args, {
|
||||
stdio: ['pipe', 'pipe', 'pipe', 'pipe'],
|
||||
});
|
||||
const console = mediaObject?.sourceId ? sdk.deviceManager.getMixinConsole(mediaObject.sourceId) : this.console;
|
||||
safePrintFFmpegArguments(console, args);
|
||||
ffmpegLogInitialOutput(console, cp);
|
||||
|
||||
let finished = false;
|
||||
let frameDeferred: Deferred<RawFrame>;
|
||||
|
||||
const reader = async () => {
|
||||
try {
|
||||
|
||||
const readable = cp.stdio[3] as Readable;
|
||||
const headers = new Map<string, string>();
|
||||
while (!finished) {
|
||||
const line = await readLine(readable);
|
||||
if (line !== 'ENDHDR') {
|
||||
const [key, value] = line.split(' ');
|
||||
headers[key] = value;
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
if (headers['TUPLTYPE'] !== 'RGB' && headers['TUPLTYPE'] !== 'GRAYSCALE')
|
||||
throw new Error(`Unexpected TUPLTYPE in PAM stream: ${headers['TUPLTYPE']}`);
|
||||
|
||||
const width = parseInt(headers['WIDTH']);
|
||||
const height = parseInt(headers['HEIGHT']);
|
||||
if (!width || !height)
|
||||
throw new Error('Invalid dimensions in PAM stream');
|
||||
|
||||
const length = width * height * channels;
|
||||
headers.clear();
|
||||
const data = await readLength(readable, length);
|
||||
|
||||
if (frameDeferred) {
|
||||
const f = frameDeferred;
|
||||
frameDeferred = undefined;
|
||||
f.resolve({
|
||||
width,
|
||||
height,
|
||||
data,
|
||||
});
|
||||
}
|
||||
else {
|
||||
// this.console.warn('skipped frame');
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
}
|
||||
finally {
|
||||
console.log('finished reader');
|
||||
finished = true;
|
||||
frameDeferred?.reject(new Error('frame generator finished'));
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
reader();
|
||||
while (!finished) {
|
||||
frameDeferred = new Deferred();
|
||||
const raw = await frameDeferred.promise;
|
||||
const { width, height, data } = raw;
|
||||
|
||||
const rawImage = new RawImage(data, width, height, format);
|
||||
try {
|
||||
const mo = await createRawImageMediaObject(rawImage);
|
||||
yield mo;
|
||||
}
|
||||
finally {
|
||||
rawImage.data = undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
}
|
||||
finally {
|
||||
console.log('finished generator');
|
||||
finished = true;
|
||||
safeKillFFmpeg(cp);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
async generateVideoFrames(mediaObject: MediaObject, options?: VideoFrameGeneratorOptions, filter?: (videoFrame: VideoFrame & MediaObject) => Promise<boolean>): Promise<AsyncGenerator<VideoFrame & MediaObject, any, unknown>> {
|
||||
return this.generateVideoFramesInternal(mediaObject, options, filter);
|
||||
}
|
||||
}
|
||||
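The sharp-free generator above implements the same VideoFrameGenerator contract as the existing implementation: generateVideoFrames resolves to an async generator of VideoFrame & MediaObject items decoded from ffmpeg's PAM pipe. A hedged usage sketch; the frameGenerator and cameraMediaObject names are assumed, as is obtaining the MediaObject from a VideoCamera elsewhere:

    import type { MediaObject } from "@scrypted/sdk";

    // Illustrative consumer of FFmpegVideoFrameGenerator (defined in the new file above).
    async function sampleGrayFrame(frameGenerator: FFmpegVideoFrameGenerator, cameraMediaObject: MediaObject) {
        const frames = await frameGenerator.generateVideoFrames(cameraMediaObject, {
            format: 'gray',
            fps: 2,
            resize: { width: 320, height: 180 },
        });

        for await (const frame of frames) {
            // RawImage.toBuffer returns the raw plane; requesting a different
            // format or a resize here would throw per checkOptions above.
            const plane = await frame.toBuffer({ format: 'gray' });
            console.log('got frame,', plane.length, 'bytes');
            // Breaking out of the loop finishes the generator, which kills ffmpeg
            // in its finally block.
            break;
        }
    }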
@@ -1,23 +1,45 @@
|
||||
import { Deferred } from "@scrypted/common/src/deferred";
|
||||
import { addVideoFilterArguments } from "@scrypted/common/src/ffmpeg-helpers";
|
||||
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from "@scrypted/common/src/media-helpers";
|
||||
import { readLength, readLine } from "@scrypted/common/src/read-stream";
|
||||
import { addVideoFilterArguments } from "@scrypted/common/src/ffmpeg-helpers";
|
||||
import sdk, { FFmpegInput, Image, ImageOptions, MediaObject, ScryptedDeviceBase, ScryptedMimeTypes, VideoFrame, VideoFrameGenerator, VideoFrameGeneratorOptions } from "@scrypted/sdk";
|
||||
import child_process from 'child_process';
|
||||
import sharp from 'sharp';
|
||||
import type sharp from 'sharp';
|
||||
import { Readable } from 'stream';
|
||||
|
||||
export let sharpLib: (input?:
|
||||
| Buffer
|
||||
| Uint8Array
|
||||
| Uint8ClampedArray
|
||||
| Int8Array
|
||||
| Uint16Array
|
||||
| Int16Array
|
||||
| Uint32Array
|
||||
| Int32Array
|
||||
| Float32Array
|
||||
| Float64Array
|
||||
| string,
|
||||
options?: sharp.SharpOptions) => sharp.Sharp;
|
||||
try {
|
||||
sharpLib = require('sharp');
|
||||
}
|
||||
catch (e) {
|
||||
console.warn('Sharp failed to load. FFmpeg Frame Generator will not function properly.')
|
||||
}
|
||||
|
||||
async function createVipsMediaObject(image: VipsImage): Promise<VideoFrame & MediaObject> {
|
||||
const ret = await sdk.mediaManager.createMediaObject(image, ScryptedMimeTypes.Image, {
|
||||
format: null,
|
||||
timestamp: 0,
|
||||
width: image.width,
|
||||
height: image.height,
|
||||
queued: 0,
|
||||
toBuffer: (options: ImageOptions) => image.toBuffer(options),
|
||||
toImage: async (options: ImageOptions) => {
|
||||
const newImage = await image.toVipsImage(options);
|
||||
return createVipsMediaObject(newImage);
|
||||
}
|
||||
},
|
||||
flush: async () => {},
|
||||
});
|
||||
|
||||
return ret;
|
||||
@@ -30,7 +52,7 @@ interface RawFrame {
|
||||
}
|
||||
|
||||
class VipsImage implements Image {
|
||||
constructor(public image: sharp.Sharp, public width: number, public height: number) {
|
||||
constructor(public image: sharp.Sharp, public width: number, public height: number, public channels: number) {
|
||||
}
|
||||
|
||||
toImageInternal(options: ImageOptions) {
|
||||
@@ -55,12 +77,18 @@ class VipsImage implements Image {
|
||||
|
||||
async toBuffer(options: ImageOptions) {
|
||||
const transformed = this.toImageInternal(options);
|
||||
if (options?.format === 'rgb') {
|
||||
transformed.removeAlpha().toFormat('raw');
|
||||
}
|
||||
else if (options?.format === 'jpg') {
|
||||
if (options?.format === 'jpg') {
|
||||
transformed.toFormat('jpg');
|
||||
}
|
||||
else {
|
||||
if (this.channels === 1 && (options?.format === 'gray' || !options.format))
|
||||
transformed.extractChannel(0);
|
||||
else if (options?.format === 'gray')
|
||||
transformed.toColorspace('b-w');
|
||||
else if (options?.format === 'rgb')
|
||||
transformed.removeAlpha()
|
||||
transformed.raw();
|
||||
}
|
||||
return transformed.toBuffer();
|
||||
}
|
||||
|
||||
@@ -70,12 +98,25 @@ class VipsImage implements Image {
resolveWithObject: true,
});

const newImage = sharp(data, {
const sharpLib = require('sharp') as (input?:
| Buffer
| Uint8Array
| Uint8ClampedArray
| Int8Array
| Uint16Array
| Int16Array
| Uint32Array
| Int32Array
| Float32Array
| Float64Array
| string,
options?) => sharp.Sharp;
const newImage = sharpLib(data, {
raw: info,
});

const newMetadata = await newImage.metadata();
const newVipsImage = new VipsImage(newImage, newMetadata.width, newMetadata.height);
const newVipsImage = new VipsImage(newImage, newMetadata.width, newMetadata.height, newMetadata.channels);
return newVipsImage;
}

@@ -90,12 +131,14 @@ class VipsImage implements Image {
export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements VideoFrameGenerator {
async *generateVideoFramesInternal(mediaObject: MediaObject, options?: VideoFrameGeneratorOptions, filter?: (videoFrame: VideoFrame & MediaObject) => Promise<boolean>): AsyncGenerator<VideoFrame & MediaObject, any, unknown> {
const ffmpegInput = await sdk.mediaManager.convertMediaObjectToJSON<FFmpegInput>(mediaObject, ScryptedMimeTypes.FFmpegInput);
const gray = options?.format === 'gray';
const channels = gray ? 1 : 3;
const args = [
'-hide_banner',
//'-hwaccel', 'auto',
...ffmpegInput.inputArguments,
'-vcodec', 'pam',
'-pix_fmt', 'rgb24',
'-pix_fmt', gray ? 'gray' : 'rgb24',
'-f', 'image2pipe',
'pipe:3',
];
@@ -127,7 +170,7 @@ export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements Vid
}


if (headers['TUPLTYPE'] !== 'RGB')
if (headers['TUPLTYPE'] !== 'RGB' && headers['TUPLTYPE'] !== 'GRAYSCALE')
throw new Error(`Unexpected TUPLTYPE in PAM stream: ${headers['TUPLTYPE']}`);

const width = parseInt(headers['WIDTH']);
@@ -135,7 +178,7 @@ export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements Vid
if (!width || !height)
throw new Error('Invalid dimensions in PAM stream');

const length = width * height * 3;
const length = width * height * channels;
headers.clear();
const data = await readLength(readable, length);

@@ -149,7 +192,7 @@ export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements Vid
});
}
else {
this.console.warn('skipped frame');
// this.console.warn('skipped frame');
}
}
}
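Context for the hunks above: with `-vcodec pam -f image2pipe`, FFmpeg writes each frame to pipe 3 as a PAM image, i.e. a small text header (`P7`, `WIDTH`, `HEIGHT`, `DEPTH`, `MAXVAL`, `TUPLTYPE`, `ENDHDR`) followed by `WIDTH * HEIGHT * DEPTH` bytes of raw pixels. That is why the frame length changes from `width * height * 3` to `width * height * channels` once grayscale output is allowed. A minimal, self-contained sketch of that framing (illustrative only; `parsePamFrame` is not a helper from this plugin):

```ts
// Illustrative sketch: parse a single PAM frame (text header + raw pixel data)
// from a Buffer. Field names follow the PAM specification.
function parsePamFrame(buffer: Buffer) {
    const marker = 'ENDHDR\n';
    const markerIndex = buffer.indexOf(marker);
    if (markerIndex === -1)
        throw new Error('not a PAM frame');
    const headerEnd = markerIndex + marker.length;

    const fields = new Map<string, string>();
    for (const line of buffer.subarray(0, headerEnd).toString().split('\n')) {
        const [key, ...rest] = line.split(' ');
        if (key && rest.length)
            fields.set(key, rest.join(' '));
    }

    const width = parseInt(fields.get('WIDTH') ?? '0');
    const height = parseInt(fields.get('HEIGHT') ?? '0');
    const depth = parseInt(fields.get('DEPTH') ?? '0'); // 1 for GRAYSCALE, 3 for RGB
    const data = buffer.subarray(headerEnd, headerEnd + width * height * depth);
    return { width, height, depth, tupltype: fields.get('TUPLTYPE'), data };
}
```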
@@ -169,14 +212,14 @@ export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements Vid
const raw = await frameDeferred.promise;
const { width, height, data } = raw;

const image = sharp(data, {
const image = sharpLib(data, {
raw: {
width,
height,
channels: 3,
channels,
}
});
const vipsImage = new VipsImage(image, width, height);
const vipsImage = new VipsImage(image, width, height, channels);
try {
const mo = await createVipsMediaObject(vipsImage);
yield mo;

@@ -1,14 +1,13 @@
|
||||
import sdk, { Camera, DeviceProvider, DeviceState, EventListenerRegister, MediaObject, MediaStreamDestination, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, ScryptedNativeId, Setting, Settings, SettingValue, VideoCamera, VideoFrame, VideoFrameGenerator } from '@scrypted/sdk';
|
||||
import { Deferred } from '@scrypted/common/src/deferred';
|
||||
import { sleep } from '@scrypted/common/src/sleep';
|
||||
import sdk, { Camera, DeviceProvider, DeviceState, EventListenerRegister, MediaObject, MediaStreamDestination, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionGeneratorResult, ObjectDetectionModel, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, ScryptedNativeId, Setting, Settings, SettingValue, VideoCamera, VideoFrame, VideoFrameGenerator } from '@scrypted/sdk';
|
||||
import { StorageSettings } from '@scrypted/sdk/storage-settings';
|
||||
import crypto from 'crypto';
|
||||
import cloneDeep from 'lodash/cloneDeep';
|
||||
import { AutoenableMixinProvider } from "../../../common/src/autoenable-mixin-provider";
|
||||
import { SettingsMixinDeviceBase } from "../../../common/src/settings-mixin";
|
||||
import { DenoisedDetectionEntry, DenoisedDetectionState, denoiseDetections } from './denoise';
|
||||
import { FFmpegVideoFrameGenerator } from './ffmpeg-videoframes';
|
||||
import { serverSupportsMixinEventMasking } from './server-version';
|
||||
import { sleep } from './sleep';
|
||||
import { getAllDevices, safeParseJson } from './util';
|
||||
import { FFmpegVideoFrameGenerator } from './ffmpeg-videoframes-no-sharp';
|
||||
|
||||
const polygonOverlap = require('polygon-overlap');
|
||||
const insidePolygon = require('point-inside-polygon');
|
||||
@@ -19,8 +18,6 @@ const defaultDetectionDuration = 20;
|
||||
const defaultDetectionInterval = 60;
|
||||
const defaultDetectionTimeout = 60;
|
||||
const defaultMotionDuration = 10;
|
||||
const defaultScoreThreshold = .2;
|
||||
const defaultSecondScoreThreshold = .7;
|
||||
|
||||
const BUILTIN_MOTION_SENSOR_ASSIST = 'Assist';
|
||||
const BUILTIN_MOTION_SENSOR_REPLACE = 'Replace';
|
||||
@@ -38,15 +35,8 @@ interface ZoneInfo {
|
||||
}
|
||||
type ZoneInfos = { [zone: string]: ZoneInfo };
|
||||
|
||||
type TrackedDetection = ObjectDetectionResult & {
|
||||
newOrBetterDetection?: boolean;
|
||||
bestScore?: number;
|
||||
bestSecondPassScore?: number;
|
||||
};
|
||||
|
||||
class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera & MotionSensor & ObjectDetector> implements ObjectDetector, Settings, ObjectDetectionCallbacks {
|
||||
class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera & MotionSensor & ObjectDetector> implements ObjectDetector, Settings {
|
||||
motionListener: EventListenerRegister;
|
||||
detectorListener: EventListenerRegister;
|
||||
motionMixinListener: EventListenerRegister;
|
||||
detections = new Map<string, MediaObject>();
|
||||
cameraDevice: ScryptedDevice & Camera & VideoCamera & MotionSensor & ObjectDetector;
|
||||
@@ -65,6 +55,10 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
choices,
|
||||
}
|
||||
},
|
||||
onPut: () => {
|
||||
this.endObjectDetection();
|
||||
this.maybeStartMotionDetection();
|
||||
},
|
||||
defaultValue: 'Default',
|
||||
},
|
||||
motionSensorSupplementation: {
|
||||
@@ -81,16 +75,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
this.maybeStartMotionDetection();
|
||||
}
|
||||
},
|
||||
captureMode: {
|
||||
title: 'Capture Mode',
|
||||
description: 'The method to capture frames for analysis. Video will require more processing power.',
|
||||
choices: [
|
||||
'Default',
|
||||
'Video',
|
||||
'Snapshot',
|
||||
],
|
||||
defaultValue: 'Default',
|
||||
},
|
||||
detectionDuration: {
|
||||
title: 'Detection Duration',
|
||||
subgroup: 'Advanced',
|
||||
@@ -121,33 +105,16 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
defaultValue: defaultDetectionInterval,
|
||||
hide: true,
|
||||
},
|
||||
scoreThreshold: {
|
||||
title: 'Minimum Detection Confidence',
|
||||
subgroup: 'Advanced',
|
||||
description: 'Higher values eliminate false positives and low quality recognition candidates.',
|
||||
type: 'number',
|
||||
placeholder: '.2',
|
||||
defaultValue: defaultScoreThreshold,
|
||||
},
|
||||
secondScoreThreshold: {
|
||||
title: 'Second Pass Confidence',
|
||||
subgroup: 'Advanced',
|
||||
description: 'Crop and reanalyze a result from the initial detection pass to get more accurate results.',
|
||||
key: 'secondScoreThreshold',
|
||||
type: 'number',
|
||||
defaultValue: defaultSecondScoreThreshold,
|
||||
placeholder: '.7',
|
||||
},
|
||||
});
|
||||
motionTimeout: NodeJS.Timeout;
|
||||
zones = this.getZones();
|
||||
zoneInfos = this.getZoneInfos();
|
||||
detectionIntervalTimeout: NodeJS.Timeout;
|
||||
detectionState: DenoisedDetectionState<TrackedDetection> = {};
|
||||
detectionId: string;
|
||||
detectorRunning = false;
|
||||
analyzeStop = 0;
|
||||
lastDetectionInput = 0;
|
||||
detectorSignal = new Deferred<void>().resolve();
|
||||
get detectorRunning() {
|
||||
return !this.detectorSignal.finished;
|
||||
}
|
||||
|
||||
constructor(public plugin: ObjectDetectionPlugin, mixinDevice: VideoCamera & Camera & MotionSensor & ObjectDetector & Settings, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState: { [key: string]: any }, providerNativeId: string, public objectDetection: ObjectDetection & ScryptedDevice, public model: ObjectDetectionModel, group: string, public hasMotionType: boolean, public settings: Setting[]) {
|
||||
super({
|
||||
@@ -160,7 +127,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
});
|
||||
|
||||
this.cameraDevice = systemManager.getDeviceById<Camera & VideoCamera & MotionSensor & ObjectDetector>(this.id);
|
||||
this.detectionId = model.name + '-' + this.cameraDevice.id;
|
||||
|
||||
this.bindObjectDetection();
|
||||
this.register();
|
||||
@@ -178,7 +144,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
if (this.hasMotionType) {
|
||||
// force a motion detection restart if it quit
|
||||
if (this.motionSensorSupplementation === BUILTIN_MOTION_SENSOR_REPLACE)
|
||||
await this.startStreamAnalysis();
|
||||
this.startPipelineAnalysis();
|
||||
return;
|
||||
}
|
||||
}, this.storageSettings.values.detectionInterval * 1000);
|
||||
@@ -216,91 +182,46 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
return ret;
|
||||
}
|
||||
|
||||
async snapshotDetection() {
|
||||
const picture = await this.cameraDevice.takePicture();
|
||||
let detections = await this.objectDetection.detectObjects(picture, {
|
||||
detectionId: this.detectionId,
|
||||
settings: this.getCurrentSettings(),
|
||||
});
|
||||
detections = await this.trackObjects(detections, true);
|
||||
this.reportObjectDetections(detections);
|
||||
}
|
||||
|
||||
async maybeStartMotionDetection() {
|
||||
if (!this.hasMotionType)
|
||||
return;
|
||||
if (this.motionSensorSupplementation !== BUILTIN_MOTION_SENSOR_REPLACE)
|
||||
return;
|
||||
await this.startStreamAnalysis();
|
||||
this.startPipelineAnalysis();
|
||||
}
|
||||
|
||||
endObjectDetection() {
|
||||
this.detectorRunning = false;
|
||||
this.objectDetection?.detectObjects(undefined, {
|
||||
detectionId: this.detectionId,
|
||||
settings: this.getCurrentSettings(),
|
||||
});
|
||||
this.detectorSignal.resolve();
|
||||
}
|
||||
|
||||
bindObjectDetection() {
|
||||
if (this.hasMotionType)
|
||||
this.motionDetected = false;
|
||||
|
||||
this.detectorRunning = false;
|
||||
this.detectorListener?.removeListener();
|
||||
this.detectorListener = undefined;
|
||||
this.endObjectDetection();
|
||||
|
||||
this.maybeStartMotionDetection();
|
||||
}
|
||||
|
||||
async register() {
|
||||
const model = await this.objectDetection.getDetectionModel();
|
||||
|
||||
if (!this.hasMotionType) {
|
||||
if (model.triggerClasses?.includes('motion')) {
|
||||
this.motionListener = this.cameraDevice.listen(ScryptedInterface.MotionSensor, async () => {
|
||||
if (!this.cameraDevice.motionDetected) {
|
||||
if (this.detectorRunning) {
|
||||
// allow analysis due to user request.
|
||||
if (this.analyzeStop > Date.now())
|
||||
return;
|
||||
|
||||
this.console.log('motion stopped, cancelling ongoing detection')
|
||||
this.endObjectDetection();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
await this.startStreamAnalysis();
|
||||
});
|
||||
}
|
||||
|
||||
const nonMotion = model.triggerClasses?.find(t => t !== 'motion');
|
||||
if (nonMotion) {
|
||||
this.detectorListener = this.cameraDevice.listen(ScryptedInterface.ObjectDetector, async (s, d, data: ObjectsDetected) => {
|
||||
if (!model.triggerClasses)
|
||||
return;
|
||||
if (!data.detectionId)
|
||||
return;
|
||||
const { detections } = data;
|
||||
if (!detections?.length)
|
||||
return;
|
||||
|
||||
const set = new Set(detections.map(d => d.className));
|
||||
for (const trigger of model.triggerClasses) {
|
||||
if (trigger === 'motion')
|
||||
continue;
|
||||
|
||||
if (set.has(trigger)) {
|
||||
const jpeg = await this.cameraDevice.getDetectionInput(data.detectionId, data.eventId);
|
||||
const found = await this.objectDetection.detectObjects(jpeg);
|
||||
found.detectionId = data.detectionId;
|
||||
this.handleDetectionEvent(found, undefined, jpeg);
|
||||
this.motionListener = this.cameraDevice.listen(ScryptedInterface.MotionSensor, async () => {
|
||||
if (!this.cameraDevice.motionDetected) {
|
||||
if (this.detectorRunning) {
|
||||
// allow analysis due to user request.
|
||||
if (this.analyzeStop > Date.now())
|
||||
return;
|
||||
}
|
||||
|
||||
this.console.log('motion stopped, cancelling ongoing detection')
|
||||
this.endObjectDetection();
|
||||
}
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
this.startPipelineAnalysis();
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
@@ -317,7 +238,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
return;
|
||||
if (!this.detectorRunning)
|
||||
this.console.log('built in motion sensor started motion, starting video detection.');
|
||||
await this.startStreamAnalysis();
|
||||
this.startPipelineAnalysis();
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -332,177 +253,68 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
}
|
||||
}
|
||||
|
||||
async handleDetectionEvent(detection: ObjectsDetected, redetect?: (boundingBox: [number, number, number, number]) => Promise<ObjectDetectionResult[]>, mediaObject?: MediaObject) {
|
||||
this.detectorRunning = detection.running;
|
||||
startPipelineAnalysis() {
|
||||
if (!this.detectorSignal.finished)
|
||||
return;
|
||||
|
||||
detection = await this.trackObjects(detection);
|
||||
const signal = this.detectorSignal = new Deferred();
|
||||
if (!this.hasMotionType)
|
||||
this.plugin.objectDetectionStarted(this.console);
|
||||
|
||||
// apply the zones to the detections and get a shallow copy list of detections after
|
||||
// exclusion zones have applied
|
||||
const zonedDetections = this.applyZones(detection)
|
||||
.filter(d => {
|
||||
if (!d.zones?.length)
|
||||
return d.bestSecondPassScore >= this.secondScoreThreshold || d.score >= this.scoreThreshold;
|
||||
const options = {
|
||||
snapshotPipeline: this.plugin.shouldUseSnapshotPipeline(),
|
||||
};
|
||||
|
||||
for (const zone of d.zones || []) {
|
||||
const zi = this.zoneInfos[zone];
|
||||
const scoreThreshold = zi?.scoreThreshold || this.scoreThreshold;
|
||||
const secondScoreThreshold = zi?.secondScoreThreshold || this.secondScoreThreshold;
|
||||
// keep the object if it passes the score check, or has already passed a second score check.
|
||||
if (d.bestSecondPassScore >= secondScoreThreshold || d.score >= scoreThreshold)
|
||||
return true;
|
||||
}
|
||||
this.runPipelineAnalysis(signal, options)
|
||||
.catch(e => {
|
||||
this.console.error('Video Analysis ended with error', e);
|
||||
}).finally(() => {
|
||||
if (!this.hasMotionType)
|
||||
this.plugin.objectDetectionEnded(this.console, options.snapshotPipeline);
|
||||
else
|
||||
this.console.log('Video Analysis motion detection ended.');
|
||||
signal.resolve();
|
||||
});
|
||||
}
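The `Deferred` from `@scrypted/common` doubles as a cancellation token in `startPipelineAnalysis` above: work continues while `signal.finished` is false, and any caller can stop the pipeline with `signal.resolve()`. A minimal sketch of that pattern, inferred from how it is used in this diff (the actual helper in `@scrypted/common` may expose more):

```ts
// Minimal Deferred sketch, inferred from its usage in this diff.
class Deferred<T> {
    finished = false;
    private resolver!: (value: T) => void;
    readonly promise = new Promise<T>(resolve => this.resolver = resolve);

    // resolve() is chainable here because the mixin initializes its field
    // with `new Deferred<void>().resolve()`.
    resolve(value?: T) {
        this.finished = true;
        this.resolver(value as T);
        return this;
    }
}
```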
|
||||
|
||||
let retainImage = false;
|
||||
async runPipelineAnalysis(signal: Deferred<void>, options: {
|
||||
snapshotPipeline: boolean,
|
||||
}) {
|
||||
const start = Date.now();
|
||||
this.analyzeStop = start + this.getDetectionDuration();
|
||||
|
||||
if (!this.hasMotionType && redetect && this.secondScoreThreshold && detection.detections) {
|
||||
const detections = detection.detections as TrackedDetection[];
|
||||
const newOrBetterDetections = zonedDetections.filter(d => d.newOrBetterDetection);
|
||||
detections?.forEach(d => d.newOrBetterDetection = false);
|
||||
|
||||
// anything with a higher pass initial score should be redetected
|
||||
// as it may yield a better second pass score and thus a better thumbnail.
|
||||
await Promise.allSettled(newOrBetterDetections.map(async d => {
|
||||
const maybeUpdateSecondPassScore = (secondPassScore: number) => {
|
||||
let better = false;
|
||||
// initialize second pass result
|
||||
if (!d.bestSecondPassScore) {
|
||||
better = true;
|
||||
d.bestSecondPassScore = 0;
|
||||
}
|
||||
// retain passing the second pass threshold for first time.
|
||||
if (d.bestSecondPassScore < this.secondScoreThreshold && secondPassScore >= this.secondScoreThreshold) {
|
||||
this.console.log('improved', d.id, secondPassScore, d.score);
|
||||
better = true;
|
||||
retainImage = true;
|
||||
}
|
||||
else if (secondPassScore > d.bestSecondPassScore * 1.1) {
|
||||
this.console.log('improved', d.id, secondPassScore, d.score);
|
||||
better = true;
|
||||
retainImage = true;
|
||||
}
|
||||
if (better)
|
||||
d.bestSecondPassScore = secondPassScore;
|
||||
return better;
|
||||
}
|
||||
|
||||
// the initial score may be sufficient.
|
||||
if (d.score >= this.secondScoreThreshold) {
|
||||
maybeUpdateSecondPassScore(d.score);
|
||||
return;
|
||||
}
|
||||
|
||||
const redetected = await redetect(d.boundingBox);
|
||||
const best = redetected.filter(r => r.className === d.className).sort((a, b) => b.score - a.score)?.[0];
|
||||
if (best) {
|
||||
if (maybeUpdateSecondPassScore(best.score)) {
|
||||
d.boundingBox = best.boundingBox;
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
const secondPassDetections = zonedDetections.filter(d => d.bestSecondPassScore >= this.secondScoreThreshold)
|
||||
.map(d => ({
|
||||
...d,
|
||||
score: d.bestSecondPassScore,
|
||||
}));
|
||||
detection.detections = secondPassDetections;
|
||||
}
|
||||
else {
|
||||
detection.detections = zonedDetections;
|
||||
let lastStatusTime = Date.now();
|
||||
let lastStatus = 'starting';
|
||||
const updatePipelineStatus = (status: string) => {
|
||||
lastStatus = status;
|
||||
lastStatusTime = Date.now();
|
||||
}
|
||||
|
||||
if (detection.detections) {
|
||||
const trackedDetections = cloneDeep(detection.detections) as TrackedDetection[];
|
||||
for (const d of trackedDetections) {
|
||||
delete d.bestScore;
|
||||
delete d.bestSecondPassScore;
|
||||
delete d.newOrBetterDetection;
|
||||
let frameGenerator: AsyncGenerator<VideoFrame & MediaObject, void>;
|
||||
let detectionGenerator: AsyncGenerator<ObjectDetectionGeneratorResult, void>;
|
||||
const interval = setInterval(() => {
|
||||
if (Date.now() - lastStatusTime > 30000) {
|
||||
signal.resolve();
|
||||
this.console.error('VideoAnalysis is hung and will terminate:', lastStatus);
|
||||
}
|
||||
detection.detections = trackedDetections;
|
||||
}
|
||||
}, 30000);
|
||||
signal.promise.finally(() => clearInterval(interval));
|
||||
|
||||
const now = Date.now();
|
||||
if (this.lastDetectionInput + this.storageSettings.values.detectionTimeout * 1000 < Date.now())
|
||||
retainImage = true;
|
||||
|
||||
if (retainImage && mediaObject) {
|
||||
this.lastDetectionInput = now;
|
||||
this.setDetection(detection, mediaObject);
|
||||
}
|
||||
|
||||
this.reportObjectDetections(detection);
|
||||
return retainImage;
|
||||
}
|
||||
|
||||
get scoreThreshold() {
|
||||
return parseFloat(this.storage.getItem('scoreThreshold')) || defaultScoreThreshold;
|
||||
}
|
||||
|
||||
get secondScoreThreshold() {
|
||||
const r = parseFloat(this.storage.getItem('secondScoreThreshold'));
|
||||
if (isNaN(r))
|
||||
return defaultSecondScoreThreshold;
|
||||
return r;
|
||||
}
|
||||
|
||||
async onDetection(detection: ObjectsDetected, redetect?: (boundingBox: [number, number, number, number]) => Promise<ObjectDetectionResult[]>, mediaObject?: MediaObject): Promise<boolean> {
|
||||
// detection.detections = detection.detections?.filter(d => d.score >= this.scoreThreshold);
|
||||
return this.handleDetectionEvent(detection, redetect, mediaObject);
|
||||
}
|
||||
|
||||
async onDetectionEnded(detection: ObjectsDetected): Promise<void> {
|
||||
this.handleDetectionEvent(detection);
|
||||
}
|
||||
|
||||
async startSnapshotAnalysis() {
|
||||
if (this.detectorRunning)
|
||||
return;
|
||||
|
||||
this.detectorRunning = true;
|
||||
this.analyzeStop = Date.now() + this.getDetectionDuration();
|
||||
|
||||
while (this.detectorRunning) {
|
||||
const now = Date.now();
|
||||
if (now > this.analyzeStop)
|
||||
break;
|
||||
try {
|
||||
const mo = await this.mixinDevice.takePicture({
|
||||
reason: 'event',
|
||||
});
|
||||
const found = await this.objectDetection.detectObjects(mo, {
|
||||
detectionId: this.detectionId,
|
||||
duration: this.getDetectionDuration(),
|
||||
settings: this.getCurrentSettings(),
|
||||
}, this);
|
||||
let newPipeline: string = this.newPipeline;
|
||||
if (!this.hasMotionType && (!newPipeline || newPipeline === 'Default')) {
|
||||
if (options.snapshotPipeline) {
|
||||
newPipeline = 'Snapshot';
|
||||
this.console.warn(`Due to limited performance, Snapshot mode is being used with ${this.plugin.statsSnapshotConcurrent} actively detecting cameras.`);
|
||||
}
|
||||
catch (e) {
|
||||
this.console.error('snapshot detection error', e);
|
||||
}
|
||||
// cameras tend to only refresh every 1s at best.
|
||||
// maybe get this value from somewhere? or sha the jpeg?
|
||||
const diff = now + 1100 - Date.now();
|
||||
if (diff > 0)
|
||||
await sleep(diff);
|
||||
}
|
||||
this.endObjectDetection();
|
||||
}
|
||||
|
||||
async startPipelineAnalysis() {
|
||||
if (this.detectorRunning)
|
||||
return;
|
||||
|
||||
this.detectorRunning = true;
|
||||
this.analyzeStop = Date.now() + this.getDetectionDuration();
|
||||
|
||||
const newPipeline = this.newPipeline;
|
||||
let generator: () => Promise<AsyncGenerator<VideoFrame & MediaObject>>;
|
||||
if (newPipeline === 'Snapshot' && !this.hasMotionType) {
|
||||
options.snapshotPipeline = true;
|
||||
this.console.log('decoder:', 'Snapshot +', this.objectDetection.name);
|
||||
const self = this;
|
||||
generator = async () => (async function* gen() {
|
||||
frameGenerator = (async function* gen() {
|
||||
try {
|
||||
while (self.detectorRunning) {
|
||||
while (!signal.finished) {
|
||||
const now = Date.now();
|
||||
const sleeper = async () => {
|
||||
const diff = now + 1100 - Date.now();
|
||||
@@ -511,9 +323,11 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
};
|
||||
let image: MediaObject & VideoFrame;
|
||||
try {
|
||||
updatePipelineStatus('takePicture');
|
||||
const mo = await self.cameraDevice.takePicture({
|
||||
reason: 'event',
|
||||
});
|
||||
updatePipelineStatus('converting image');
|
||||
image = await sdk.mediaManager.convertMediaObject(mo, ScryptedMimeTypes.Image);
|
||||
}
|
||||
catch (e) {
|
||||
@@ -523,6 +337,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
}
|
||||
|
||||
// self.console.log('yield')
|
||||
updatePipelineStatus('processing image');
|
||||
yield image;
|
||||
// self.console.log('done yield')
|
||||
await sleeper();
|
||||
@@ -536,16 +351,19 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
else {
|
||||
const destination: MediaStreamDestination = this.hasMotionType ? 'low-resolution' : 'local-recorder';
|
||||
const videoFrameGenerator = systemManager.getDeviceById<VideoFrameGenerator>(newPipeline);
|
||||
this.console.log('decoder:', videoFrameGenerator.name);
|
||||
if (!videoFrameGenerator)
|
||||
throw new Error('invalid VideoFrameGenerator');
|
||||
this.console.log(videoFrameGenerator.name, '+', this.objectDetection.name);
|
||||
updatePipelineStatus('getVideoStream');
|
||||
const stream = await this.cameraDevice.getVideoStream({
|
||||
prebuffer: this.model.prebuffer,
|
||||
destination,
|
||||
// ask rebroadcast to mute audio, not needed.
|
||||
audio: null,
|
||||
});
|
||||
|
||||
generator = async () => videoFrameGenerator.generateVideoFrames(stream, {
|
||||
frameGenerator = await videoFrameGenerator.generateVideoFrames(stream, {
|
||||
queue: 0,
|
||||
resize: this.model?.inputSize ? {
|
||||
width: this.model.inputSize[0],
|
||||
height: this.model.inputSize[1],
|
||||
@@ -554,138 +372,71 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
});
|
||||
}
|
||||
|
||||
const start = Date.now();
|
||||
let detections = 0;
|
||||
try {
|
||||
for await (const detected
|
||||
of await this.objectDetection.generateObjectDetections(await generator(), {
|
||||
settings: this.getCurrentSettings(),
|
||||
sourceId: this.id,
|
||||
})) {
|
||||
if (!this.detectorRunning) {
|
||||
break;
|
||||
const currentDetections = new Set<string>();
|
||||
let lastReport = 0;
|
||||
detectionGenerator = await sdk.connectRPCObject(await this.objectDetection.generateObjectDetections(frameGenerator, {
|
||||
settings: this.getCurrentSettings(),
|
||||
sourceId: this.id,
|
||||
}));
|
||||
|
||||
updatePipelineStatus('waiting result');
|
||||
|
||||
for await (const detected of detectionGenerator) {
|
||||
if (signal.finished) {
|
||||
break;
|
||||
}
|
||||
if (!this.hasMotionType && Date.now() > this.analyzeStop) {
|
||||
break;
|
||||
}
|
||||
|
||||
// apply the zones to the detections and get a shallow copy list of detections after
|
||||
// exclusion zones have applied
|
||||
const zonedDetections = this.applyZones(detected.detected);
|
||||
detected.detected.detections = zonedDetections;
|
||||
|
||||
// this.console.warn('dps', detections / (Date.now() - start) * 1000);
|
||||
|
||||
if (!this.hasMotionType) {
|
||||
this.plugin.trackDetection();
|
||||
|
||||
for (const d of detected.detected.detections) {
|
||||
currentDetections.add(d.className);
|
||||
}
|
||||
|
||||
const now = Date.now();
|
||||
if (now > this.analyzeStop) {
|
||||
break;
|
||||
if (now > lastReport + 10000) {
|
||||
const found = [...currentDetections.values()];
|
||||
if (!found.length)
|
||||
found.push('[no detections]');
|
||||
this.console.log(`[${Math.round((now - start) / 100) / 10}s] Detected:`, ...found);
|
||||
currentDetections.clear();
|
||||
lastReport = now;
|
||||
}
|
||||
|
||||
// apply the zones to the detections and get a shallow copy list of detections after
|
||||
// exclusion zones have applied
|
||||
const zonedDetections = this.applyZones(detected.detected);
|
||||
const filteredDetections = zonedDetections
|
||||
.filter(d => {
|
||||
if (!d.zones?.length)
|
||||
return d.score >= this.scoreThreshold;
|
||||
|
||||
for (const zone of d.zones || []) {
|
||||
const zi = this.zoneInfos[zone];
|
||||
const scoreThreshold = zi?.scoreThreshold || this.scoreThreshold;
|
||||
if (d.score >= scoreThreshold)
|
||||
return true;
|
||||
}
|
||||
});
|
||||
|
||||
detected.detected.detections = filteredDetections;
|
||||
|
||||
detections++;
|
||||
// this.console.warn('dps', detections / (Date.now() - start) * 1000);
|
||||
|
||||
if (detected.detected.detectionId) {
|
||||
const jpeg = await detected.videoFrame.toBuffer({
|
||||
format: 'jpg',
|
||||
});
|
||||
const mo = await sdk.mediaManager.createMediaObject(jpeg, 'image/jpeg');
|
||||
this.setDetection(detected.detected, mo);
|
||||
// this.console.log('image saved', detected.detected.detections);
|
||||
}
|
||||
this.reportObjectDetections(detected.detected);
|
||||
if (this.hasMotionType) {
|
||||
await sleep(250);
|
||||
}
|
||||
// this.handleDetectionEvent(detected.detected);
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
this.console.error('video pipeline ended with error', e);
|
||||
}
|
||||
finally {
|
||||
this.console.log('video pipeline analysis ended, dps:', detections / (Date.now() - start) * 1000);
|
||||
this.endObjectDetection();
|
||||
}
|
||||
}
|
||||
|
||||
async startStreamAnalysis() {
|
||||
if (this.newPipeline) {
|
||||
await this.startPipelineAnalysis();
|
||||
}
|
||||
else if (!this.hasMotionType && this.storageSettings.values.captureMode === 'Snapshot') {
|
||||
await this.startSnapshotAnalysis();
|
||||
}
|
||||
else {
|
||||
await this.startVideoDetection();
|
||||
}
|
||||
}
|
||||
|
||||
async extendedObjectDetect(force?: boolean) {
|
||||
if (!this.hasMotionType && this.storageSettings.values.captureMode === 'Snapshot') {
|
||||
this.analyzeStop = Date.now() + this.getDetectionDuration();
|
||||
}
|
||||
else {
|
||||
try {
|
||||
if (!force && !this.motionDetected)
|
||||
return;
|
||||
await this.objectDetection?.detectObjects(undefined, {
|
||||
detectionId: this.detectionId,
|
||||
duration: this.getDetectionDuration(),
|
||||
settings: this.getCurrentSettings(),
|
||||
}, this);
|
||||
}
|
||||
catch (e) {
|
||||
// ignore any errors.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async startVideoDetection() {
|
||||
try {
|
||||
const settings = this.getCurrentSettings();
|
||||
|
||||
// prevent stream retrieval noise until notified that the detection is no longer running.
|
||||
if (this.detectorRunning) {
|
||||
const session = await this.objectDetection?.detectObjects(undefined, {
|
||||
detectionId: this.detectionId,
|
||||
duration: this.getDetectionDuration(),
|
||||
settings,
|
||||
}, this);
|
||||
this.detectorRunning = session.running;
|
||||
if (this.detectorRunning)
|
||||
return;
|
||||
}
|
||||
|
||||
// dummy up the last detection time to prevent the idle timers from purging everything.
|
||||
this.detectionState.lastDetection = Date.now();
|
||||
|
||||
this.detectorRunning = true;
|
||||
let stream: MediaObject;
|
||||
|
||||
stream = await this.cameraDevice.getVideoStream({
|
||||
destination: !this.hasMotionType ? 'local-recorder' : 'low-resolution',
|
||||
// ask rebroadcast to mute audio, not needed.
|
||||
audio: null,
|
||||
});
|
||||
const session = await this.objectDetection?.detectObjects(stream, {
|
||||
detectionId: this.detectionId,
|
||||
duration: this.getDetectionDuration(),
|
||||
settings,
|
||||
}, this);
|
||||
|
||||
this.detectorRunning = session.running;
|
||||
}
|
||||
catch (e) {
|
||||
this.console.log('failure retrieving stream', e);
|
||||
this.detectorRunning = false;
|
||||
if (detected.detected.detectionId) {
|
||||
updatePipelineStatus('creating jpeg');
|
||||
// const start = Date.now();
|
||||
const vf = await sdk.connectRPCObject(detected.videoFrame);
|
||||
const jpeg = await vf.toBuffer({
|
||||
format: 'jpg',
|
||||
});
|
||||
const mo = await sdk.mediaManager.createMediaObject(jpeg, 'image/jpeg');
|
||||
// this.console.log('retain took', Date.now() -start);
|
||||
this.setDetection(detected.detected, mo);
|
||||
// this.console.log('image saved', detected.detected.detections);
|
||||
}
|
||||
this.reportObjectDetections(detected.detected);
|
||||
if (this.hasMotionType) {
|
||||
// const diff = Date.now() - when;
|
||||
// when = Date.now();
|
||||
// this.console.log('sleeper', diff);
|
||||
await sleep(250);
|
||||
}
|
||||
updatePipelineStatus('waiting result');
|
||||
// this.handleDetectionEvent(detected.detected);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
normalizeBox(boundingBox: [number, number, number, number], inputDimensions: [number, number]) {
|
||||
@@ -771,7 +522,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
copy = copy.filter(c => c !== o);
|
||||
}
|
||||
|
||||
return copy as TrackedDetection[];
|
||||
return copy;
|
||||
}
|
||||
|
||||
reportObjectDetections(detection: ObjectsDetected) {
|
||||
@@ -806,93 +557,11 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
this.onDeviceEvent(ScryptedInterface.ObjectDetector, detection);
|
||||
}
|
||||
|
||||
async trackObjects(detectionResult: ObjectsDetected, showAll?: boolean) {
|
||||
// do not denoise
|
||||
if (this.hasMotionType) {
|
||||
return detectionResult;
|
||||
}
|
||||
|
||||
if (!detectionResult?.detections) {
|
||||
// detection session ended.
|
||||
return detectionResult;
|
||||
}
|
||||
|
||||
const { detections } = detectionResult;
|
||||
|
||||
const found: DenoisedDetectionEntry<TrackedDetection>[] = [];
|
||||
denoiseDetections<TrackedDetection>(this.detectionState, detections.map(detection => ({
|
||||
get id() {
|
||||
return detection.id;
|
||||
},
|
||||
set id(id) {
|
||||
detection.id = id;
|
||||
},
|
||||
name: detection.className,
|
||||
score: detection.score,
|
||||
detection,
|
||||
get firstSeen() {
|
||||
return detection.history?.firstSeen
|
||||
},
|
||||
set firstSeen(value) {
|
||||
detection.history = detection.history || {
|
||||
firstSeen: value,
|
||||
lastSeen: value,
|
||||
};
|
||||
detection.history.firstSeen = value;
|
||||
},
|
||||
get lastSeen() {
|
||||
return detection.history?.lastSeen
|
||||
},
|
||||
set lastSeen(value) {
|
||||
detection.history = detection.history || {
|
||||
firstSeen: value,
|
||||
lastSeen: value,
|
||||
};
|
||||
detection.history.lastSeen = value;
|
||||
},
|
||||
boundingBox: detection.boundingBox,
|
||||
})), {
|
||||
timeout: this.storageSettings.values.detectionTimeout * 1000,
|
||||
added: d => {
|
||||
found.push(d);
|
||||
d.detection.bestScore = d.detection.score;
|
||||
d.detection.newOrBetterDetection = true;
|
||||
},
|
||||
removed: d => {
|
||||
this.console.log('expired detection:', `${d.detection.className} (${d.detection.score})`);
|
||||
if (detectionResult.running)
|
||||
this.extendedObjectDetect();
|
||||
},
|
||||
retained: (d, o) => {
|
||||
if (d.detection.score > o.detection.bestScore) {
|
||||
d.detection.bestScore = d.detection.score;
|
||||
d.detection.newOrBetterDetection = true;
|
||||
}
|
||||
else {
|
||||
d.detection.bestScore = o.detection.bestScore;
|
||||
}
|
||||
d.detection.bestSecondPassScore = o.detection.bestSecondPassScore;
|
||||
},
|
||||
expiring: (d) => {
|
||||
},
|
||||
});
|
||||
if (found.length) {
|
||||
this.console.log('new detection:', found.map(d => `${d.id} ${d.detection.className} (${d.detection.score})`).join(', '));
|
||||
if (detectionResult.running)
|
||||
this.extendedObjectDetect();
|
||||
}
|
||||
if (found.length || showAll) {
|
||||
this.console.log('current detections:', this.detectionState.previousDetections.map(d => `${d.detection.className} (${d.detection.score}, ${d.detection.boundingBox?.join(', ')})`).join(', '));
|
||||
}
|
||||
|
||||
return detectionResult;
|
||||
}
|
||||
|
||||
setDetection(detection: ObjectsDetected, detectionInput: MediaObject) {
|
||||
if (!detection.detectionId)
|
||||
detection.detectionId = crypto.randomBytes(4).toString('hex');
|
||||
|
||||
this.console.log('retaining detection image');
|
||||
this.console.log('retaining detection image', ...detection.detections);
|
||||
|
||||
const { detectionId } = detection;
|
||||
this.detections.set(detectionId, detectionInput);
|
||||
@@ -942,9 +611,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
}
|
||||
|
||||
get newPipeline() {
|
||||
if (!this.plugin.storageSettings.values.newPipeline)
|
||||
return;
|
||||
|
||||
const newPipeline = this.storageSettings.values.newPipeline;
|
||||
if (!newPipeline)
|
||||
return newPipeline;
|
||||
@@ -979,8 +645,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
}
|
||||
|
||||
this.storageSettings.settings.motionSensorSupplementation.hide = !this.hasMotionType || !this.mixinDeviceInterfaces.includes(ScryptedInterface.MotionSensor);
|
||||
this.storageSettings.settings.captureMode.hide = this.hasMotionType || !!this.plugin.storageSettings.values.newPipeline;
|
||||
this.storageSettings.settings.newPipeline.hide = !this.plugin.storageSettings.values.newPipeline;
|
||||
this.storageSettings.settings.detectionDuration.hide = this.hasMotionType;
|
||||
this.storageSettings.settings.detectionTimeout.hide = this.hasMotionType;
|
||||
this.storageSettings.settings.motionDuration.hide = !this.hasMotionType;
|
||||
@@ -988,23 +652,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
|
||||
settings.push(...await this.storageSettings.getSettings());
|
||||
|
||||
let hideThreshold = true;
|
||||
if (!this.hasMotionType) {
|
||||
let hasInclusionZone = false;
|
||||
for (const zone of Object.keys(this.zones)) {
|
||||
const zi = this.zoneInfos[zone];
|
||||
if (!zi?.exclusion) {
|
||||
hasInclusionZone = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!hasInclusionZone) {
|
||||
hideThreshold = false;
|
||||
}
|
||||
}
|
||||
this.storageSettings.settings.scoreThreshold.hide = hideThreshold;
|
||||
this.storageSettings.settings.secondScoreThreshold.hide = hideThreshold;
|
||||
|
||||
settings.push({
|
||||
key: 'zones',
|
||||
title: 'Zones',
|
||||
@@ -1048,38 +695,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
],
|
||||
value: zi?.type || 'Intersect',
|
||||
});
|
||||
|
||||
if (!this.hasMotionType) {
|
||||
settings.push(
|
||||
{
|
||||
subgroup,
|
||||
key: `zoneinfo-classes-${name}`,
|
||||
title: `Detection Classes`,
|
||||
description: 'The detection classes to match inside this zone. An empty list will match all classes.',
|
||||
choices: (await this.getObjectTypes())?.classes || [],
|
||||
value: zi?.classes || [],
|
||||
multiple: true,
|
||||
},
|
||||
{
|
||||
subgroup,
|
||||
title: 'Minimum Detection Confidence',
|
||||
description: 'Higher values eliminate false positives and low quality recognition candidates.',
|
||||
key: `zoneinfo-scoreThreshold-${name}`,
|
||||
type: 'number',
|
||||
value: zi?.scoreThreshold || this.scoreThreshold,
|
||||
placeholder: '.2',
|
||||
},
|
||||
{
|
||||
subgroup,
|
||||
title: 'Second Pass Confidence',
|
||||
description: 'Crop and reanalyze a result from the initial detection pass to get more accurate results.',
|
||||
key: `zoneinfo-secondScoreThreshold-${name}`,
|
||||
type: 'number',
|
||||
value: zi?.secondScoreThreshold || this.secondScoreThreshold,
|
||||
placeholder: '.7',
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.hasMotionType) {
|
||||
@@ -1155,9 +770,9 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
}
|
||||
|
||||
if (key === 'analyzeButton') {
|
||||
this.analyzeStop = Date.now() + 60000;
|
||||
// await this.snapshotDetection();
|
||||
await this.startStreamAnalysis();
|
||||
this.startPipelineAnalysis();
|
||||
this.analyzeStop = Date.now() + 60000;
|
||||
}
|
||||
else {
|
||||
const settings = this.getCurrentSettings();
|
||||
@@ -1175,7 +790,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
|
||||
this.clearMotionTimeout();
|
||||
this.motionListener?.removeListener();
|
||||
this.motionMixinListener?.removeListener();
|
||||
this.detectorListener?.removeListener();
|
||||
this.endObjectDetection();
|
||||
}
|
||||
}
|
||||
@@ -1242,16 +856,18 @@ class ObjectDetectorMixin extends MixinDeviceBase<ObjectDetection> implements Mi
|
||||
}
|
||||
}
|
||||
|
||||
interface ObjectDetectionStatistics {
|
||||
dps: number;
|
||||
sampleTime: number;
|
||||
}
|
||||
|
||||
class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings, DeviceProvider {
|
||||
currentMixins = new Set<ObjectDetectorMixin>();
|
||||
|
||||
objectDetectionStatistics = new Map<number, ObjectDetectionStatistics>();
|
||||
statsSnapshotTime: number;
|
||||
statsSnapshotDetections: number;
|
||||
statsSnapshotConcurrent = 0;
|
||||
storageSettings = new StorageSettings(this, {
|
||||
newPipeline: {
|
||||
title: 'New Video Pipeline',
|
||||
description: 'Enables the new video pipeline added on 2023/03/25. If there are issues with motion or object detection, disable this to switch back to the old pipeline. Then reload the plugin.',
|
||||
type: 'boolean',
|
||||
defaultValue: true,
|
||||
},
|
||||
activeMotionDetections: {
|
||||
title: 'Active Motion Detection Sessions',
|
||||
readonly: true,
|
||||
@@ -1265,12 +881,77 @@ class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings,
|
||||
title: 'Active Object Detection Sessions',
|
||||
readonly: true,
|
||||
mapGet: () => {
|
||||
// could use the stats variable...
|
||||
return [...this.currentMixins.values()]
|
||||
.reduce((c1, v1) => c1 + [...v1.currentMixins.values()]
|
||||
.reduce((c2, v2) => c2 + (!v2.hasMotionType && v2.detectorRunning ? 1 : 0), 0), 0);
|
||||
}
|
||||
},
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
shouldUseSnapshotPipeline() {
|
||||
this.pruneOldStatistics();
|
||||
|
||||
for (const [k, v] of this.objectDetectionStatistics.entries()) {
|
||||
// check the stats history to see if any sessions
|
||||
// with same or lower number of cameras were on the struggle bus.
|
||||
if (v.dps < 2 && k <= this.statsSnapshotConcurrent)
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
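To make the fallback heuristic above concrete: `resetStats` below records a detections-per-second sample per concurrency level, and `shouldUseSnapshotPipeline` switches to the cheaper snapshot pipeline whenever any recorded session with the same or fewer concurrent cameras averaged under 2 dps. The same decision in isolation (the standalone function name is illustrative):

```ts
interface ObjectDetectionStatistics {
    dps: number;        // detections per second over a completed session
    sampleTime: number; // when the sample was recorded
}

// Example: with 3 cameras currently detecting, a past 2-camera session that
// managed only 450 detections over 300 seconds (1.5 dps) is enough to
// trigger the snapshot fallback.
function shouldUseSnapshot(stats: Map<number, ObjectDetectionStatistics>, concurrentCameras: number) {
    for (const [cameras, sample] of stats.entries()) {
        if (sample.dps < 2 && cameras <= concurrentCameras)
            return true;
    }
    return false;
}
```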
|
||||
|
||||
pruneOldStatistics() {
|
||||
const now = Date.now();
|
||||
for (const [k, v] of this.objectDetectionStatistics.entries()) {
|
||||
// purge the stats every hour
|
||||
if (Date.now() - v.sampleTime > 60 * 60 * 1000)
|
||||
this.objectDetectionStatistics.delete(k);
|
||||
}
|
||||
}
|
||||
|
||||
trackDetection() {
|
||||
this.statsSnapshotDetections++;
|
||||
}
|
||||
|
||||
objectDetectionStarted(console: Console) {
|
||||
this.resetStats(console);
|
||||
|
||||
this.statsSnapshotConcurrent++;
|
||||
}
|
||||
|
||||
objectDetectionEnded(console: Console, snapshotPipeline: boolean) {
|
||||
this.resetStats(console, snapshotPipeline);
|
||||
|
||||
this.statsSnapshotConcurrent--;
|
||||
}
|
||||
|
||||
resetStats(console: Console, snapshotPipeline?: boolean) {
|
||||
const now = Date.now();
|
||||
const concurrentSessions = this.statsSnapshotConcurrent;
|
||||
if (concurrentSessions) {
|
||||
const duration = now - this.statsSnapshotTime;
|
||||
const stats: ObjectDetectionStatistics = {
|
||||
sampleTime: now,
|
||||
dps: this.statsSnapshotDetections / (duration / 1000),
|
||||
};
|
||||
|
||||
// ignore short sessions and sessions with no detections (busted?).
|
||||
// also ignore snapshot sessions because that will skew/throttle the stats used
|
||||
// to determine system dps capabilities.
|
||||
if (duration > 10000 && this.statsSnapshotDetections && !snapshotPipeline)
|
||||
this.objectDetectionStatistics.set(concurrentSessions, stats);
|
||||
|
||||
this.pruneOldStatistics();
|
||||
|
||||
const str = `video analysis, ${concurrentSessions} camera(s), dps: ${Math.round(stats.dps * 10) / 10} (${this.statsSnapshotDetections}/${Math.round(duration / 1000)})`;
|
||||
this.console.log(str);
|
||||
console?.log(str);
|
||||
}
|
||||
|
||||
this.statsSnapshotDetections = 0;
|
||||
this.statsSnapshotTime = now;
|
||||
}
|
||||
|
||||
constructor(nativeId?: ScryptedNativeId) {
|
||||
super(nativeId);
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
export function sleep(ms: number) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
22
plugins/onvif/package-lock.json
generated
@@ -1,18 +1,17 @@
|
||||
{
|
||||
"name": "@scrypted/onvif",
|
||||
"version": "0.0.118",
|
||||
"version": "0.0.120",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/onvif",
|
||||
"version": "0.0.118",
|
||||
"version": "0.0.120",
|
||||
"license": "Apache",
|
||||
"dependencies": {
|
||||
"@koush/axios-digest-auth": "^0.8.5",
|
||||
"@scrypted/common": "file:../../common",
|
||||
"@scrypted/sdk": "file:../../sdk",
|
||||
"@types/node": "^16.9.1",
|
||||
"base-64": "^1.0.0",
|
||||
"http-auth-utils": "^3.0.2",
|
||||
"md5": "^2.3.0",
|
||||
@@ -21,6 +20,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/md5": "^2.3.1",
|
||||
"@types/node": "^18.15.11",
|
||||
"@types/xml2js": "^0.4.9"
|
||||
}
|
||||
},
|
||||
@@ -65,7 +65,7 @@
|
||||
},
|
||||
"../../sdk": {
|
||||
"name": "@scrypted/sdk",
|
||||
"version": "0.2.68",
|
||||
"version": "0.2.87",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@babel/preset-typescript": "^7.18.6",
|
||||
@@ -130,9 +130,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "16.9.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz",
|
||||
"integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g=="
|
||||
"version": "18.15.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
|
||||
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/xml2js": {
|
||||
"version": "0.4.9",
|
||||
@@ -328,9 +329,10 @@
|
||||
}
|
||||
},
|
||||
"@types/node": {
|
||||
"version": "16.9.1",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-16.9.1.tgz",
|
||||
"integrity": "sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g=="
|
||||
"version": "18.15.11",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
|
||||
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/xml2js": {
|
||||
"version": "0.4.9",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/onvif",
|
||||
"version": "0.0.118",
|
||||
"version": "0.0.120",
|
||||
"description": "ONVIF Camera Plugin for Scrypted",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache",
|
||||
@@ -39,7 +39,6 @@
|
||||
"@koush/axios-digest-auth": "^0.8.5",
|
||||
"@scrypted/common": "file:../../common",
|
||||
"@scrypted/sdk": "file:../../sdk",
|
||||
"@types/node": "^16.9.1",
|
||||
"base-64": "^1.0.0",
|
||||
"http-auth-utils": "^3.0.2",
|
||||
"md5": "^2.3.0",
|
||||
@@ -48,6 +47,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/md5": "^2.3.1",
|
||||
"@types/node": "^18.15.11",
|
||||
"@types/xml2js": "^0.4.9"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ function stripNamespaces(topic: string) {
|
||||
let parts = topic.split('/')
|
||||
for (let index = 0; index < parts.length; index++) {
|
||||
let stringNoNamespace = parts[index].split(':').pop() // split on :, then return the last item in the array
|
||||
if (output.length == 0) {
|
||||
if (output.length === 0) {
|
||||
output += stringNoNamespace
|
||||
} else {
|
||||
output += '/' + stringNoNamespace
|
||||
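For example, an ONVIF topic such as `tns1:RuleEngine/CellMotionDetector/Motion` comes out of this helper as `RuleEngine/CellMotionDetector/Motion`. A hedged reconstruction of the whole function for illustration (the hunk above only shows part of its body):

```ts
// Illustrative reconstruction of stripNamespaces; only part of the original
// function body is visible in the hunk above.
function stripNamespaces(topic: string) {
    let output = '';
    for (const part of topic.split('/')) {
        const stringNoNamespace = part.split(':').pop() ?? part; // drop prefixes like "tns1:"
        output = output.length === 0 ? stringNoNamespace : `${output}/${stringNoNamespace}`;
    }
    return output;
}

// stripNamespaces('tns1:RuleEngine/CellMotionDetector/Motion')
//   => 'RuleEngine/CellMotionDetector/Motion'
```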
@@ -92,9 +92,18 @@ export class OnvifCameraAPI {
|
||||
else
|
||||
ret.emit('event', OnvifEvent.AudioStop)
|
||||
}
|
||||
// Reolink
|
||||
else if (eventTopic.includes('Visitor') && (dataValue === true || dataValue === false)) {
|
||||
if (dataValue) {
|
||||
ret.emit('event', OnvifEvent.BinaryStart)
|
||||
}
|
||||
else {
|
||||
ret.emit('event', OnvifEvent.BinaryStop)
|
||||
}
|
||||
}
|
||||
// Mobotix T26
|
||||
else if (eventTopic.includes('VideoSource/Alarm')) {
|
||||
if (dataValue == "Ring" || dataValue == "CameraBellButton") {
|
||||
if (dataValue === "Ring" || dataValue === "CameraBellButton") {
|
||||
ret.emit('event', OnvifEvent.BinaryRingEvent);
|
||||
}
|
||||
}
|
||||
@@ -155,7 +164,7 @@ export class OnvifCameraAPI {
|
||||
this.console.log('supportsEvents error', err);
|
||||
return reject(err);
|
||||
}
|
||||
if (!err && data.events && data.events.WSPullPointSupport && data.events.WSPullPointSupport == true) {
|
||||
if (!err && data.events && data.events.WSPullPointSupport && data.events.WSPullPointSupport === true) {
|
||||
this.console.log('Camera supports WSPullPoint', xml);
|
||||
} else {
|
||||
this.console.log('Camera does not show WSPullPoint support, but trying anyway', xml);
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
import sdk, { MediaObject, Intercom, FFmpegInput, ScryptedMimeTypes } from "@scrypted/sdk";
|
||||
import { RtspSmartCamera } from "../../rtsp/src/rtsp";
|
||||
import { parseSemicolonDelimited, RtspClient } from "@scrypted/common/src/rtsp-server";
|
||||
import { createBindZero } from "@scrypted/common/src/listen-cluster";
|
||||
import { RtspClient, parseSemicolonDelimited } from "@scrypted/common/src/rtsp-server";
|
||||
import { parseSdp } from "@scrypted/common/src/sdp-utils";
|
||||
import { ffmpegLogInitialOutput, safePrintFFmpegArguments } from "@scrypted/common/src/media-helpers";
|
||||
import child_process from 'child_process';
|
||||
import { createBindZero, reserveUdpPort } from "@scrypted/common/src/listen-cluster";
|
||||
import sdk, { FFmpegInput, Intercom, MediaObject, ScryptedMimeTypes } from "@scrypted/sdk";
|
||||
import crypto from 'crypto';
|
||||
import { RtpPacket } from '../../../external/werift/packages/rtp/src/rtp/rtp';
|
||||
import { nextSequenceNumber } from "../../homekit/src/types/camera/jitter-buffer";
|
||||
import { RtspSmartCamera } from "../../rtsp/src/rtsp";
|
||||
import { startRtpForwarderProcess } from '../../webrtc/src/rtp-forwarders';
|
||||
|
||||
|
||||
const { mediaManager } = sdk;
|
||||
|
||||
@@ -80,11 +82,11 @@ export class OnvifIntercom implements Intercom {
|
||||
const url = new URL(this.url);
|
||||
url.username = username;
|
||||
url.password = password;
|
||||
this.intercomClient = new RtspClient(url.toString());
|
||||
this.intercomClient.console = this.camera.console;
|
||||
await this.intercomClient.options();
|
||||
const intercomClient = this.intercomClient = new RtspClient(url.toString());
|
||||
intercomClient.console = this.camera.console;
|
||||
await intercomClient.options();
|
||||
|
||||
const describe = await this.intercomClient.describe({
|
||||
const describe = await intercomClient.describe({
|
||||
Require,
|
||||
});
|
||||
this.camera.console.log('ONVIF Backchannel SDP:');
|
||||
@@ -94,31 +96,35 @@ export class OnvifIntercom implements Intercom {
|
||||
if (!audioBackchannel)
|
||||
throw new Error('ONVIF audio backchannel not found');
|
||||
|
||||
return audioBackchannel;
|
||||
return { audioBackchannel, intercomClient };
|
||||
}
|
||||
|
||||
async startIntercom(media: MediaObject) {
|
||||
const ffmpegInput = await mediaManager.convertMediaObjectToJSON<FFmpegInput>(media, ScryptedMimeTypes.FFmpegInput);
|
||||
|
||||
await this.stopIntercom();
|
||||
|
||||
const audioBackchannel = await this.checkIntercom();
|
||||
const { audioBackchannel, intercomClient } = await this.checkIntercom();
|
||||
if (!audioBackchannel)
|
||||
throw new Error('ONVIF audio backchannel not found');
|
||||
|
||||
const rtp = await reserveUdpPort();
|
||||
const rtpServer = await createBindZero('udp4');
|
||||
const rtp = rtpServer.port;
|
||||
const rtcp = rtp + 1;
|
||||
|
||||
let ip: string;
|
||||
let serverRtp: number;
|
||||
let transportDict: ReturnType<typeof parseSemicolonDelimited>;
|
||||
let tcp = false;
|
||||
try {
|
||||
const headers: any = {
|
||||
Require,
|
||||
Transport: `RTP/AVP;unicast;client_port=${rtp}-${rtcp}`,
|
||||
};
|
||||
|
||||
const response = await this.intercomClient.request('SETUP', headers, audioBackchannel.control);
|
||||
const response = await intercomClient.request('SETUP', headers, audioBackchannel.control);
|
||||
transportDict = parseSemicolonDelimited(response.headers.transport);
|
||||
this.intercomClient.session = response.headers.session.split(';')[0];
|
||||
intercomClient.session = response.headers.session.split(';')[0];
|
||||
ip = this.camera.getIPAddress();
|
||||
|
||||
const { server_port } = transportDict;
|
||||
@@ -126,6 +132,7 @@ export class OnvifIntercom implements Intercom {
|
||||
serverRtp = parseInt(serverPorts[0]);
|
||||
}
|
||||
catch (e) {
|
||||
tcp = true;
|
||||
this.camera.console.error('onvif udp backchannel failed, falling back to tcp', e);
|
||||
|
||||
const headers: any = {
|
||||
@@ -133,21 +140,19 @@ export class OnvifIntercom implements Intercom {
|
||||
Transport: `RTP/AVP/TCP;unicast;interleaved=0-1`,
|
||||
};
|
||||
|
||||
const response = await this.intercomClient.request('SETUP', headers, audioBackchannel.control);
|
||||
const response = await intercomClient.request('SETUP', headers, audioBackchannel.control);
|
||||
transportDict = parseSemicolonDelimited(response.headers.transport);
|
||||
this.intercomClient.session = response.headers.session.split(';')[0];
|
||||
intercomClient.session = response.headers.session.split(';')[0];
|
||||
ip = '127.0.0.1';
|
||||
const server = await createBindZero('udp4');
|
||||
this.intercomClient.client.on('close', () => server.server.close());
|
||||
intercomClient.client.on('close', () => server.server.close());
|
||||
serverRtp = server.port;
|
||||
server.server.on('message', data => {
|
||||
this.intercomClient.send(data, 0);
|
||||
intercomClient.send(data, 0);
|
||||
});
|
||||
}
|
||||
this.camera.console.log('backchannel transport', transportDict);
|
||||
|
||||
const ffmpegInput = await mediaManager.convertMediaObjectToJSON<FFmpegInput>(media, ScryptedMimeTypes.FFmpegInput);
|
||||
|
||||
const availableCodecs = [...parseCodecs(audioBackchannel.contents)];
|
||||
let match: CodecMatch;
|
||||
let codec: SupportedCodec;
|
||||
@@ -171,27 +176,69 @@ export class OnvifIntercom implements Intercom {
|
||||
}
|
||||
// ffmpeg expects ssrc as signed int32.
|
||||
const ssrc = ssrcBuffer.readInt32BE(0);
|
||||
const ssrcUnsigned = ssrcBuffer.readUint32BE(0);
|
||||
|
||||
const args = [
|
||||
'-hide_banner',
|
||||
...ffmpegInput.inputArguments,
|
||||
'-vn',
|
||||
'-acodec', codec.ffmpegCodec,
|
||||
'-ar', match.sampleRate,
|
||||
'-ac', match.channels || '1',
|
||||
"-payload_type", match.payloadType,
|
||||
"-ssrc", ssrc.toString(),
|
||||
'-f', 'rtp',
|
||||
`rtp://${ip}:${serverRtp}?localrtpport=${rtp}&localrtcpport=${rtcp}`,
|
||||
];
|
||||
safePrintFFmpegArguments(this.camera.console, args);
|
||||
const cp = child_process.spawn(await mediaManager.getFFmpegPath(), args);
|
||||
const payloadType = parseInt(match.payloadType);
|
||||
|
||||
ffmpegLogInitialOutput(this.camera.console, cp);
|
||||
|
||||
await this.intercomClient.play({
|
||||
await intercomClient.play({
|
||||
Require,
|
||||
});
|
||||
|
||||
let pending: RtpPacket;
|
||||
let seqNumber = 0;
|
||||
|
||||
const forwarder = await startRtpForwarderProcess(console, ffmpegInput, {
|
||||
audio: {
|
||||
onRtp: (rtp) => {
|
||||
// if (true) {
|
||||
// const p = RtpPacket.deSerialize(rtp);
|
||||
// p.header.payloadType = payloadType;
|
||||
// p.header.ssrc = ssrcUnsigned;
|
||||
// p.header.marker = true;
|
||||
// rtpServer.server.send(p.serialize(), serverRtp, ip);
|
||||
// return;
|
||||
// }
|
||||
|
||||
const p = RtpPacket.deSerialize(rtp);
|
||||
|
||||
if (!pending) {
|
||||
pending = p;
|
||||
return;
|
||||
}
|
||||
|
||||
if (pending.payload.length + p.payload.length < 1024) {
|
||||
pending.payload = Buffer.concat([pending.payload, p.payload]);
|
||||
return;
|
||||
}
|
||||
|
||||
pending.header.payloadType = payloadType;
|
||||
pending.header.ssrc = ssrcUnsigned;
|
||||
pending.header.sequenceNumber = seqNumber;
|
||||
seqNumber = nextSequenceNumber(seqNumber);
|
||||
pending.header.marker = true;
|
||||
|
||||
if (!tcp)
|
||||
rtpServer.server.send(pending.serialize(), serverRtp, ip);
|
||||
else
|
||||
intercomClient.send(pending.serialize(), 0);
|
||||
|
||||
pending = p;
|
||||
},
|
||||
codecCopy: codec.ffmpegCodec,
|
||||
payloadType,
|
||||
ssrc,
|
||||
packetSize: 1024,
|
||||
encoderArguments: [
|
||||
'-acodec', codec.ffmpegCodec,
|
||||
'-ar', match.sampleRate,
|
||||
'-ac', match.channels || '1',
|
||||
],
|
||||
}
|
||||
});
|
||||
|
||||
intercomClient.client.on('close', () => forwarder.kill());
|
||||
forwarder.killPromise.finally(() => intercomClient?.client.destroy());
|
||||
|
||||
this.camera.console.log('intercom playing');
|
||||
}
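One detail worth calling out in the backchannel sender above: every forwarded packet gets a fresh RTP sequence number via `nextSequenceNumber`, imported from the HomeKit plugin's jitter buffer. RTP sequence numbers are unsigned 16-bit values, so the counter has to wrap; a minimal equivalent (illustrative, not the imported implementation) is:

```ts
// RTP sequence numbers are unsigned 16-bit, so increment modulo 65536.
function nextSequenceNumber(sequenceNumber: number): number {
    return (sequenceNumber + 1) & 0xffff;
}

// nextSequenceNumber(65535) === 0
```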
|
||||
|
||||
|
||||
4
plugins/opencv/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/opencv",
|
||||
"version": "0.0.69",
|
||||
"version": "0.0.74",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/opencv",
|
||||
"version": "0.0.69",
|
||||
"version": "0.0.74",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
|
||||
@@ -36,5 +36,5 @@
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
},
|
||||
"version": "0.0.69"
|
||||
"version": "0.0.74"
|
||||
}
|
||||
|
||||
@@ -1,22 +1,46 @@
from __future__ import annotations
from time import sleep
from detect import DetectionSession, DetectPlugin
from typing import Any, List, Tuple
import numpy as np

import asyncio
import concurrent.futures
from typing import Any, List, Tuple

import cv2
import imutils
Gst = None
try:
from gi.repository import Gst
except:
pass
from scrypted_sdk.types import ObjectDetectionModel, ObjectDetectionResult, ObjectsDetected, Setting, VideoFrame
import numpy as np
import scrypted_sdk
from PIL import Image
from scrypted_sdk.types import (ObjectDetectionGeneratorSession,ObjectDetectionSession,
ObjectDetectionResult, ObjectsDetected,
Setting, VideoFrame)

class OpenCVDetectionSession(DetectionSession):
from detect import DetectPlugin

# vips is already multithreaded, but needs to be kicked off the python asyncio thread.
toThreadExecutor = concurrent.futures.ThreadPoolExecutor(max_workers=2, thread_name_prefix="image")

async def to_thread(f):
loop = asyncio.get_running_loop()
return await loop.run_in_executor(toThreadExecutor, f)

async def ensureGrayData(data: bytes, size: Tuple[int, int], format: str):
if format == 'gray':
return data

def convert():
if format == 'rgba':
image = Image.frombuffer('RGBA', size, data)
else:
image = Image.frombuffer('RGB', size, data)

try:
return image.convert('L').tobytes()
finally:
image.close()
return await to_thread(convert)


class OpenCVDetectionSession:
def __init__(self) -> None:
super().__init__()
self.cap: cv2.VideoCapture = None
self.previous_frame: Any = None
self.curFrame = None
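`ensureGrayData` above converts RGB or RGBA frames to 8-bit grayscale via Pillow's `convert('L')`, which applies the ITU-R 601-2 luma transform. A language-agnostic sketch of the same conversion, written here in TypeScript over a tightly packed pixel buffer (the function name is illustrative, not part of the plugin):

```ts
// Convert a tightly packed RGB or RGBA buffer to 8-bit grayscale using the
// ITU-R 601-2 luma weights (the transform Pillow documents for mode 'L').
function toGray(data: Buffer, width: number, height: number, channels: 3 | 4): Buffer {
    const gray = Buffer.allocUnsafe(width * height);
    for (let i = 0, o = 0; o < gray.length; i += channels, o++) {
        const r = data[i];
        const g = data[i + 1];
        const b = data[i + 2];
        // Integer truncation mirrors the 8-bit output of the Pillow conversion.
        gray[o] = (r * 299 + g * 587 + b * 114) / 1000 | 0;
    }
    return gray;
}
```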
@@ -35,21 +59,6 @@ defaultBlur = 5
class OpenCVPlugin(DetectPlugin):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
self.color2Gray = None
self.pixelFormat = "I420"
self.pixelFormatChannelCount = 1

if True:
self.retainAspectRatio = False
self.color2Gray = None
self.pixelFormat = "I420"
self.pixelFormatChannelCount = 1
else:
self.retainAspectRatio = True
self.color2Gray = cv2.COLOR_BGRA2GRAY
self.pixelFormat = "BGRA"
self.pixelFormatChannelCount = 4


def getClasses(self) -> list[str]:
return ['motion']
@@ -91,9 +100,6 @@ class OpenCVPlugin(DetectPlugin):
]

return settings

def get_pixel_format(self):
return self.pixelFormat

def get_input_format(self) -> str:
return 'gray'
@@ -110,42 +116,37 @@ class OpenCVPlugin(DetectPlugin):
blur = int(settings.get('blur', blur))
return area, threshold, interval, blur

def detect(self, detection_session: OpenCVDetectionSession, frame, src_size, convert_to_src_size) -> ObjectsDetected:
settings = detection_session.settings
def detect(self, frame, detection_session: ObjectDetectionSession, src_size, convert_to_src_size) -> ObjectsDetected:
session: OpenCVDetectionSession = detection_session['settings']['session']
settings = detection_session and detection_session.get('settings', None)
area, threshold, interval, blur = self.parse_settings(settings)

# see get_detection_input_size on undocumented size requirements for GRAY8
if self.color2Gray != None:
detection_session.gray = cv2.cvtColor(
frame, self.color2Gray, dst=detection_session.gray)
gray = detection_session.gray
else:
gray = frame
detection_session.curFrame = cv2.GaussianBlur(
gray, (blur, blur), 0, dst=detection_session.curFrame)
gray = frame
session.curFrame = cv2.GaussianBlur(
gray, (blur, blur), 0, dst=session.curFrame)

detections: List[ObjectDetectionResult] = []
detection_result: ObjectsDetected = {}
detection_result['detections'] = detections
detection_result['inputDimensions'] = src_size

if detection_session.previous_frame is None:
detection_session.previous_frame = detection_session.curFrame
detection_session.curFrame = None
if session.previous_frame is None:
session.previous_frame = session.curFrame
session.curFrame = None
return detection_result

detection_session.frameDelta = cv2.absdiff(
detection_session.previous_frame, detection_session.curFrame, dst=detection_session.frameDelta)
tmp = detection_session.curFrame
detection_session.curFrame = detection_session.previous_frame
detection_session.previous_frame = tmp
session.frameDelta = cv2.absdiff(
session.previous_frame, session.curFrame, dst=session.frameDelta)
tmp = session.curFrame
session.curFrame = session.previous_frame
session.previous_frame = tmp

_, detection_session.thresh = cv2.threshold(
detection_session.frameDelta, threshold, 255, cv2.THRESH_BINARY, dst=detection_session.thresh)
detection_session.dilated = cv2.dilate(
detection_session.thresh, None, iterations=2, dst=detection_session.dilated)
_, session.thresh = cv2.threshold(
session.frameDelta, threshold, 255, cv2.THRESH_BINARY, dst=session.thresh)
session.dilated = cv2.dilate(
session.thresh, None, iterations=2, dst=session.dilated)
fcontours = cv2.findContours(
detection_session.dilated, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
session.dilated, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
contours = imutils.grab_contours(fcontours)
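The `detect` method above is classic frame differencing: blur the grayscale frame, take the absolute difference against the previous frame, threshold it, dilate, and turn the remaining contours into motion boxes. A stripped-down TypeScript sketch of the central difference-and-threshold step over raw gray buffers (no OpenCV; blur, dilation, and contour extraction are omitted):

```ts
// Compare two 8-bit grayscale frames of identical dimensions and report how
// much of the image changed by more than `threshold` gray levels.
function motionPercent(previous: Buffer, current: Buffer, threshold: number): number {
    if (previous.length !== current.length)
        throw new Error('frame size mismatch');
    let changed = 0;
    for (let i = 0; i < current.length; i++) {
        if (Math.abs(current[i] - previous[i]) > threshold)
            changed++;
    }
    return changed * 100 / current.length;
}

// A caller would keep the last frame around, mirroring previous_frame/curFrame
// in the plugin, and report motion when the percentage exceeds its setting.
```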
@@ -154,8 +155,8 @@ class OpenCVPlugin(DetectPlugin):
# if w * h != contour_area:
#     print("mismatch w/h", contour_area - w * h)

x2, y2, _ = convert_to_src_size((x + w, y + h))
x, y, _ = convert_to_src_size((x, y))
x2, y2 = convert_to_src_size((x + w, y + h))
x, y = convert_to_src_size((x, y))
w = x2 - x + 1
h = y2 - y + 1

@@ -206,11 +207,16 @@ class OpenCVPlugin(DetectPlugin):
detection_session.cap = None
return super().end_session(detection_session)

async def run_detection_image(self, detection_session: DetectionSession, image: Image.Image, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
# todo
raise Exception('can not run motion detection on image')

async def run_detection_videoframe(self, videoFrame: VideoFrame, detection_session: OpenCVDetectionSession) -> ObjectsDetected:
async def generateObjectDetections(self, videoFrames: Any, detection_session: ObjectDetectionGeneratorSession = None) -> Any:
if not detection_session:
detection_session = {}
if not detection_session.get('settings'):
detection_session['settings'] = {}
settings = detection_session['settings']
settings['session'] = OpenCVDetectionSession()
return super().generateObjectDetections(videoFrames, detection_session)

async def run_detection_videoframe(self, videoFrame: VideoFrame, detection_session: ObjectDetectionSession) -> ObjectsDetected:
width = videoFrame.width
height = videoFrame.height

@@ -234,64 +240,26 @@ class OpenCVPlugin(DetectPlugin):
'height': height,
}

format = videoFrame.format or 'gray'
buffer = await videoFrame.toBuffer({
'resize': resize,
'format': format,
})

def convert_to_src_size(point, normalize = False):
return point[0] * scale, point[1] * scale, True
mat = np.ndarray((height, width, self.pixelFormatChannelCount), buffer=buffer, dtype=np.uint8)
detections = self.detect(
detection_session, mat, (width, height), convert_to_src_size)
return detections

async def run_detection_avframe(self, detection_session: DetectionSession, avframe, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
if avframe.format.name != 'yuv420p' and avframe.format.name != 'yuvj420p':
mat = avframe.to_ndarray(format='gray8')
if format == 'gray':
expectedLength = width * height
# check if resize could not be completed
if expectedLength != len(buffer):
image = Image.frombuffer('L', (videoFrame.width, videoFrame.height), buffer)
try:
buffer = image.resize((width, height), Image.BILINEAR).tobytes()
finally:
image.close()
else:
mat = np.ndarray((avframe.height, avframe.width, self.pixelFormatChannelCount), buffer=avframe.planes[0], dtype=np.uint8)
detections = self.detect(
detection_session, mat, src_size, convert_to_src_size)
if not detections or not len(detections['detections']):
await self.detection_sleep(settings)
return None, None
return detections, None
buffer = await ensureGrayData(buffer, (width, height), format)

async def run_detection_gstsample(self, detection_session: OpenCVDetectionSession, gst_sample, settings: Any, src_size, convert_to_src_size) -> ObjectsDetected:
buf = gst_sample.get_buffer()
caps = gst_sample.get_caps()
# can't trust the width value, compute the stride
height = caps.get_structure(0).get_value('height')
width = caps.get_structure(0).get_value('width')
result, info = buf.map(Gst.MapFlags.READ)
if not result:
return None, None
try:
mat = np.ndarray(
(height,
width,
self.pixelFormatChannelCount),
buffer=info.data,
dtype=np.uint8)
detections = self.detect(
detection_session, mat, src_size, convert_to_src_size)
# no point in triggering empty events.
finally:
buf.unmap(info)

if not detections or not len(detections['detections']):
await self.detection_sleep(settings)
return None, None
return detections, None

def create_detection_session(self):
return OpenCVDetectionSession()

async def detection_sleep(self, settings: Any):
area, threshold, interval, blur = self.parse_settings(settings)
# it is safe to block here because gstreamer creates a queue thread
await asyncio.sleep(interval / 1000)

async def detection_event_notified(self, settings: Any):
await self.detection_sleep(settings)
return await super().detection_event_notified(settings)
def convert_to_src_size(point):
return point[0] * scale, point[1] * scale
mat = np.ndarray((height, width, 1), buffer=buffer, dtype=np.uint8)
detections = self.detect(mat, detection_session, (videoFrame.width, videoFrame.height), convert_to_src_size)
return detections
@@ -1 +0,0 @@
../../tensorflow-lite/src/pipeline
@@ -3,9 +3,6 @@ numpy>=1.16.2
# pillow for anything not intel linux
Pillow>=5.4.1; sys_platform != 'linux' or platform_machine != 'x86_64'
pillow-simd; sys_platform == 'linux' and platform_machine == 'x86_64'
PyGObject>=3.30.4; sys_platform != 'win32'
imutils>=0.5.0
# not available on armhf
av>=10.0.0; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'
# not available on armhf
opencv-python; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'
opencv-python; sys_platform != 'linux' or platform_machine == 'x86_64'
4 plugins/pam-diff/package-lock.json generated
@@ -1,12 +1,12 @@
{
"name": "@scrypted/pam-diff",
"version": "0.0.18",
"version": "0.0.21",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/pam-diff",
"version": "0.0.18",
"version": "0.0.21",
"hasInstallScript": true,
"dependencies": {
"@types/node": "^16.6.1",

@@ -43,5 +43,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.18"
"version": "0.0.21"
}
@@ -1,54 +1,15 @@
import sdk, { FFmpegInput, MediaObject, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionGeneratorResult, ObjectDetectionGeneratorSession, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionSession, ObjectsDetected, ScryptedDeviceBase, ScryptedInterface, ScryptedMimeTypes, VideoFrame } from '@scrypted/sdk';
import child_process, { ChildProcess } from 'child_process';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from "../../../common/src/media-helpers";
import sdk, { MediaObject, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionGeneratorResult, ObjectDetectionGeneratorSession, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionSession, ObjectsDetected, ScryptedDeviceBase, VideoFrame } from '@scrypted/sdk';

import PD from 'pam-diff';
import P2P from 'pipe2pam';
import { PassThrough, Writable } from 'stream';

const { mediaManager } = sdk;

const defaultDifference = 9;
const defaultPercentage = 15;
const defaultPercentage = 2;

interface PamDiffSession {
id: string;
timeout?: NodeJS.Timeout;
cp?: ChildProcess;
pamDiff?: any;
callbacks: ObjectDetectionCallbacks;
}

class PamDiff extends ScryptedDeviceBase implements ObjectDetection {
sessions = new Map<string, PamDiffSession>();

endSession(id: string) {
const pds = this.sessions.get(id);
if (!pds)
return;
this.sessions.delete(pds.id);
const event: ObjectsDetected = {
timestamp: Date.now(),
running: false,
detectionId: pds.id,
}
clearTimeout(pds.timeout);
safeKillFFmpeg(pds.cp);
if (pds.callbacks) {
pds.callbacks.onDetectionEnded(event);
}
else {
this.onDeviceEvent(ScryptedInterface.ObjectDetection, event);
}
}

reschedule(id: string, duration: number,) {
const pds = this.sessions.get(id);
if (!pds)
return;
clearTimeout(pds.timeout);
pds.timeout = setTimeout(() => this.endSession(id), duration);
}

async * generateObjectDetectionsInternal(videoFrames: AsyncGenerator<VideoFrame, any, unknown>, session: ObjectDetectionGeneratorSession): AsyncGenerator<ObjectDetectionGeneratorResult, any, unknown> {
videoFrames = await sdk.connectRPCObject(videoFrames);
@@ -92,7 +53,6 @@ class PamDiff extends ScryptedDeviceBase implements ObjectDetection {
}
const event: ObjectsDetected = {
timestamp: Date.now(),
running: true,
inputDimensions: [width, height],
detections,
}
@@ -111,10 +71,10 @@ ENDHDR
`;

const buffer = await videoFrame.toBuffer({
resize: {
resize: (videoFrame.width !== width || videoFrame.height !== height) ? {
width,
height,
},
} : undefined,
format: 'rgb',
});
pt.write(Buffer.from(header));
@@ -146,157 +106,7 @@ ENDHDR
}

async detectObjects(mediaObject: MediaObject, session?: ObjectDetectionSession, callbacks?: ObjectDetectionCallbacks): Promise<ObjectsDetected> {
if (mediaObject && mediaObject.mimeType?.startsWith('image/'))
throw new Error('can not run motion detection on image')

let { detectionId } = session;
let pds = this.sessions.get(detectionId);
if (pds)
pds.callbacks = callbacks;

if (!session?.duration) {
this.endSession(detectionId);
return {
detectionId,
running: false,
timestamp: Date.now(),
}
}

if (pds) {
this.reschedule(detectionId, session.duration);
pds.pamDiff.setDifference(session.settings?.difference || defaultDifference).setPercent(session.settings?.percent || defaultPercentage);
return {
detectionId,
running: true,
timestamp: Date.now(),
};
}

// unable to start/extend this session.
if (!mediaObject) {
this.endSession(detectionId);
return {
detectionId,
running: false,
timestamp: Date.now(),
}
}

const ffmpeg = await mediaManager.getFFmpegPath();
const ffmpegInput: FFmpegInput = JSON.parse((await mediaManager.convertMediaObjectToBuffer(
mediaObject,
ScryptedMimeTypes.FFmpegInput
)).toString());

pds = {
id: detectionId,
callbacks,
}
this.reschedule(detectionId, session.duration);

const args = ffmpegInput.inputArguments.slice();
args.unshift(
'-hide_banner',
...ffmpegInput.videoDecoderArguments || [],
)
args.push(
'-an', '-dn',
'-c:v',
'pam',
'-pix_fmt',
'rgb24',
'-f',
'image2pipe',
'-vf',
`fps=2,scale=640:360`,
'pipe:3',
);

const p2p = new P2P();
const pamDiff = new PD({
difference: session.settings?.difference || defaultDifference,
percent: session.settings?.percent || defaultPercentage,
response: session?.settings?.motionAsObjects ? 'blobs' : 'percent',
});

pamDiff.on('diff', async (data: any) => {
const trigger = data.trigger[0];
// console.log(trigger.blobs.length);
const { blobs } = trigger;

const detections: ObjectDetectionResult[] = [];
if (blobs?.length) {
for (const blob of blobs) {
detections.push(
{
className: 'motion',
score: 1,
boundingBox: [blob.minX, blob.minY, blob.maxX - blob.minX, blob.maxY - blob.minY],
}
)
}
}
else {
detections.push(
{
className: 'motion',
score: 1,
}
)
}
const event: ObjectsDetected = {
timestamp: Date.now(),
running: true,
detectionId: pds.id,
inputDimensions: [640, 360],
detections,
}
if (pds.callbacks) {
pds.callbacks.onDetection(event);
}
else {
this.onDeviceEvent(ScryptedInterface.ObjectDetection, event);
}
});

const console = sdk.deviceManager.getMixinConsole(mediaObject.sourceId, this.nativeId);

pds.pamDiff = pamDiff;
pds.pamDiff
.setDifference(session.settings?.difference || defaultDifference)
.setPercent(session.settings?.percent || defaultPercentage)
.setResponse(session?.settings?.motionAsObjects ? 'blobs' : 'percent');;
safePrintFFmpegArguments(console, args);
pds.cp = child_process.spawn(ffmpeg, args, {
stdio: ['inherit', 'pipe', 'pipe', 'pipe']
});
let pamTimeout: NodeJS.Timeout;
const resetTimeout = () => {
clearTimeout(pamTimeout);
pamTimeout = setTimeout(() => {
const check = this.sessions.get(detectionId);
if (check !== pds)
return;
console.error('PAM image stream timed out. Ending session.');
this.endSession(detectionId);
}, 60000);
}
p2p.on('data', () => {
resetTimeout();
})
resetTimeout();
pds.cp.stdio[3].pipe(p2p as any).pipe(pamDiff as any);
pds.cp.on('exit', () => this.endSession(detectionId));
ffmpegLogInitialOutput(console, pds.cp);

this.sessions.set(detectionId, pds);

return {
detectionId,
running: true,
timestamp: Date.now(),
}
throw new Error('can not run motion detection on image')
}

async getDetectionModel(): Promise<ObjectDetectionModel> {
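`detectObjects` above guards the PAM image stream with a watchdog: every frame parsed by pipe2pam resets a 60-second timer, and if the timer fires before the next frame arrives the session is torn down. A generic sketch of that reset-on-activity pattern, independent of ffmpeg and pam-diff (all names are illustrative):

```ts
// Restartable inactivity watchdog: call reset() on every unit of progress;
// onTimeout only runs if no progress is seen for timeoutMs.
function createWatchdog(timeoutMs: number, onTimeout: () => void) {
    let timer: NodeJS.Timeout | undefined;
    const reset = () => {
        clearTimeout(timer);
        timer = setTimeout(onTimeout, timeoutMs);
    };
    const stop = () => clearTimeout(timer);
    reset();
    return { reset, stop };
}

// Usage mirroring the hunk above: reset on every parsed PAM frame, end the
// detection session if the stream stalls for a minute.
// const watchdog = createWatchdog(60000, () => endSession(detectionId));
// p2p.on('data', () => watchdog.reset());
```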
2 plugins/prebuffer-mixin/.vscode/launch.json vendored
@@ -10,7 +10,7 @@
"port": 10081,
"request": "attach",
"skipFiles": [
"**/plugin-remote-worker.*",
"**/plugin-console.*",
"<node_internals>/**"
],
"autoAttachChildProcesses": true,
Some files were not shown because too many files have changed in this diff.