Mirror of https://github.com/koush/scrypted.git (synced 2026-02-03 14:13:28 +00:00)

Compare commits (91 commits)
| SHA1 |
|---|
| 53c4aa7066 |
| ce5547e4e7 |
| 95bdf5c2b5 |
| 8953a96089 |
| 0d270454ab |
| e740a695c0 |
| 78118daa69 |
| 61a824d322 |
| 06bac3c748 |
| 16b10dc353 |
| 6892b443e0 |
| 8b303e037e |
| 76efef37ea |
| e64a66aa66 |
| 05578d28c6 |
| 0889aea3be |
| a081e6e3c9 |
| 5dfa0889b7 |
| ed1d09b9be |
| 2d8a986155 |
| 1fb4cfd3b6 |
| 2d987747a2 |
| d39e4e3ff1 |
| 012ca48f9a |
| cca1f3e000 |
| 40a38cfd31 |
| d2b39e8fa3 |
| 20101cda2e |
| c90724daa6 |
| fedb22fab2 |
| 994f1974d7 |
| d648fe552d |
| ccafff28cd |
| 3da49d47af |
| e1918cfa89 |
| 7b19204d77 |
| 5dac1de87e |
| c9a2474f17 |
| e5d9d0d054 |
| 1272582510 |
| 51271a0e02 |
| 9b32952a22 |
| 5b92aea54b |
| 61b59f4ca0 |
| 93f8f43de2 |
| dc88e0b07f |
| 14a9f953a9 |
| 528885d5e2 |
| e779f37689 |
| c6c2a8dc49 |
| d8d2fd25cd |
| 301a5b6685 |
| 2a4bac42ed |
| f55cadedb5 |
| dd9ff45b21 |
| a0aada2f03 |
| 8499843f31 |
| 672a33b93b |
| f9a744c7dc |
| 5b124013b7 |
| d2f1c69e98 |
| 2a2f96a771 |
| dc9b5f447e |
| 1fb0c01e7e |
| 014d7b35ac |
| b08267dab0 |
| 97d78516f2 |
| 360c2437c1 |
| 0b230bfc74 |
| d25dc8d266 |
| 5f4d1e99cd |
| ee38ef7817 |
| 80af38d3e1 |
| 2f19866f05 |
| cf1c500e9d |
| 9a770e9dc9 |
| 6dbb8863a0 |
| 5eac8d0ab9 |
| 272bad8f29 |
| 83a3352862 |
| 4d5a693208 |
| 70e7f944c0 |
| 5a52c03a3d |
| f9f597ef01 |
| 2e07788c0c |
| 9c0fbc1cb6 |
| 239d49899d |
| 2d3589b5a3 |
| 96ec465a38 |
| 5bb6b87c7d |
| fcfedccaf8 |
.github/ISSUE_TEMPLATE/bug_report.md (vendored): 4 lines changed
```diff
@@ -13,11 +13,11 @@ Before opening an issue, view the device's Console logs in the Scrypted Manageme
 
 **DO NOT OPEN ISSUES FOR ANY OF THE FOLLOWING:**
 
-* Server setup assistance. Use Discord, Reddit, or Github Discussions.
-* Hardware setup assistance. Use Discord, Reddit, or Github Discussions.
+* Server or hardware setup assistance. Use Discord, Reddit, or Github Discussions.
 * Feature Requests. Use Discord, Reddit, or Github Discussions.
 * Packet loss in your camera logs. This is wifi/network congestion.
 * HomeKit weirdness. See HomeKit troubleshooting guide.
+* Release schedules or timelines. Releases are rolled out unevenly across the different server platforms.
 
 However, if something **was working**, and is now **no longer working**, you may create a Github issue.
 Created issues that do not meet these requirements or are improperly filled out will be immediately closed.
```
common/src/activity-timeout.ts (new file): 28 lines added
@@ -0,0 +1,28 @@
```ts
export function createActivityTimeout(timeout: number, timeoutCallback: () => void) {
    let dataTimeout: NodeJS.Timeout;

    let lastTime = Date.now();
    function resetActivityTimer() {
        lastTime = Date.now();
    }

    function clearActivityTimer() {
        clearInterval(dataTimeout);
    }

    if (timeout) {
        dataTimeout = setInterval(() => {
            if (Date.now() > lastTime + timeout) {
                clearInterval(dataTimeout);
                dataTimeout = undefined;
                timeoutCallback();
            }
        }, timeout);
    }

    resetActivityTimer();
    return {
        resetActivityTimer,
        clearActivityTimer,
    }
}
```
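A minimal usage sketch of the new helper, assuming a hypothetical stream consumer (the socket shape and the 10-second window are illustrative, not from the diff): callers bump the timer whenever data arrives and clear it on teardown.

```ts
import { createActivityTimeout } from './activity-timeout';

// Hypothetical stream reader: tear down the connection if no data
// arrives within the inactivity window.
function watchStream(socket: { on(event: 'data' | 'close', cb: () => void): void; destroy(): void }) {
    const { resetActivityTimer, clearActivityTimer } = createActivityTimeout(10000, () => {
        console.warn('stream idle, destroying socket');
        socket.destroy();
    });

    // Any incoming data counts as activity.
    socket.on('data', () => resetActivityTimer());
    // Stop the interval once the stream is closed.
    socket.on('close', () => clearActivityTimer());
}
```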
@@ -89,27 +89,44 @@ export const H264_NAL_TYPE_FU_B = 29;
|
||||
export const H264_NAL_TYPE_MTAP16 = 26;
|
||||
export const H264_NAL_TYPE_MTAP32 = 27;
|
||||
|
||||
export const H265_NAL_TYPE_AGG = 48;
|
||||
export const H265_NAL_TYPE_VPS = 32;
|
||||
export const H265_NAL_TYPE_SPS = 33;
|
||||
export const H265_NAL_TYPE_PPS = 34;
|
||||
export const H265_NAL_TYPE_IDR_N = 19;
|
||||
export const H265_NAL_TYPE_IDR_W = 20;
|
||||
|
||||
export function findH264NaluType(streamChunk: StreamChunk, naluType: number) {
|
||||
if (streamChunk.type !== 'h264')
|
||||
return;
|
||||
return findH264NaluTypeInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12), naluType);
|
||||
}
|
||||
|
||||
export function findH265NaluType(streamChunk: StreamChunk, naluType: number) {
|
||||
if (streamChunk.type !== 'h265')
|
||||
return;
|
||||
return findH265NaluTypeInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12), naluType);
|
||||
}
|
||||
|
||||
export function parseH264NaluType(firstNaluByte: number) {
|
||||
return firstNaluByte & 0x1f;
|
||||
}
|
||||
|
||||
export function findH264NaluTypeInNalu(nalu: Buffer, naluType: number) {
|
||||
const checkNaluType = nalu[0] & 0x1f;
|
||||
const checkNaluType = parseH264NaluType(nalu[0]);
|
||||
if (checkNaluType === H264_NAL_TYPE_STAP_A) {
|
||||
let pos = 1;
|
||||
while (pos < nalu.length) {
|
||||
const naluLength = nalu.readUInt16BE(pos);
|
||||
pos += 2;
|
||||
const stapaType = nalu[pos] & 0x1f;
|
||||
const stapaType = parseH264NaluType(nalu[pos]);
|
||||
if (stapaType === naluType)
|
||||
return nalu.subarray(pos, pos + naluLength);
|
||||
pos += naluLength;
|
||||
}
|
||||
}
|
||||
else if (checkNaluType === H264_NAL_TYPE_FU_A) {
|
||||
const fuaType = nalu[1] & 0x1f;
|
||||
const fuaType = parseH264NaluType(nalu[1]);
|
||||
const isFuStart = !!(nalu[1] & 0x80);
|
||||
|
||||
if (fuaType === naluType && isFuStart)
|
||||
@@ -121,39 +138,52 @@ export function findH264NaluTypeInNalu(nalu: Buffer, naluType: number) {
|
||||
return;
|
||||
}
|
||||
|
||||
function parseH265NaluType(firstNaluByte: number) {
|
||||
return (firstNaluByte & 0b01111110) >> 1;
|
||||
}
|
||||
|
||||
export function findH265NaluTypeInNalu(nalu: Buffer, naluType: number) {
|
||||
const checkNaluType = parseH265NaluType(nalu[0]);
|
||||
if (checkNaluType === H265_NAL_TYPE_AGG) {
|
||||
let pos = 1;
|
||||
while (pos < nalu.length) {
|
||||
const naluLength = nalu.readUInt16BE(pos);
|
||||
pos += 2;
|
||||
const stapaType = parseH265NaluType(nalu[pos]);
|
||||
if (stapaType === naluType)
|
||||
return nalu.subarray(pos, pos + naluLength);
|
||||
pos += naluLength;
|
||||
}
|
||||
}
|
||||
else if (checkNaluType === naluType) {
|
||||
return nalu;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
export function getNaluTypes(streamChunk: StreamChunk) {
|
||||
if (streamChunk.type !== 'h264')
|
||||
return new Set<number>();
|
||||
return getNaluTypesInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12))
|
||||
}
|
||||
|
||||
export function getNaluFragmentInformation(nalu: Buffer) {
|
||||
const naluType = nalu[0] & 0x1f;
|
||||
const fua = naluType === H264_NAL_TYPE_FU_A;
|
||||
return {
|
||||
fua,
|
||||
fuaStart: fua && !!(nalu[1] & 0x80),
|
||||
fuaEnd: fua && !!(nalu[1] & 0x40),
|
||||
}
|
||||
}
|
||||
|
||||
export function getNaluTypesInNalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
|
||||
const ret = new Set<number>();
|
||||
const naluType = nalu[0] & 0x1f;
|
||||
const naluType = parseH264NaluType(nalu[0]);
|
||||
if (naluType === H264_NAL_TYPE_STAP_A) {
|
||||
ret.add(H264_NAL_TYPE_STAP_A);
|
||||
let pos = 1;
|
||||
while (pos < nalu.length) {
|
||||
const naluLength = nalu.readUInt16BE(pos);
|
||||
pos += 2;
|
||||
const stapaType = nalu[pos] & 0x1f;
|
||||
const stapaType = parseH264NaluType(nalu[pos]);
|
||||
ret.add(stapaType);
|
||||
pos += naluLength;
|
||||
}
|
||||
}
|
||||
else if (naluType === H264_NAL_TYPE_FU_A) {
|
||||
ret.add(H264_NAL_TYPE_FU_A);
|
||||
const fuaType = nalu[1] & 0x1f;
|
||||
const fuaType = parseH264NaluType(nalu[1]);
|
||||
if (fuaRequireStart) {
|
||||
const isFuStart = !!(nalu[1] & 0x80);
|
||||
if (isFuStart)
|
||||
@@ -175,6 +205,33 @@ export function getNaluTypesInNalu(nalu: Buffer, fuaRequireStart = false, fuaReq
|
||||
return ret;
|
||||
}
|
||||
|
||||
export function getH265NaluTypes(streamChunk: StreamChunk) {
|
||||
if (streamChunk.type !== 'h265')
|
||||
return new Set<number>();
|
||||
return getNaluTypesInH265Nalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12))
|
||||
}
|
||||
|
||||
export function getNaluTypesInH265Nalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
|
||||
const ret = new Set<number>();
|
||||
const naluType = parseH265NaluType(nalu[0]);
|
||||
if (naluType === H265_NAL_TYPE_AGG) {
|
||||
ret.add(H265_NAL_TYPE_AGG);
|
||||
let pos = 1;
|
||||
while (pos < nalu.length) {
|
||||
const naluLength = nalu.readUInt16BE(pos);
|
||||
pos += 2;
|
||||
const stapaType = parseH265NaluType(nalu[pos]);
|
||||
ret.add(stapaType);
|
||||
pos += naluLength;
|
||||
}
|
||||
}
|
||||
else {
|
||||
ret.add(naluType);
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
export function createRtspParser(options?: StreamParserOptions): RtspStreamParser {
|
||||
let resolve: any;
|
||||
|
||||
@@ -195,12 +252,23 @@ export function createRtspParser(options?: StreamParserOptions): RtspStreamParse
|
||||
findSyncFrame(streamChunks: StreamChunk[]) {
|
||||
for (let prebufferIndex = 0; prebufferIndex < streamChunks.length; prebufferIndex++) {
|
||||
const streamChunk = streamChunks[prebufferIndex];
|
||||
if (streamChunk.type !== 'h264') {
|
||||
continue;
|
||||
if (streamChunk.type === 'h264') {
|
||||
const naluTypes = getNaluTypes(streamChunk);
|
||||
if (naluTypes.has(H264_NAL_TYPE_SPS) || naluTypes.has(H264_NAL_TYPE_IDR)) {
|
||||
return streamChunks.slice(prebufferIndex);
|
||||
}
|
||||
}
|
||||
else if (streamChunk.type === 'h265') {
|
||||
const naluTypes = getH265NaluTypes(streamChunk);
|
||||
|
||||
if (findH264NaluType(streamChunk, H264_NAL_TYPE_SPS) || findH264NaluType(streamChunk, H264_NAL_TYPE_IDR)) {
|
||||
return streamChunks.slice(prebufferIndex);
|
||||
if (naluTypes.has(H265_NAL_TYPE_VPS)
|
||||
|| naluTypes.has(H265_NAL_TYPE_SPS)
|
||||
|| naluTypes.has(H265_NAL_TYPE_PPS)
|
||||
|| naluTypes.has(H265_NAL_TYPE_IDR_N)
|
||||
|| naluTypes.has(H265_NAL_TYPE_IDR_W)
|
||||
) {
|
||||
return streamChunks.slice(prebufferIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
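The hunks above hinge on how the NAL unit type is encoded in the first byte of a NALU: H.264 keeps it in the low 5 bits, while H.265 keeps it in bits 1-6, which is why `findSyncFrame` now checks H.265 VPS/SPS/PPS/IDR types in addition to the H.264 SPS/IDR check before trimming the prebuffer. A standalone sketch of the two extractions (constants copied from the diff; `H264_NAL_TYPE_IDR = 5` is an assumption taken from the H.264 spec since the diff does not show its value, and the sample bytes are made up):

```ts
// H.264 NALU header byte: forbidden_zero_bit (1) | nal_ref_idc (2) | nal_unit_type (5)
const parseH264NaluType = (firstByte: number) => firstByte & 0x1f;

// H.265 first NALU header byte: forbidden_zero_bit (1) | nal_unit_type (6) | layer id high bit (1)
const parseH265NaluType = (firstByte: number) => (firstByte & 0b01111110) >> 1;

const H264_NAL_TYPE_IDR = 5;     // assumed spec value, referenced but not defined in this diff
const H265_NAL_TYPE_IDR_W = 20;  // from the diff

// Made-up example header bytes:
console.log(parseH264NaluType(0x65) === H264_NAL_TYPE_IDR);   // 0x65 & 0x1f === 5
console.log(parseH265NaluType(0x28) === H265_NAL_TYPE_IDR_W); // (0x28 & 0x7e) >> 1 === 20
```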
@@ -540,6 +608,7 @@ export class RtspClient extends RtspBase {
|
||||
throw new Error('no WWW-Authenticate found');
|
||||
|
||||
const { BASIC } = await import('http-auth-utils');
|
||||
// @ts-ignore
|
||||
const { parseHTTPHeadersQuotedKeyValueSet } = await import('http-auth-utils/dist/utils');
|
||||
|
||||
if (this.wwwAuthenticate.includes('Basic')) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Home Assistant Addon Configuration
|
||||
name: Scrypted
|
||||
version: "v0.105.0-jammy-full"
|
||||
version: "v0.111.0-jammy-full"
|
||||
slug: scrypted
|
||||
description: Scrypted is a high performance home video integration and automation platform
|
||||
url: "https://github.com/koush/scrypted"
|
||||
|
||||
@@ -19,13 +19,13 @@ then
|
||||
|
||||
rm -rf /tmp/neo && mkdir -p /tmp/neo && cd /tmp/neo &&
|
||||
apt-get install -y ocl-icd-libopencl1 &&
|
||||
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.16510.2/intel-igc-core_1.0.16510.2_amd64.deb &&
|
||||
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.16510.2/intel-igc-opencl_1.0.16510.2_amd64.deb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.13.29138.7/intel-level-zero-gpu-dbgsym_1.3.29138.7_amd64.ddeb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.13.29138.7/intel-level-zero-gpu_1.3.29138.7_amd64.deb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.13.29138.7/intel-opencl-icd-dbgsym_24.13.29138.7_amd64.ddeb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.13.29138.7/intel-opencl-icd_24.13.29138.7_amd64.deb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.13.29138.7/libigdgmm12_22.3.18_amd64.deb &&
|
||||
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.16695.4/intel-igc-core_1.0.16695.4_amd64.deb &&
|
||||
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.16695.4/intel-igc-opencl_1.0.16695.4_amd64.deb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/intel-level-zero-gpu-dbgsym_1.3.29377.6_amd64.ddeb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/intel-level-zero-gpu_1.3.29377.6_amd64.deb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/intel-opencl-icd-dbgsym_24.17.29377.6_amd64.ddeb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/intel-opencl-icd_24.17.29377.6_amd64.deb &&
|
||||
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.17.29377.6/libigdgmm12_22.3.19_amd64.deb &&
|
||||
dpkg -i *.deb &&
|
||||
cd /tmp && rm -rf /tmp/neo &&
|
||||
apt-get -y dist-upgrade;
|
||||
|
||||
@@ -41,12 +41,19 @@ pct restore $VMID $SCRYPTED_TAR_ZST $@
|
||||
if [ "$?" != "0" ]
|
||||
then
|
||||
echo ""
|
||||
echo "pct restore failed"
|
||||
echo "The Scrypted container installation failed (pct restore error)."
|
||||
echo ""
|
||||
echo "This may be caused by the server's 'local' storage not supporting containers."
|
||||
echo "Try running this script again with a different storage device (local-lvm, local-zfs). For example:"
|
||||
echo "This may be because the server's 'local' storage device is not being a valid"
|
||||
echo "location for containers."
|
||||
echo "Try running this script again with a different storage device like"
|
||||
echo "'local-lvm' or 'local-zfs'."
|
||||
echo ""
|
||||
echo "#############################################################################"
|
||||
echo "Paste the following command into this shell to install to local-lvm instead:"
|
||||
echo ""
|
||||
echo "bash $0 --storage local-lvm"
|
||||
echo "#############################################################################"
|
||||
echo ""
|
||||
echo ""
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -70,7 +70,7 @@ async function getAuth(options: AuthFetchOptions, url: string | URL, method: str
|
||||
|
||||
export function createAuthFetch<B, M>(
|
||||
h: fetcher<B, M>,
|
||||
parser: (body: M, responseType: HttpFetchResponseType) => Promise<any>
|
||||
parser: (body: M, responseType: HttpFetchResponseType | undefined) => Promise<any>
|
||||
) {
|
||||
const authHttpFetch = async <T extends HttpFetchOptions<B>>(options: T & AuthFetchOptions): ReturnType<typeof h<T>> => {
|
||||
const method = getFetchMethod(options);
|
||||
@@ -99,7 +99,7 @@ export function createAuthFetch<B, M>(
|
||||
};
|
||||
}
|
||||
|
||||
let authenticateHeaders: string | string[] = initialResponse.headers.get('www-authenticate');
|
||||
let authenticateHeaders: string | string[] | null = initialResponse.headers.get('www-authenticate');
|
||||
if (!authenticateHeaders)
|
||||
throw new Error('Did not find WWW-Authenticate header.');
|
||||
|
||||
|
||||
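The type change above reflects that `Headers.get()` returns `string | null`, so the value must be narrowed before the challenge is parsed. A minimal sketch of the same narrowing, using a hypothetical headers shape rather than the plugin's actual response object:

```ts
function getWwwAuthenticate(headers: { get(name: string): string | null }): string {
    const value = headers.get('www-authenticate');
    if (!value)
        throw new Error('Did not find WWW-Authenticate header.');
    // After the guard, `value` is narrowed from string | null to string.
    return value;
}
```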
@@ -9,6 +9,7 @@
|
||||
"inlineSources": true,
|
||||
"declaration": true,
|
||||
"resolveJsonModule": true,
|
||||
"strict": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
|
||||
packages/cli/package-lock.json (generated): 14 lines changed
@@ -10,7 +10,7 @@
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@scrypted/client": "^1.3.3",
|
||||
"@scrypted/types": "^0.2.99",
|
||||
"@scrypted/types": "^0.3.30",
|
||||
"engine.io-client": "^6.5.3",
|
||||
"readline-sync": "^1.4.10",
|
||||
"semver": "^7.5.4",
|
||||
@@ -101,15 +101,11 @@
|
||||
"rimraf": "^5.0.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@scrypted/client/node_modules/@scrypted/types": {
|
||||
"version": "0.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.3.4.tgz",
|
||||
"integrity": "sha512-k/YMx8lIWOkePgXfKW9POr12mb+erFU2JKxO7TW92GyW8ojUWw9VOc0PK6O9bybi0vhsEnvMFkO6pO6bAonsVA=="
|
||||
},
|
||||
"node_modules/@scrypted/types": {
|
||||
"version": "0.2.99",
|
||||
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.2.99.tgz",
|
||||
"integrity": "sha512-2J1FH7tpAW5X3rgA70gJ+z0HFM90c/tBA+JXdP1vI1d/0yVmh9TSxnHoCuADN4R2NQXHmoZ6Nbds9kKAQ/25XQ=="
|
||||
"version": "0.3.30",
|
||||
"resolved": "https://registry.npmjs.org/@scrypted/types/-/types-0.3.30.tgz",
|
||||
"integrity": "sha512-1k+JVSR6WSNmE/5mLdqfrTmV3uRbvZp0OwKb8ikNi39ysBuC000tQGcEdXZqhYqRgWdhDTWtxXe9XsYoAZGKmA==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/@socket.io/component-emitter": {
|
||||
"version": "3.1.0",
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@scrypted/client": "^1.3.3",
|
||||
"@scrypted/types": "^0.2.99",
|
||||
"@scrypted/types": "^0.3.30",
|
||||
"engine.io-client": "^6.5.3",
|
||||
"readline-sync": "^1.4.10",
|
||||
"semver": "^7.5.4",
|
||||
|
||||
@@ -160,11 +160,11 @@ async function main() {
|
||||
const ffmpegInput = await sdk.mediaManager.convertMediaObjectToJSON<FFmpegInput>(await pendingResult, ScryptedMimeTypes.FFmpegInput);
|
||||
if (ffmpegInput.url && ffmpegInput.urls?.[0]) {
|
||||
const url = new URL(ffmpegInput.url);
|
||||
if (url.hostname === '127.0.0.1' && ffmpegInput.urls?.[0]) {
|
||||
ffmpegInput.inputArguments = ffmpegInput.inputArguments.map(i => i === ffmpegInput.url ? ffmpegInput.urls?.[0] : i);
|
||||
if (url.hostname === '127.0.0.1' && ffmpegInput.urls?.[0] && ffmpegInput.inputArguments) {
|
||||
ffmpegInput.inputArguments = ffmpegInput.inputArguments.map(i => i === ffmpegInput.url && ffmpegInput.urls ? ffmpegInput.urls?.[0] : i);
|
||||
}
|
||||
}
|
||||
const args = [...ffmpegInput.inputArguments];
|
||||
const args = ffmpegInput.inputArguments ? [...ffmpegInput.inputArguments] : [];
|
||||
if (ffmpegInput.h264FilterArguments)
|
||||
args.push(...ffmpegInput.h264FilterArguments);
|
||||
console.log('ffplay', ...args);
|
||||
|
||||
@@ -90,7 +90,13 @@ export async function installServe(installVersion: string, ignoreError?: boolean
|
||||
const installJson = path.join(installDir, 'install.json');
|
||||
try {
|
||||
const { version } = JSON.parse(fs.readFileSync(installJson).toString());
|
||||
if (semver.parse(process.version).major !== semver.parse(version).major)
|
||||
const processSemver = semver.parse(process.version);
|
||||
if (!processSemver)
|
||||
throw new Error('error parsing process version');
|
||||
const installSemver = semver.parse(version);
|
||||
if (!installSemver)
|
||||
throw new Error('error parsing install.json version');
|
||||
if (processSemver.major !== installSemver.major)
|
||||
throw new Error('mismatch');
|
||||
}
|
||||
catch (e) {
|
||||
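The added guards account for `semver.parse()` returning `null` when the input is not a valid version string; only after both parses succeed is the major-version comparison performed. A reduced sketch of that check, assuming the `semver` npm package (the helper name is illustrative):

```ts
import semver from 'semver';

// Returns true when the running Node.js major version matches the major
// version recorded at install time (e.g. read from install.json).
function majorVersionMatches(installedVersion: string): boolean {
    const processSemver = semver.parse(process.version); // e.g. 'v20.11.1'
    const installSemver = semver.parse(installedVersion);
    if (!processSemver || !installSemver)
        throw new Error('error parsing version');
    return processSemver.major === installSemver.major;
}
```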
@@ -111,16 +117,32 @@ export async function installServe(installVersion: string, ignoreError?: boolean
|
||||
}
|
||||
|
||||
export async function serveMain(installVersion?: string) {
|
||||
let install = !!installVersion;
|
||||
const options = ((): { install: true; version: string } | { install: false } => {
|
||||
if (installVersion) {
|
||||
console.log(`Installing @scrypted/server@${installVersion}`);
|
||||
return {
|
||||
install: true,
|
||||
version: installVersion
|
||||
};
|
||||
}
|
||||
|
||||
if (!fs.existsSync('node_modules/@scrypted/server')) {
|
||||
console.log('Package @scrypted/server not found. Installing.');
|
||||
return {
|
||||
install: true,
|
||||
version: 'latest',
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
install: false,
|
||||
}
|
||||
})();
|
||||
|
||||
const { installDir, volume } = cwdInstallDir();
|
||||
if (!fs.existsSync('node_modules/@scrypted/server')) {
|
||||
install = true;
|
||||
installVersion ||= 'latest';
|
||||
console.log('Package @scrypted/server not found. Installing.');
|
||||
}
|
||||
if (install) {
|
||||
await installServe(installVersion, true);
|
||||
|
||||
if (options.install) {
|
||||
await installServe(options.version, true);
|
||||
}
|
||||
|
||||
// todo: remove at some point after core lxc updater rolls out.
|
||||
|
||||
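The refactor above replaces two mutable flags with an immediately-invoked function returning a discriminated union, so TypeScript only exposes `version` when `install` is true. A reduced sketch of the pattern (function and variable names here are illustrative, not the CLI's actual code):

```ts
type InstallOptions = { install: true; version: string } | { install: false };

function resolveInstall(requested?: string, alreadyInstalled = true): InstallOptions {
    if (requested)
        return { install: true, version: requested };
    if (!alreadyInstalled)
        return { install: true, version: 'latest' };
    return { install: false };
}

const options = resolveInstall(undefined, false);
if (options.install) {
    // `options.version` is only accessible inside this narrowed branch.
    console.log(`installing @scrypted/server@${options.version}`);
}
```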
@@ -9,6 +9,7 @@
|
||||
"inlineSources": true,
|
||||
"declaration": true,
|
||||
"moduleResolution": "Node16",
|
||||
"strict": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
|
||||
plugins/cloud/package-lock.json (generated): 1280 lines changed (diff suppressed because it is too large)
@@ -42,7 +42,7 @@
|
||||
"@scrypted/common": "file:../../common",
|
||||
"@scrypted/sdk": "file:../../sdk",
|
||||
"bpmux": "^8.2.1",
|
||||
"cloudflared": "^0.4.0",
|
||||
"cloudflared": "^0.5.2",
|
||||
"exponential-backoff": "^3.1.1",
|
||||
"http-proxy": "^1.18.1",
|
||||
"nat-upnp": "file:./external/node-nat-upnp"
|
||||
@@ -51,7 +51,7 @@
|
||||
"@types/http-proxy": "^1.17.14",
|
||||
"@types/ip": "^1.1.3",
|
||||
"@types/nat-upnp": "^1.1.5",
|
||||
"@types/node": "^20.11.19"
|
||||
"@types/node": "^20.14.6"
|
||||
},
|
||||
"version": "0.2.13"
|
||||
"version": "0.2.15"
|
||||
}
|
||||
|
||||
@@ -531,8 +531,9 @@ class ScryptedCloud extends ScryptedDeviceBase implements OauthClient, Settings,
|
||||
throw new Error('@scrypted/cloud is not logged in.');
|
||||
const q = qsstringify({
|
||||
scope: local.pathname,
|
||||
serverId: this.storageSettings.values.serverId,
|
||||
ttl,
|
||||
})
|
||||
});
|
||||
const scope = await httpFetch({
|
||||
url: `https://${this.getHostname()}/_punch/scope?${q}`,
|
||||
headers: {
|
||||
@@ -951,13 +952,13 @@ class ScryptedCloud extends ScryptedDeviceBase implements OauthClient, Settings,
|
||||
}
|
||||
|
||||
async startCloudflared() {
|
||||
if (!this.storageSettings.values.cloudflareEnabled) {
|
||||
this.console.log('cloudflared is disabled.');
|
||||
return;
|
||||
}
|
||||
|
||||
while (true) {
|
||||
try {
|
||||
if (!this.storageSettings.values.cloudflareEnabled) {
|
||||
this.console.log('cloudflared is disabled.');
|
||||
return;
|
||||
}
|
||||
|
||||
this.console.log('starting cloudflared');
|
||||
this.cloudflared = await backOff(async () => {
|
||||
const pluginVolume = process.env.SCRYPTED_PLUGIN_VOLUME;
|
||||
@@ -1057,12 +1058,13 @@ class ScryptedCloud extends ScryptedDeviceBase implements OauthClient, Settings,
|
||||
maxDelay: 300000,
|
||||
});
|
||||
|
||||
await once(this.cloudflared.child, 'exit');
|
||||
throw new Error('cloudflared exited.');
|
||||
await once(this.cloudflared.child, 'exit').catch(() => { });
|
||||
// the successfully started cloudflared process may exit at some point, loop and allow it to restart.
|
||||
this.console.error('cloudflared exited');
|
||||
}
|
||||
catch (e) {
|
||||
// this error may be reached if the cloudflared backoff fails.
|
||||
this.console.error('cloudflared error', e);
|
||||
throw e;
|
||||
}
|
||||
finally {
|
||||
this.cloudflared = undefined;
|
||||
|
||||
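The change above turns a one-shot failure into a supervised loop: when the tunnel process exits, the error is logged and the surrounding `while (true)` starts it again instead of propagating. A minimal sketch of that supervision pattern with a generic child process, not the actual cloudflared wiring:

```ts
import { once } from 'events';
import type { ChildProcess } from 'child_process';

async function superviseTunnel(start: () => Promise<ChildProcess>, enabled: () => boolean) {
    while (true) {
        if (!enabled())
            return;
        const child = await start();
        // Wait for the process to exit; swallow the rejection so the loop can restart it.
        await once(child, 'exit').catch(() => { });
        console.error('tunnel exited, restarting');
    }
}
```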
plugins/core/package-lock.json (generated): 4 lines changed
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/core",
|
||||
"version": "0.3.25",
|
||||
"version": "0.3.28",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/core",
|
||||
"version": "0.3.25",
|
||||
"version": "0.3.28",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@scrypted/common": "file:../../common",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/core",
|
||||
"version": "0.3.25",
|
||||
"version": "0.3.28",
|
||||
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache-2.0",
|
||||
|
||||
@@ -54,7 +54,13 @@ export async function checkLxcDependencies() {
|
||||
r(stdout + '\n' + stderr);
|
||||
}));
|
||||
|
||||
if (output.includes('Version: 23')) {
|
||||
if (
|
||||
// apt
|
||||
output.includes('Version: 23')
|
||||
// was installed via script at some point
|
||||
|| output.includes('Version: 24.13.29138.7')
|
||||
// current script version: 24.17.29377.6
|
||||
) {
|
||||
const cp = child_process.spawn('sh', ['-c', 'curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-graphics.sh | bash']);
|
||||
const [exitCode] = await once(cp, 'exit');
|
||||
if (exitCode !== 0)
|
||||
|
||||
@@ -26,6 +26,7 @@ export function loginScrypted(username: string, password: string, change_passwor
|
||||
username,
|
||||
password,
|
||||
change_password,
|
||||
maxAge: 7 * 24 * 60 * 60 * 1000,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -161,10 +161,10 @@ export default {
|
||||
let t = ``;
|
||||
let toffset = 0;
|
||||
if (detection.score && detection.className !== 'motion') {
|
||||
t += `<tspan x='${x}' dy='${toffset}em'>${Math.round(detection.score * 100) / 100}</tspan>`
|
||||
t += `<tspan x='${x}' dy='${toffset}em'>${Math.round((detection.labelScore || detection.score) * 100) / 100}</tspan>`
|
||||
toffset -= 1.2;
|
||||
}
|
||||
const tname = detection.className + (detection.id ? `: ${detection.id}` : '')
|
||||
const tname = (detection.label || detection.className) + (detection.id ? `: ${detection.id}` : '')
|
||||
t += `<tspan x='${x}' dy='${toffset}em'>${tname}</tspan>`
|
||||
|
||||
const fs = 20;
|
||||
|
||||
plugins/coreml/package-lock.json (generated): 10 lines changed
@@ -1,25 +1,25 @@
|
||||
{
|
||||
"name": "@scrypted/coreml",
|
||||
"version": "0.1.53",
|
||||
"version": "0.1.65",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/coreml",
|
||||
"version": "0.1.53",
|
||||
"version": "0.1.65",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
},
|
||||
"../../sdk": {
|
||||
"name": "@scrypted/sdk",
|
||||
"version": "0.2.101",
|
||||
"version": "0.3.31",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@babel/preset-typescript": "^7.18.6",
|
||||
"adm-zip": "^0.4.13",
|
||||
"axios": "^0.21.4",
|
||||
"axios": "^1.6.5",
|
||||
"babel-loader": "^9.1.0",
|
||||
"babel-plugin-const-enum": "^1.1.0",
|
||||
"esbuild": "^0.15.9",
|
||||
@@ -65,7 +65,7 @@
|
||||
"@types/node": "^18.11.18",
|
||||
"@types/stringify-object": "^4.0.0",
|
||||
"adm-zip": "^0.4.13",
|
||||
"axios": "^0.21.4",
|
||||
"axios": "^1.6.5",
|
||||
"babel-loader": "^9.1.0",
|
||||
"babel-plugin-const-enum": "^1.1.0",
|
||||
"esbuild": "^0.15.9",
|
||||
|
||||
@@ -42,5 +42,5 @@
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
},
|
||||
"version": "0.1.53"
|
||||
"version": "0.1.65"
|
||||
}
|
||||
|
||||
@@ -26,17 +26,16 @@ predictExecutor = concurrent.futures.ThreadPoolExecutor(1, "CoreML-Predict")
|
||||
|
||||
availableModels = [
|
||||
"Default",
|
||||
"scrypted_yolov10m_320",
|
||||
"scrypted_yolov10n_320",
|
||||
"scrypted_yolov10n",
|
||||
"scrypted_yolo_nas_s_320",
|
||||
"scrypted_yolov9e_320",
|
||||
"scrypted_yolov9c_320",
|
||||
"scrypted_yolov9c",
|
||||
"scrypted_yolov9s_320",
|
||||
"scrypted_yolov9t_320",
|
||||
"scrypted_yolov6n_320",
|
||||
"scrypted_yolov6n",
|
||||
"scrypted_yolov6s_320",
|
||||
"scrypted_yolov6s",
|
||||
"scrypted_yolov8n_320",
|
||||
"scrypted_yolov8n",
|
||||
"ssdlite_mobilenet_v2",
|
||||
"yolov4-tiny",
|
||||
]
|
||||
@@ -80,11 +79,11 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
|
||||
self.storage.setItem("model", "Default")
|
||||
model = "scrypted_yolov9c_320"
|
||||
self.yolo = "yolo" in model
|
||||
self.scrypted_yolov10n = "scrypted_yolov10n" in model
|
||||
self.scrypted_yolov10n = "scrypted_yolov10" in model
|
||||
self.scrypted_yolo_nas = "scrypted_yolo_nas" in model
|
||||
self.scrypted_yolo = "scrypted_yolo" in model
|
||||
self.scrypted_model = "scrypted" in model
|
||||
model_version = "v7"
|
||||
model_version = "v8"
|
||||
mlmodel = "model" if self.scrypted_yolo else model
|
||||
|
||||
print(f"model: {model}")
|
||||
|
||||
@@ -71,7 +71,7 @@ class CoreMLFaceRecognition(FaceRecognizeDetection):
|
||||
def predict():
|
||||
model, inputName = self.faceModel
|
||||
out_dict = model.predict({inputName: input})
|
||||
results = out_dict["var_2167"][0]
|
||||
results = list(out_dict.values())[0][0]
|
||||
return results
|
||||
results = await asyncio.get_event_loop().run_in_executor(
|
||||
self.recogExecutor, lambda: predict()
|
||||
|
||||
@@ -20,7 +20,7 @@ class CoreMLTextRecognition(TextRecognition):
|
||||
self.recogExecutor = concurrent.futures.ThreadPoolExecutor(1, "recog-text")
|
||||
|
||||
def downloadModel(self, model: str):
|
||||
model_version = "v7"
|
||||
model_version = "v8"
|
||||
mlmodel = "model"
|
||||
|
||||
files = [
|
||||
|
||||
@@ -1 +1 @@
|
||||
../../tensorflow-lite/src/detect
|
||||
../../openvino/src/detect/
|
||||
@@ -1 +1 @@
|
||||
../../tensorflow-lite/src/predict
|
||||
../../openvino/src/predict
|
||||
@@ -1 +1 @@
|
||||
opencv-python
|
||||
opencv-python==4.10.0.82
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# 2024-04-23 - modify timestamp to force pip reinstall
|
||||
# must ensure numpy is pinned to prevent dependencies with an unpinned numpy from pulling numpy>=2.0.
|
||||
numpy==1.26.4
|
||||
coremltools==7.1
|
||||
Pillow>=5.4.1
|
||||
Pillow==10.3.0
|
||||
|
||||
plugins/hikvision/package-lock.json (generated): 4 lines changed
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/hikvision",
|
||||
"version": "0.0.147",
|
||||
"version": "0.0.149",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/hikvision",
|
||||
"version": "0.0.147",
|
||||
"version": "0.0.149",
|
||||
"license": "Apache",
|
||||
"dependencies": {
|
||||
"@scrypted/common": "file:../../common",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/hikvision",
|
||||
"version": "0.0.147",
|
||||
"version": "0.0.149",
|
||||
"description": "Hikvision Plugin for Scrypted",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache",
|
||||
|
||||
@@ -189,7 +189,9 @@ export class HikvisionCameraAPI {
|
||||
continue;
|
||||
if (ignore === boundaryEnd)
|
||||
continue;
|
||||
if (ignore !== boundary) {
|
||||
if (ignore !== boundary
|
||||
// older hikvision nvr send a boundary in the headers, but then use a totally different constant boundary value
|
||||
&& ignore != "--boundary") {
|
||||
this.console.error('expected boundary but found', ignore);
|
||||
throw new Error('expected boundary');
|
||||
}
|
||||
|
||||
@@ -134,7 +134,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
|
||||
const xml = await xml2js.parseStringPromise(data);
|
||||
|
||||
|
||||
const [channelId] = xml.EventNotificationAlert.channelID;
|
||||
const [channelId] = xml.EventNotificationAlert.channelID || xml.EventNotificationAlert.dynChannelID;
|
||||
if (!await checkCameraNumber(channelId)) {
|
||||
this.console.warn('chann fail')
|
||||
return;
|
||||
|
||||
@@ -32,10 +32,13 @@ If recordings dont work, it's generally because of a few reasons, **follow the s
|
||||
|
||||
### HomeKit Discovery and Pairing Issues
|
||||
|
||||
* Ensure all your Home hubs are online and updated. Power cycling them is recommended in case one is stuck.
|
||||
* Ensure all your Apple TV and Home Pods are online and updated. Power cycling them is recommended in case one is stuck.
|
||||
* Ensure your Apple TV and Home Pods are on the same subnet as the Scrypted server.
|
||||
* Ensure LAN/WLAN multicast is enabled on your router.
|
||||
* Ensure the iOS device you are using for pairing is on the same network (pairing will fail on cellular).
|
||||
* Ensure the Docker installation (if applicable) is using host networking. This configuration is the default if the official Scrypted Docker compose install script was used.
|
||||
* Try switching the mDNS advertiser used in the HomeKit plugin settings.
|
||||
* Try disabling IGMP Snooping on your router.
|
||||
|
||||
### HomeKit Live Streaming Timeout (Recordings may be working)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Entry, EntrySensor, ScryptedDevice, ScryptedDeviceType, ScryptedInterface } from '@scrypted/sdk';
|
||||
import { Brightness, Entry, EntrySensor, ScryptedDevice, ScryptedDeviceType, ScryptedInterface } from '@scrypted/sdk';
|
||||
import { addSupportedType, bindCharacteristic, DummyDevice, } from '../common';
|
||||
import { Characteristic, CharacteristicEventTypes, CharacteristicSetCallback, CharacteristicValue, NodeCallback, Service } from '../hap';
|
||||
import { makeAccessory } from './common';
|
||||
@@ -7,38 +7,62 @@ import type { HomeKitPlugin } from "../main";
|
||||
addSupportedType({
|
||||
type: ScryptedDeviceType.WindowCovering,
|
||||
probe(device: DummyDevice): boolean {
|
||||
return device.interfaces.includes(ScryptedInterface.Entry) && device.interfaces.includes(ScryptedInterface.EntrySensor);
|
||||
return (device.interfaces.includes(ScryptedInterface.Entry) && device.interfaces.includes(ScryptedInterface.EntrySensor))
|
||||
|| device.interfaces.includes(ScryptedInterface.Brightness);
|
||||
},
|
||||
getAccessory: async (device: ScryptedDevice & Entry & EntrySensor, homekitPlugin: HomeKitPlugin) => {
|
||||
getAccessory: async (device: ScryptedDevice & Entry & EntrySensor & Brightness, homekitPlugin: HomeKitPlugin) => {
|
||||
const accessory = makeAccessory(device, homekitPlugin);
|
||||
|
||||
const service = accessory.addService(Service.WindowCovering, device.name);
|
||||
|
||||
bindCharacteristic(device, ScryptedInterface.EntrySensor, service, Characteristic.CurrentPosition,
|
||||
() => !!device.entryOpen ? 100 : 0);
|
||||
if (device.interfaces.includes(ScryptedInterface.Entry)) {
|
||||
bindCharacteristic(device, ScryptedInterface.EntrySensor, service, Characteristic.CurrentPosition,
|
||||
() => !!device.entryOpen ? 100 : 0);
|
||||
|
||||
bindCharacteristic(device, ScryptedInterface.EntrySensor, service, Characteristic.TargetPosition,
|
||||
() => !!device.entryOpen ? 100 : 0);
|
||||
let targetPosition = !!device.entryOpen ? 100 : 0;
|
||||
bindCharacteristic(device, ScryptedInterface.EntrySensor, service, Characteristic.TargetPosition,
|
||||
() => targetPosition);
|
||||
|
||||
let props = {
|
||||
minValue: 0,
|
||||
maxValue: 100,
|
||||
minStep: 100,
|
||||
};
|
||||
let targetState = !!device.entryOpen ? 100 : 0;
|
||||
service.getCharacteristic(Characteristic.TargetPosition)
|
||||
.setProps(props)
|
||||
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
|
||||
callback();
|
||||
if (value === 100) {
|
||||
targetState = 100;
|
||||
device.openEntry();
|
||||
}
|
||||
else {
|
||||
targetState = 0;
|
||||
device.closeEntry();
|
||||
}
|
||||
})
|
||||
const props = {
|
||||
minValue: 0,
|
||||
maxValue: 100,
|
||||
minStep: 100,
|
||||
};
|
||||
service.getCharacteristic(Characteristic.TargetPosition)
|
||||
.setProps(props)
|
||||
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
|
||||
callback();
|
||||
if (value === 100) {
|
||||
targetPosition = 100;
|
||||
device.openEntry();
|
||||
}
|
||||
else {
|
||||
targetPosition = 0;
|
||||
device.closeEntry();
|
||||
}
|
||||
});
|
||||
}
|
||||
else if (device.interfaces.includes(ScryptedInterface.Brightness)) {
|
||||
bindCharacteristic(device, ScryptedInterface.Brightness, service, Characteristic.CurrentPosition,
|
||||
() => device.brightness || 0);
|
||||
|
||||
let targetPosition = device.brightness || 0;
|
||||
bindCharacteristic(device, ScryptedInterface.Brightness, service, Characteristic.TargetPosition,
|
||||
() => targetPosition);
|
||||
|
||||
const props = {
|
||||
minValue: 0,
|
||||
maxValue: 100,
|
||||
minStep: 1,
|
||||
};
|
||||
service.getCharacteristic(Characteristic.TargetPosition)
|
||||
.setProps(props)
|
||||
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
|
||||
callback();
|
||||
targetPosition = value as number;
|
||||
device.setBrightness(targetPosition);
|
||||
});
|
||||
}
|
||||
|
||||
return accessory;
|
||||
}
|
||||
|
||||
plugins/onnx/.vscode/settings.json (vendored): 14 lines changed
@@ -1,18 +1,20 @@
|
||||
|
||||
{
|
||||
// docker installation
|
||||
// "scrypted.debugHost": "koushik-ubuntuvm",
|
||||
// "scrypted.serverRoot": "/server",
|
||||
// "scrypted.debugHost": "koushik-ubuntuvm",
|
||||
// "scrypted.serverRoot": "/home/koush/.scrypted",
|
||||
"scrypted.debugHost": "koushik-ubuntuvm",
|
||||
"scrypted.serverRoot": "/server",
|
||||
|
||||
// lxc
|
||||
// "scrypted.debugHost": "scrypted-server",
|
||||
// "scrypted.serverRoot": "/root/.scrypted",
|
||||
|
||||
// pi local installation
|
||||
// "scrypted.debugHost": "192.168.2.119",
|
||||
// "scrypted.serverRoot": "/home/pi/.scrypted",
|
||||
|
||||
// local checkout
|
||||
"scrypted.debugHost": "127.0.0.1",
|
||||
"scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
// "scrypted.debugHost": "127.0.0.1",
|
||||
// "scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
// "scrypted.debugHost": "koushik-winvm",
|
||||
// "scrypted.serverRoot": "C:\\Users\\koush\\.scrypted",
|
||||
|
||||
|
||||
plugins/onnx/package-lock.json (generated): 4 lines changed
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/openvino",
|
||||
"version": "0.1.92",
|
||||
"version": "0.1.103",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/openvino",
|
||||
"version": "0.1.92",
|
||||
"version": "0.1.103",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
|
||||
@@ -42,5 +42,5 @@
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
},
|
||||
"version": "0.1.92"
|
||||
"version": "0.1.103"
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
../../tensorflow-lite/src/detect
|
||||
../../openvino/src/detect/
|
||||
@@ -29,17 +29,15 @@ except:
|
||||
|
||||
availableModels = [
|
||||
"Default",
|
||||
"scrypted_yolov10m_320",
|
||||
"scrypted_yolov10n_320",
|
||||
"scrypted_yolov10n",
|
||||
"scrypted_yolo_nas_s_320",
|
||||
"scrypted_yolov6n_320",
|
||||
"scrypted_yolov6n",
|
||||
"scrypted_yolov6s_320",
|
||||
"scrypted_yolov6s",
|
||||
"scrypted_yolov9c_320",
|
||||
"scrypted_yolov9c",
|
||||
"scrypted_yolov9s_320",
|
||||
"scrypted_yolov9t_320",
|
||||
"scrypted_yolov8n_320",
|
||||
"scrypted_yolov8n",
|
||||
]
|
||||
|
||||
def parse_labels(names):
|
||||
@@ -59,7 +57,7 @@ class ONNXPlugin(
|
||||
if model == "Default" or model not in availableModels:
|
||||
if model != "Default":
|
||||
self.storage.setItem("model", "Default")
|
||||
model = "scrypted_yolov8n_320"
|
||||
model = "scrypted_yolov9c_320"
|
||||
self.yolo = "yolo" in model
|
||||
self.scrypted_yolov10 = "scrypted_yolov10" in model
|
||||
self.scrypted_yolo_nas = "scrypted_yolo_nas" in model
|
||||
@@ -70,9 +68,9 @@ class ONNXPlugin(
|
||||
|
||||
onnxmodel = model if self.scrypted_yolo_nas else "best" if self.scrypted_model else model
|
||||
|
||||
model_version = "v2"
|
||||
model_version = "v3"
|
||||
onnxfile = self.downloadFile(
|
||||
f"https://raw.githubusercontent.com/koush/onnx-models/main/{model}/{onnxmodel}.onnx",
|
||||
f"https://github.com/koush/onnx-models/raw/main/{model}/{onnxmodel}.onnx",
|
||||
f"{model_version}/{model}/{onnxmodel}.onnx",
|
||||
)
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ class ONNXFaceRecognition(FaceRecognizeDetection):
|
||||
onnxmodel = "best" if "scrypted" in model else model
|
||||
model_version = "v1"
|
||||
onnxfile = self.downloadFile(
|
||||
f"https://raw.githubusercontent.com/koush/onnx-models/main/{model}/{onnxmodel}.onnx",
|
||||
f"https://github.com/koush/onnx-models/raw/main/{model}/{onnxmodel}.onnx",
|
||||
f"{model_version}/{model}/{onnxmodel}.onnx",
|
||||
)
|
||||
print(onnxfile)
|
||||
|
||||
@@ -21,9 +21,9 @@ class ONNXTextRecognition(TextRecognition):
|
||||
|
||||
def downloadModel(self, model: str):
|
||||
onnxmodel = model
|
||||
model_version = "v3"
|
||||
model_version = "v4"
|
||||
onnxfile = self.downloadFile(
|
||||
f"https://raw.githubusercontent.com/koush/onnx-models/main/{model}/{onnxmodel}.onnx",
|
||||
f"https://github.com/koush/onnx-models/raw/main/{model}/{onnxmodel}.onnx",
|
||||
f"{model_version}/{model}/{onnxmodel}.onnx",
|
||||
)
|
||||
print(onnxfile)
|
||||
|
||||
@@ -1 +1 @@
|
||||
../../tensorflow-lite/src/predict
|
||||
../../openvino/src/predict
|
||||
@@ -1 +1 @@
|
||||
opencv-python
|
||||
opencv-python==4.10.0.82
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
# must ensure numpy is pinned to prevent dependencies with an unpinned numpy from pulling numpy>=2.0.
|
||||
numpy==1.26.4
|
||||
|
||||
# uncomment to require cuda 12, but most stuff is still targetting cuda 11.
|
||||
# however, stuff targetted for cuda 11 can still run on cuda 12.
|
||||
# --extra-index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-12/pypi/simple/
|
||||
@@ -7,7 +10,4 @@ onnxruntime; 'linux' not in sys_platform or platform_machine != 'x86_64'
|
||||
# nightly?
|
||||
# ort-nightly-gpu==1.17.3.dev20240409002
|
||||
|
||||
# pillow-simd is available on x64 linux
|
||||
# pillow-simd confirmed not building with arm64 linux or apple silicon
|
||||
Pillow>=5.4.1; 'linux' not in sys_platform or platform_machine != 'x86_64'
|
||||
pillow-simd; 'linux' in sys_platform and platform_machine == 'x86_64'
|
||||
Pillow==10.3.0
|
||||
|
||||
plugins/opencv/package-lock.json (generated): 4 lines changed
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/opencv",
|
||||
"version": "0.0.90",
|
||||
"version": "0.0.91",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/opencv",
|
||||
"version": "0.0.90",
|
||||
"version": "0.0.91",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
|
||||
@@ -37,5 +37,5 @@
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
},
|
||||
"version": "0.0.90"
|
||||
"version": "0.0.91"
|
||||
}
|
||||
|
||||
@@ -1,13 +1,5 @@
|
||||
# plugin
|
||||
numpy>=1.16.2
|
||||
|
||||
# must ensure numpy is pinned to prevent dependencies with an unpinned numpy from pulling numpy>=2.0.
|
||||
numpy==1.26.4
|
||||
imutils>=0.5.0
|
||||
|
||||
# locked to version because 4.8.0.76 is broken.
|
||||
# todo: check newer versions.
|
||||
opencv-python==4.8.0.74
|
||||
|
||||
# pillow-simd is available on x64 linux
|
||||
# pillow-simd confirmed not building with arm64 linux or apple silicon
|
||||
Pillow>=5.4.1; 'linux' not in sys_platform or platform_machine != 'x86_64'
|
||||
pillow-simd; 'linux' in sys_platform and platform_machine == 'x86_64'
|
||||
opencv-python==4.10.0.82
|
||||
Pillow==10.3.0
|
||||
|
||||
plugins/openvino/.vscode/settings.json (vendored): 8 lines changed
@@ -5,16 +5,16 @@
|
||||
// "scrypted.serverRoot": "/server",
|
||||
|
||||
// proxmox installation
|
||||
// "scrypted.debugHost": "scrypted-server",
|
||||
// "scrypted.serverRoot": "/root/.scrypted",
|
||||
"scrypted.debugHost": "scrypted-server",
|
||||
"scrypted.serverRoot": "/root/.scrypted",
|
||||
|
||||
// pi local installation
|
||||
// "scrypted.debugHost": "192.168.2.119",
|
||||
// "scrypted.serverRoot": "/home/pi/.scrypted",
|
||||
|
||||
// local checkout
|
||||
"scrypted.debugHost": "127.0.0.1",
|
||||
"scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
// "scrypted.debugHost": "127.0.0.1",
|
||||
// "scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
// "scrypted.debugHost": "koushik-winvm",
|
||||
// "scrypted.serverRoot": "C:\\Users\\koush\\.scrypted",
|
||||
|
||||
|
||||
plugins/openvino/package-lock.json (generated): 4 lines changed
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/openvino",
|
||||
"version": "0.1.88",
|
||||
"version": "0.1.104",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/openvino",
|
||||
"version": "0.1.88",
|
||||
"version": "0.1.104",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
|
||||
@@ -42,5 +42,5 @@
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
},
|
||||
"version": "0.1.88"
|
||||
"version": "0.1.104"
|
||||
}
|
||||
|
||||
@@ -49,7 +49,7 @@ def calculate_y_change(original_height, skew_angle_radians):
|
||||
|
||||
return y_change
|
||||
|
||||
async def prepare_text_result(d: ObjectDetectionResult, image: scrypted_sdk.Image, skew_angle: float):
|
||||
async def prepare_text_result(d: ObjectDetectionResult, image: scrypted_sdk.Image, skew_angle: float, deskew_height: float):
|
||||
textImage = await crop_text(d, image)
|
||||
|
||||
skew_height_change = calculate_y_change(d["boundingBox"][3], skew_angle)
|
||||
@@ -57,19 +57,29 @@ async def prepare_text_result(d: ObjectDetectionResult, image: scrypted_sdk.Imag
|
||||
textImage = skew_image(textImage, skew_angle)
|
||||
# crop skew_height_change from top
|
||||
if skew_height_change > 0:
|
||||
textImage = textImage.crop((0, 0, textImage.width, textImage.height - skew_height_change))
|
||||
textImage = textImage.crop((0, 0, textImage.width, deskew_height))
|
||||
elif skew_height_change < 0:
|
||||
textImage = textImage.crop((0, -skew_height_change, textImage.width, textImage.height))
|
||||
textImage = textImage.crop((0, textImage.height - deskew_height, textImage.width, textImage.height))
|
||||
|
||||
new_height = 64
|
||||
target_height = 64
|
||||
height_padding = 3
|
||||
new_height = target_height - height_padding * 2
|
||||
new_width = int(textImage.width * new_height / textImage.height)
|
||||
textImage = textImage.resize((new_width, new_height), resample=Image.LANCZOS).convert("L")
|
||||
|
||||
new_width = 256
|
||||
new_width = 384
|
||||
# average the top pixels
|
||||
edge_color = textImage.getpixel((0, textImage.height // 2))
|
||||
# average the bottom pixels
|
||||
edge_color += textImage.getpixel((textImage.width - 1, textImage.height // 2))
|
||||
# average the right pixels
|
||||
edge_color += textImage.getpixel((textImage.width // 2, 0))
|
||||
# average the left pixels
|
||||
edge_color += textImage.getpixel((textImage.width // 2, textImage.height - 1))
|
||||
edge_color = edge_color // 4
|
||||
|
||||
# calculate padding dimensions
|
||||
padding = (0, 0, new_width - textImage.width, 0)
|
||||
# todo: clamp entire edge rather than just center
|
||||
edge_color = textImage.getpixel((textImage.width - 1, textImage.height // 2))
|
||||
padding = (0, height_padding, new_width - textImage.width, height_padding)
|
||||
# pad image
|
||||
textImage = ImageOps.expand(textImage, padding, fill=edge_color)
|
||||
# pil to numpy
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
../../tensorflow-lite/src/detect
|
||||
@@ -30,17 +30,16 @@ prepareExecutor = concurrent.futures.ThreadPoolExecutor(1, "OpenVINO-Prepare")
|
||||
|
||||
availableModels = [
|
||||
"Default",
|
||||
"scrypted_yolov10m_320",
|
||||
"scrypted_yolov10s_320",
|
||||
"scrypted_yolov10n_320",
|
||||
"scrypted_yolov10n",
|
||||
"scrypted_yolo_nas_s_320",
|
||||
"scrypted_yolov6n_320",
|
||||
"scrypted_yolov6n",
|
||||
"scrypted_yolov6s_320",
|
||||
"scrypted_yolov6s",
|
||||
"scrypted_yolov9c_320",
|
||||
"scrypted_yolov9c",
|
||||
"scrypted_yolov9s_320",
|
||||
"scrypted_yolov9t_320",
|
||||
"scrypted_yolov8n_320",
|
||||
"scrypted_yolov8n",
|
||||
"ssd_mobilenet_v1_coco",
|
||||
"ssdlite_mobilenet_v2",
|
||||
"yolo-v3-tiny-tf",
|
||||
@@ -138,7 +137,7 @@ class OpenVINOPlugin(
|
||||
if model == "Default" or model not in availableModels:
|
||||
if model != "Default":
|
||||
self.storage.setItem("model", "Default")
|
||||
model = "scrypted_yolov8n_320"
|
||||
model = "scrypted_yolov9t_320"
|
||||
self.yolo = "yolo" in model
|
||||
self.scrypted_yolov10 = "scrypted_yolov10" in model
|
||||
self.scrypted_yolo_nas = "scrypted_yolo_nas" in model
|
||||
@@ -152,31 +151,31 @@ class OpenVINOPlugin(
|
||||
|
||||
model_version = "v5"
|
||||
xmlFile = self.downloadFile(
|
||||
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.xml",
|
||||
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.xml",
|
||||
f"{model_version}/{model}/{precision}/{ovmodel}.xml",
|
||||
)
|
||||
binFile = self.downloadFile(
|
||||
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.bin",
|
||||
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.bin",
|
||||
f"{model_version}/{model}/{precision}/{ovmodel}.bin",
|
||||
)
|
||||
if self.scrypted_yolo_nas:
|
||||
labelsFile = self.downloadFile(
|
||||
"https://raw.githubusercontent.com/koush/openvino-models/main/scrypted_nas_labels.txt",
|
||||
"https://github.com/koush/openvino-models/raw/main/scrypted_nas_labels.txt",
|
||||
"scrypted_nas_labels.txt",
|
||||
)
|
||||
elif self.scrypted_model:
|
||||
labelsFile = self.downloadFile(
|
||||
"https://raw.githubusercontent.com/koush/openvino-models/main/scrypted_labels.txt",
|
||||
"https://github.com/koush/openvino-models/raw/main/scrypted_labels.txt",
|
||||
"scrypted_labels.txt",
|
||||
)
|
||||
elif self.yolo:
|
||||
labelsFile = self.downloadFile(
|
||||
"https://raw.githubusercontent.com/koush/openvino-models/main/coco_80cl.txt",
|
||||
"https://github.com/koush/openvino-models/raw/main/coco_80cl.txt",
|
||||
"coco_80cl.txt",
|
||||
)
|
||||
else:
|
||||
labelsFile = self.downloadFile(
|
||||
"https://raw.githubusercontent.com/koush/openvino-models/main/coco_labels.txt",
|
||||
"https://github.com/koush/openvino-models/raw/main/coco_labels.txt",
|
||||
"coco_labels.txt",
|
||||
)
|
||||
|
||||
|
||||
@@ -26,11 +26,11 @@ class OpenVINOFaceRecognition(FaceRecognizeDetection):
|
||||
precision = self.plugin.precision
|
||||
model_version = "v5"
|
||||
xmlFile = self.downloadFile(
|
||||
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.xml",
|
||||
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.xml",
|
||||
f"{model_version}/{model}/{precision}/{ovmodel}.xml",
|
||||
)
|
||||
binFile = self.downloadFile(
|
||||
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.bin",
|
||||
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.bin",
|
||||
f"{model_version}/{model}/{precision}/{ovmodel}.bin",
|
||||
)
|
||||
print(xmlFile, binFile)
|
||||
|
||||
@@ -23,13 +23,13 @@ class OpenVINOTextRecognition(TextRecognition):
|
||||
def downloadModel(self, model: str):
|
||||
ovmodel = "best"
|
||||
precision = self.plugin.precision
|
||||
model_version = "v5"
|
||||
model_version = "v6"
|
||||
xmlFile = self.downloadFile(
|
||||
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.xml",
|
||||
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.xml",
|
||||
f"{model_version}/{model}/{precision}/{ovmodel}.xml",
|
||||
)
|
||||
binFile = self.downloadFile(
|
||||
f"https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.bin",
|
||||
f"https://github.com/koush/openvino-models/raw/main/{model}/{precision}/{ovmodel}.bin",
|
||||
f"{model_version}/{model}/{precision}/{ovmodel}.bin",
|
||||
)
|
||||
print(xmlFile, binFile)
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
../../tensorflow-lite/src/predict
|
||||
@@ -145,8 +145,8 @@ class PredictPlugin(DetectPlugin):
|
||||
for i, result in enumerate(results):
|
||||
batch[i][1].set_result(result)
|
||||
except Exception as e:
|
||||
for i, result in enumerate(results):
|
||||
batch[i][1].set_exception(e)
|
||||
for input in batch:
|
||||
input[1].set_exception(e)
|
||||
|
||||
async def flush_batch(self):
|
||||
self.batch_flush = None
|
||||
@@ -38,6 +38,7 @@ def getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text,
|
||||
nLabels, labels, stats, centroids = cv2.connectedComponentsWithStats(text_score_comb.astype(np.uint8), connectivity=4)
|
||||
|
||||
det = []
|
||||
scores = []
|
||||
mapper = []
|
||||
for k in range(1,nLabels):
|
||||
# size filtering
|
||||
@@ -45,7 +46,8 @@ def getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text,
|
||||
if size < 10: continue
|
||||
|
||||
# thresholding
|
||||
if np.max(textmap[labels==k]) < text_threshold: continue
|
||||
score = np.max(textmap[labels==k])
|
||||
if score < text_threshold: continue
|
||||
|
||||
# make segmentation map
|
||||
segmap = np.zeros(textmap.shape, dtype=np.uint8)
|
||||
@@ -89,8 +91,9 @@ def getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text,
|
||||
box = np.array(box)
|
||||
|
||||
det.append(box)
|
||||
scores.append(score)
|
||||
|
||||
return det, labels, mapper
|
||||
return det, labels, mapper, scores
|
||||
|
||||
def getPoly_core(boxes, labels, mapper, linkmap):
|
||||
# configs
|
||||
@@ -241,14 +244,14 @@ def getPoly_core(boxes, labels, mapper, linkmap):
|
||||
def getDetBoxes(textmap, linkmap, text_threshold, link_threshold, low_text, poly=False, estimate_num_chars=False):
|
||||
if poly and estimate_num_chars:
|
||||
raise Exception("Estimating the number of characters not currently supported with poly.")
|
||||
boxes, labels, mapper = getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text, estimate_num_chars)
|
||||
boxes, labels, mapper, scores = getDetBoxes_core(textmap, linkmap, text_threshold, link_threshold, low_text, estimate_num_chars)
|
||||
|
||||
if poly:
|
||||
polys = getPoly_core(boxes, labels, mapper, linkmap)
|
||||
else:
|
||||
polys = [None] * len(boxes)
|
||||
|
||||
return boxes, polys, mapper
|
||||
return boxes, polys, mapper, scores
|
||||
|
||||
def adjustResultCoordinates(polys, ratio_w, ratio_h, ratio_net = 2):
|
||||
if len(polys) > 0:
|
||||
@@ -15,6 +15,12 @@ from scrypted_sdk import (ObjectDetectionResult, ObjectDetectionSession,
|
||||
from common import yolo
|
||||
from predict import PredictPlugin
|
||||
|
||||
def cosine_similarity(vector_a, vector_b):
|
||||
dot_product = np.dot(vector_a, vector_b)
|
||||
norm_a = np.linalg.norm(vector_a)
|
||||
norm_b = np.linalg.norm(vector_b)
|
||||
similarity = dot_product / (norm_a * norm_b)
|
||||
return similarity
|
||||
|
||||
class FaceRecognizeDetection(PredictPlugin):
|
||||
def __init__(self, nativeId: str | None = None):
|
||||
@@ -25,13 +31,11 @@ class FaceRecognizeDetection(PredictPlugin):
|
||||
|
||||
self.labels = {
|
||||
0: "face",
|
||||
1: "plate",
|
||||
2: "text",
|
||||
}
|
||||
self.loop = asyncio.get_event_loop()
|
||||
self.minThreshold = 0.7
|
||||
self.minThreshold = 0.5
|
||||
|
||||
self.detectModel = self.downloadModel("scrypted_yolov8n_flt_320")
|
||||
self.detectModel = self.downloadModel("scrypted_yolov9t_face_320")
|
||||
self.faceModel = self.downloadModel("inception_resnet_v1")
|
||||
|
||||
def downloadModel(self, model: str):
|
||||
@@ -155,4 +159,22 @@ class FaceRecognizeDetection(PredictPlugin):
|
||||
if len(futures):
|
||||
await asyncio.wait(futures)
|
||||
|
||||
# last = None
|
||||
# for d in ret['detections']:
|
||||
# if d["className"] != "face":
|
||||
# continue
|
||||
# check = d.get("embedding")
|
||||
# if check is None:
|
||||
# continue
|
||||
# # decode base64 string check
|
||||
# embedding = base64.b64decode(check)
|
||||
# embedding = np.frombuffer(embedding, dtype=np.float32)
|
||||
# if last is None:
|
||||
# last = embedding
|
||||
# continue
|
||||
# # convert to numpy float32 arrays
|
||||
# similarity = cosine_similarity(last, embedding)
|
||||
# print('similarity', similarity)
|
||||
# last = embedding
|
||||
|
||||
return ret
|
||||
@@ -62,17 +62,17 @@ class TextRecognition(PredictPlugin):
|
||||
ratio_h = ratio_w = 1
|
||||
text_threshold = 0.7
|
||||
link_threshold = 0.9
|
||||
low_text = 0.4
|
||||
low_text = 0.5
|
||||
poly = False
|
||||
|
||||
boxes_list, polys_list = [], []
|
||||
boxes_list, polys_list, scores_list = [], [], []
|
||||
for out in y:
|
||||
# make score and link map
|
||||
score_text = out[:, :, 0]
|
||||
score_link = out[:, :, 1]
|
||||
|
||||
# Post-processing
|
||||
boxes, polys, mapper = getDetBoxes(
|
||||
boxes, polys, mapper, scores = getDetBoxes(
|
||||
score_text,
|
||||
score_link,
|
||||
text_threshold,
|
||||
@@ -96,18 +96,19 @@ class TextRecognition(PredictPlugin):
                if polys[k] is None:
                    polys[k] = boxes[k]
            boxes_list.append(boxes)
            scores_list.append(scores)
            polys_list.append(polys)

        preds: List[Prediction] = []
        for boxes in boxes_list:
            for box in boxes:
        for boxes, scores in zip(boxes_list, scores_list):
            for box, score in zip(boxes, scores):
                tl, tr, br, bl = box
                l = min(tl[0], bl[0])
                t = min(tl[1], tr[1])
                r = max(tr[0], br[0])
                b = max(bl[1], br[1])

                pred = Prediction(0, 1, Rectangle(l, t, r, b))
                pred = Prediction(0, float(score), Rectangle(l, t, r, b))
                preds.append(pred)

        return self.create_detection_result(preds, src_size, cvss)
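CRAFT returns each text region as a rotated quadrilateral (tl, tr, br, bl). The loop above collapses that quad into an axis-aligned rectangle and, with this change, carries the detector's real confidence instead of a hard-coded 1. A sketch of the same conversion in TypeScript; taking the min/max over all four corners is equivalent to, and slightly more conservative than, the pairwise min/max used above.

type Point = [number, number];
type Quad = [Point, Point, Point, Point]; // tl, tr, br, bl

// Axis-aligned bounding rectangle of a rotated text quad, plus its score.
function quadToRect(quad: Quad, score: number) {
    const xs = quad.map(p => p[0]);
    const ys = quad.map(p => p[1]);
    return {
        left: Math.min(...xs),
        top: Math.min(...ys),
        right: Math.max(...xs),
        bottom: Math.max(...ys),
        score,
    };
}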
@@ -121,22 +122,23 @@ class TextRecognition(PredictPlugin):

        futures: List[Future] = []

        boundingBoxes = [d["boundingBox"] for d in detections]
        boundingBoxes, scores = [d["boundingBox"] for d in detections], [d["score"] for d in detections]
        if not len(boundingBoxes):
            return ret

        text_groups = find_adjacent_groups(boundingBoxes)
        text_groups = find_adjacent_groups(boundingBoxes, scores)

        detections = []
        for group in text_groups:
            boundingBox = group["union"]
            score = group["score"]
            d: ObjectDetectionResult = {
                "boundingBox": boundingBox,
                "score": 1,
                "score": score,
                "className": "text",
            }
            futures.append(
                asyncio.ensure_future(self.setLabel(d, image, group["skew_angle"]))
                asyncio.ensure_future(self.setLabel(d, image, group["skew_angle"], group['deskew_height']))
            )
            detections.append(d)
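The futures list above fans the per-group OCR calls out concurrently and the caller later awaits them all, so one slow crop does not serialize the whole frame. The equivalent shape in TypeScript is Promise.all; recognizeText here is a hypothetical stand-in for the plugin's setLabel/text-model call, not a Scrypted API.

// Run a hypothetical recognizeText() over every grouped region concurrently
// and attach the label to each detection, mirroring ensure_future + wait above.
async function labelGroups(
    groups: { union: [number, number, number, number]; score: number }[],
    recognizeText: (box: [number, number, number, number]) => Promise<string>,
) {
    const detections = groups.map(group => ({
        boundingBox: group.union,
        score: group.score,
        className: 'text',
        label: undefined as string | undefined,
    }));

    await Promise.all(detections.map(async d => {
        d.label = await recognizeText(d.boundingBox);
    }));

    return detections;
}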
@@ -151,10 +153,10 @@ class TextRecognition(PredictPlugin):
        return ret

    async def setLabel(
        self, d: ObjectDetectionResult, image: scrypted_sdk.Image, skew_angle: float
        self, d: ObjectDetectionResult, image: scrypted_sdk.Image, skew_angle: float, deskew_height: float
    ):
        try:
            image_tensor = await prepare_text_result(d, image, skew_angle)
            image_tensor = await prepare_text_result(d, image, skew_angle, deskew_height)
            preds = await self.predictTextModel(image_tensor)
            d["label"] = process_text_result(preds)
@@ -43,42 +43,58 @@ def are_boxes_adjacent(box1: BoundingBox, box2: BoundingBox):
    return False


def find_adjacent_groups(boxes: List[BoundingBox]) -> List[dict]:
def find_adjacent_groups(boxes: List[BoundingBox], scores: List[float]) -> List[dict]:
    groups = []

    # sort boxes left to right
    boxes = sorted(boxes, key=lambda box: box[0])

    for box in boxes:
    for index, box in enumerate(boxes):
        added_to_group = False
        for group in groups:
            for other_box in group["boxes"]:
                if are_boxes_adjacent(box, other_box):
                    group["boxes"].append(box)
                    group["scores"].append(scores[index])
                    added_to_group = True
                    break
            if added_to_group:
                break
        if not added_to_group:
            groups.append({"boxes": [box], "skew_angle": 0})
            groups.append({"boxes": [box], "scores": [scores[index]]})

    # Calculate the skew angle of each group
    for group in groups:
        boxes = group["boxes"]
        group["union"] = union_boxes(boxes)
        if len(boxes) -1 :
            lm = (boxes[0][1] + boxes[0][3]) / 2
            rm = (boxes[-1][1] + boxes[-1][3]) / 2
            dx = (boxes[-1][0]) - (boxes[0][0] + boxes[0][2])
        if len(boxes) - 1:
            lm = boxes[0][1] + boxes[0][3] / 2
            rm = boxes[-1][1] + boxes[-1][3] / 2
            dx = (boxes[-1][0]) - (boxes[0][0])
            minx = min([box[0] for box in boxes])
            maxx = max([box[0] + box[2] for box in boxes])

            # denoise by filtering the box height
            minh = min([box[3] for box in boxes])
            median_height = sorted([box[3] for box in boxes])[len(boxes) // 2]
            maxh = max([box[3] for box in boxes])
            pad_height = maxh * 0.05
            filter_height = median_height
            pad_height = filter_height * 0.05

            dx = maxx - minx
            group['skew_angle'] = math.atan2(rm - lm, dx) * 2
            group['skew_angle'] = math.atan((rm - lm) / dx)
            group['deskew_height'] = filter_height + pad_height * 2
            # pad this box by a few pixels
            group['union'] = (group['union'][0] - pad_height, group['union'][1] - pad_height, group['union'][2] + pad_height * 2, group['union'][3] + pad_height * 2)
            group['union'] = (
                group['union'][0] - pad_height,
                group['union'][1] - pad_height,
                group['union'][2] + pad_height * 2,
                group['union'][3] + pad_height * 2)
            # average the scores
            group['score'] = sum(group['scores']) / len(group['scores'])
        else:
            group['skew_angle'] = 0
            group['deskew_height'] = boxes[0][3]
            group['score'] = group['scores'][0]

    return groups
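For groups with more than one box, the rewritten code above estimates the text line's skew from the vertical midlines of the left-most and right-most boxes over the group's full horizontal extent (atan((rm - lm) / dx) rather than the earlier atan2(...) * 2), uses the median box height as a denoised deskew height, and averages the per-box confidences into a group score. A compact sketch of those three statistics in TypeScript, assuming the boxes are already sorted left to right as in the Python:

interface Box { x: number; y: number; w: number; h: number; }

// Skew angle, deskew height, and averaged score for a left-to-right sorted group.
function groupStats(boxes: Box[], scores: number[]) {
    const first = boxes[0];
    const last = boxes[boxes.length - 1];
    const leftMid = first.y + first.h / 2;
    const rightMid = last.y + last.h / 2;
    const minX = Math.min(...boxes.map(b => b.x));
    const maxX = Math.max(...boxes.map(b => b.x + b.w));
    const heights = boxes.map(b => b.h).sort((a, b) => a - b);
    const medianHeight = heights[Math.floor(heights.length / 2)];
    const pad = medianHeight * 0.05;
    return {
        skewAngle: Math.atan((rightMid - leftMid) / (maxX - minX)),
        deskewHeight: medianHeight + pad * 2,
        score: scores.reduce((sum, s) => sum + s, 0) / scores.length,
    };
}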
@@ -1 +1 @@
opencv-python
opencv-python==4.10.0.82

@@ -1,6 +1,4 @@
# must ensure numpy is pinned to prevent dependencies with an unpinned numpy from pulling numpy>=2.0.
numpy==1.26.4
openvino==2024.1.0

# pillow-simd is available on x64 linux
# pillow-simd confirmed not building with arm64 linux or apple silicon
Pillow>=5.4.1; 'linux' not in sys_platform or platform_machine != 'x86_64'
pillow-simd; 'linux' in sys_platform and platform_machine == 'x86_64'
Pillow==10.3.0
plugins/prebuffer-mixin/package-lock.json (4 changes, generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/prebuffer-mixin",
|
||||
"version": "0.10.23",
|
||||
"version": "0.10.27",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/prebuffer-mixin",
|
||||
"version": "0.10.23",
|
||||
"version": "0.10.27",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@scrypted/common": "file:../../common",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/prebuffer-mixin",
|
||||
"version": "0.10.23",
|
||||
"version": "0.10.27",
|
||||
"description": "Video Stream Rebroadcast, Prebuffer, and Management Plugin for Scrypted.",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache-2.0",
|
||||
|
||||
@@ -2,6 +2,7 @@ import { cloneDeep } from '@scrypted/common/src/clone-deep';
|
||||
import { Deferred } from "@scrypted/common/src/deferred";
|
||||
import { listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
|
||||
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from '@scrypted/common/src/media-helpers';
|
||||
import { createActivityTimeout } from '@scrypted/common/src/activity-timeout';
|
||||
import { createRtspParser } from "@scrypted/common/src/rtsp-server";
|
||||
import { parseSdp } from "@scrypted/common/src/sdp-utils";
|
||||
import { StreamChunk, StreamParser } from '@scrypted/common/src/stream-parser';
|
||||
@@ -13,15 +14,9 @@ const { mediaManager } = sdk;
|
||||
|
||||
export interface ParserSession<T extends string> {
|
||||
parserSpecific?: any;
|
||||
sdp: Promise<Buffer[]>;
|
||||
sdp: Promise<string>;
|
||||
resetActivityTimer?: () => void,
|
||||
negotiateMediaStream(requestMediaStream: RequestMediaStreamOptions): ResponseMediaStreamOptions;
|
||||
inputAudioCodec?: string;
|
||||
inputVideoCodec?: string;
|
||||
inputVideoResolution?: {
|
||||
width: number,
|
||||
height: number,
|
||||
},
|
||||
negotiateMediaStream(requestMediaStream: RequestMediaStreamOptions, inputVideoCodec: string, inputAudioCodec: string): ResponseMediaStreamOptions;
|
||||
start(): void;
|
||||
kill(error?: Error): void;
|
||||
killed: Promise<void>;
|
||||
@@ -29,6 +24,7 @@ export interface ParserSession<T extends string> {
|
||||
|
||||
emit(container: T, chunk: StreamChunk): this;
|
||||
on(container: T, callback: (chunk: StreamChunk) => void): this;
|
||||
on(error: 'error', callback: (e: Error) => void): this;
|
||||
removeListener(event: T | 'killed', callback: any): this;
|
||||
once(event: T | 'killed', listener: (...args: any[]) => void): this;
|
||||
}
|
||||
@@ -100,65 +96,37 @@ export async function parseAudioCodec(cp: ChildProcess) {
|
||||
export function setupActivityTimer(container: string, kill: (error?: Error) => void, events: {
|
||||
once(event: 'killed', callback: () => void): void,
|
||||
}, timeout: number) {
|
||||
let dataTimeout: NodeJS.Timeout;
|
||||
|
||||
function dataKill() {
|
||||
const ret = createActivityTimeout(timeout, () => {
|
||||
const str = 'timeout waiting for data, killing parser session';
|
||||
console.error(str, container);
|
||||
kill(new Error(str));
|
||||
}
|
||||
|
||||
let lastTime = Date.now();
|
||||
function resetActivityTimer() {
|
||||
lastTime = Date.now();
|
||||
}
|
||||
|
||||
function clearActivityTimer() {
|
||||
clearInterval(dataTimeout);
|
||||
}
|
||||
|
||||
if (timeout) {
|
||||
dataTimeout = setInterval(() => {
|
||||
if (Date.now() > lastTime + timeout) {
|
||||
clearInterval(dataTimeout);
|
||||
dataTimeout = undefined;
|
||||
dataKill();
|
||||
}
|
||||
}, timeout);
|
||||
}
|
||||
|
||||
events.once('killed', () => clearInterval(dataTimeout));
|
||||
|
||||
resetActivityTimer();
|
||||
return {
|
||||
resetActivityTimer,
|
||||
clearActivityTimer,
|
||||
}
|
||||
});
|
||||
events.once('killed', () => ret.clearActivityTimer());
|
||||
return ret;
|
||||
}
|
||||
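The refactor above replaces the hand-rolled interval bookkeeping with the shared createActivityTimeout helper from @scrypted/common: the caller resets the timer whenever data arrives and clears it when the session is killed. A hedged usage sketch; the session object here is an illustrative stand-in, not a Scrypted API.

import { createActivityTimeout } from '@scrypted/common/src/activity-timeout';

// Kill a stream session if no data has arrived for 10 seconds.
function watchSession(session: {
    kill(error: Error): void;
    onChunk(callback: () => void): void;
    onKilled(callback: () => void): void;
}) {
    const { resetActivityTimer, clearActivityTimer } = createActivityTimeout(10000, () => {
        session.kill(new Error('timeout waiting for data, killing parser session'));
    });

    // every received chunk counts as activity
    session.onChunk(() => resetActivityTimer());
    // stop the watchdog once the session is gone
    session.onKilled(() => clearActivityTimer());
}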
|
||||
|
||||
export async function startParserSession<T extends string>(ffmpegInput: FFmpegInput, options: ParserOptions<T>): Promise<ParserSession<T>> {
|
||||
const { console } = options;
|
||||
|
||||
let isActive = true;
|
||||
const events = new EventEmitter();
|
||||
// need this to prevent kill from throwing due to uncaught Error during cleanup
|
||||
events.on('error', e => console.error('rebroadcast error', e));
|
||||
|
||||
let inputAudioCodec: string;
|
||||
let inputVideoCodec: string;
|
||||
let inputVideoResolution: string[];
|
||||
events.on('error', () => {});
|
||||
|
||||
let sessionKilled: any;
|
||||
const killed = new Promise<void>(resolve => {
|
||||
sessionKilled = resolve;
|
||||
});
|
||||
|
||||
const sdpDeferred = new Deferred<string>();
|
||||
function kill(error?: Error) {
|
||||
error ||= new Error('killed');
|
||||
if (isActive) {
|
||||
events.emit('killed');
|
||||
events.emit('error', error || new Error('killed'));
|
||||
events.emit('error', error);
|
||||
}
|
||||
if (!sdpDeferred.finished)
|
||||
sdpDeferred.reject(error);
|
||||
isActive = false;
|
||||
sessionKilled();
|
||||
safeKillFFmpeg(cp);
|
||||
@@ -198,7 +166,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
|
||||
try {
|
||||
ensureActive(() => socket.destroy());
|
||||
|
||||
for await (const chunk of parser.parse(socket, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
|
||||
for await (const chunk of parser.parse(socket, undefined, undefined)) {
|
||||
events.emit(container, chunk);
|
||||
resetActivityTimer();
|
||||
}
|
||||
@@ -245,7 +213,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
|
||||
try {
|
||||
const { resetActivityTimer } = setupActivityTimer(container, kill, events, options?.timeout);
|
||||
|
||||
for await (const chunk of parser.parse(pipe as any, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
|
||||
for await (const chunk of parser.parse(pipe as any, undefined, undefined)) {
|
||||
await deferredStart.promise;
|
||||
events.emit(container, chunk);
|
||||
resetActivityTimer();
|
||||
@@ -259,17 +227,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
|
||||
};
|
||||
|
||||
const rtsp = (options.parsers as any).rtsp as ReturnType<typeof createRtspParser>;
|
||||
rtsp.sdp.then(sdp => {
|
||||
const parsed = parseSdp(sdp);
|
||||
const audio = parsed.msections.find(msection => msection.type === 'audio');
|
||||
const video = parsed.msections.find(msection => msection.type === 'video');
|
||||
inputVideoCodec = video?.codec;
|
||||
inputAudioCodec = audio?.codec;
|
||||
});
|
||||
|
||||
const sdp = new Deferred<Buffer[]>();
|
||||
rtsp.sdp.then(r => sdp.resolve([Buffer.from(r)]));
|
||||
killed.then(() => sdp.reject(new Error("ffmpeg killed before sdp could be parsed")));
|
||||
rtsp.sdp.then(sdp => sdpDeferred.resolve(sdp));
|
||||
|
||||
start();
|
||||
|
||||
@@ -277,25 +235,13 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
|
||||
start() {
|
||||
deferredStart.resolve();
|
||||
},
|
||||
sdp: sdp.promise,
|
||||
get inputAudioCodec() {
|
||||
return inputAudioCodec;
|
||||
},
|
||||
get inputVideoCodec() {
|
||||
return inputVideoCodec;
|
||||
},
|
||||
get inputVideoResolution() {
|
||||
return {
|
||||
width: parseInt(inputVideoResolution?.[2]),
|
||||
height: parseInt(inputVideoResolution?.[3]),
|
||||
}
|
||||
},
|
||||
sdp: sdpDeferred.promise,
|
||||
get isActive() { return isActive },
|
||||
kill(error?: Error) {
|
||||
kill(error);
|
||||
},
|
||||
killed,
|
||||
negotiateMediaStream: () => {
|
||||
negotiateMediaStream: (requestMediaStream: RequestMediaStreamOptions, inputVideoCodec, inputAudioCodec) => {
|
||||
const ret: ResponseMediaStreamOptions = cloneDeep(ffmpegInput.mediaStreamOptions) || {
|
||||
id: undefined,
|
||||
name: undefined,
|
||||
|
||||
@@ -3,8 +3,8 @@ import { getDebugModeH264EncoderArgs, getH264EncoderArgs } from '@scrypted/commo
|
||||
import { addVideoFilterArguments } from '@scrypted/common/src/ffmpeg-helpers';
|
||||
import { ListenZeroSingleClientTimeoutError, closeQuiet, listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
|
||||
import { readLength } from '@scrypted/common/src/read-stream';
|
||||
import { H264_NAL_TYPE_FU_B, H264_NAL_TYPE_IDR, H264_NAL_TYPE_MTAP16, H264_NAL_TYPE_MTAP32, H264_NAL_TYPE_RESERVED0, H264_NAL_TYPE_RESERVED30, H264_NAL_TYPE_RESERVED31, H264_NAL_TYPE_SEI, H264_NAL_TYPE_STAP_B, RtspServer, RtspTrack, createRtspParser, findH264NaluType, getNaluTypes, listenSingleRtspClient } from '@scrypted/common/src/rtsp-server';
|
||||
import { addTrackControls, parseSdp } from '@scrypted/common/src/sdp-utils';
|
||||
import { H264_NAL_TYPE_FU_B, H264_NAL_TYPE_IDR, H264_NAL_TYPE_MTAP16, H264_NAL_TYPE_MTAP32, H264_NAL_TYPE_RESERVED0, H264_NAL_TYPE_RESERVED30, H264_NAL_TYPE_RESERVED31, H264_NAL_TYPE_SEI, H264_NAL_TYPE_SPS, H264_NAL_TYPE_STAP_B, H265_NAL_TYPE_SPS, RtspServer, RtspTrack, createRtspParser, findH264NaluType, findH265NaluType, getNaluTypes, listenSingleRtspClient } from '@scrypted/common/src/rtsp-server';
|
||||
import { addTrackControls, getSpsPps, parseSdp } from '@scrypted/common/src/sdp-utils';
|
||||
import { SettingsMixinDeviceBase, SettingsMixinDeviceOptions } from "@scrypted/common/src/settings-mixin";
|
||||
import { sleep } from '@scrypted/common/src/sleep';
|
||||
import { StreamChunk, StreamParser } from '@scrypted/common/src/stream-parser';
|
||||
@@ -12,6 +12,7 @@ import sdk, { BufferConverter, ChargeState, DeviceProvider, EventListenerRegiste
|
||||
import { StorageSettings } from '@scrypted/sdk/storage-settings';
|
||||
import crypto from 'crypto';
|
||||
import { once } from 'events';
|
||||
import { parse as h264SpsParse } from "h264-sps-parser";
|
||||
import net, { AddressInfo } from 'net';
|
||||
import path from 'path';
|
||||
import semver from 'semver';
|
||||
@@ -23,6 +24,7 @@ import { getUrlLocalAdresses } from './local-addresses';
|
||||
import { REBROADCAST_MIXIN_INTERFACE_TOKEN } from './rebroadcast-mixin-token';
|
||||
import { connectRFC4571Parser, startRFC4571Parser } from './rfc4571';
|
||||
import { RtspSessionParserSpecific, startRtspSession } from './rtsp-session';
|
||||
import { getSpsResolution } from './sps-resolution';
|
||||
import { createStreamSettings } from './stream-settings';
|
||||
import { TRANSCODE_MIXIN_PROVIDER_NATIVE_ID, TranscodeMixinProvider, getTranscodeMixinProviderId } from './transcode-settings';
|
||||
|
||||
@@ -235,6 +237,58 @@ class PrebufferSession {
|
||||
}
|
||||
}
|
||||
|
||||
async parseCodecs(skipResolution?: boolean) {
|
||||
const sdp = await this.parserSession.sdp;
|
||||
const parsedSdp = parseSdp(sdp);
|
||||
const videoSection = parsedSdp.msections.find(msection => msection.type === 'video');
|
||||
const audioSection = parsedSdp.msections.find(msection => msection.type === 'audio');
|
||||
|
||||
const inputAudioCodec = audioSection?.codec;
|
||||
const inputVideoCodec = videoSection.codec;
|
||||
let inputVideoResolution: ReturnType<typeof getSpsResolution>;
|
||||
|
||||
if (!skipResolution) {
|
||||
// scan the prebuffer for sps
|
||||
for (const chunk of this.rtspPrebuffer) {
|
||||
try {
|
||||
let sps = findH264NaluType(chunk, H264_NAL_TYPE_SPS);
|
||||
if (sps) {
|
||||
const parsedSps = h264SpsParse(sps);
|
||||
inputVideoResolution = getSpsResolution(parsedSps);
|
||||
}
|
||||
else if (!sps) {
|
||||
// sps = findH265NaluType(chunk, H265_NAL_TYPE_SPS);
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
}
|
||||
}
|
||||
|
||||
if (!inputVideoResolution) {
|
||||
try {
|
||||
const spspps = getSpsPps(videoSection);
|
||||
let { sps } = spspps;
|
||||
if (sps) {
|
||||
if (videoSection.codec === 'h264') {
|
||||
const parsedSps = h264SpsParse(sps);
|
||||
inputVideoResolution = getSpsResolution(parsedSps);
|
||||
}
|
||||
else if (videoSection.codec === 'h265') {
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
inputVideoCodec,
|
||||
inputAudioCodec,
|
||||
inputVideoResolution,
|
||||
}
|
||||
}
|
||||
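The new parseCodecs method derives codec information on demand from the parser session's SDP rather than caching it on the session object, and only falls back to H.264 SPS parsing when a resolution is actually needed. A reduced sketch of the SDP half, using the same parseSdp helper the plugin already imports from @scrypted/common:

import { parseSdp } from '@scrypted/common/src/sdp-utils';

// Pull the negotiated video/audio codec names out of an SDP string.
function codecsFromSdp(sdp: string) {
    const parsed = parseSdp(sdp);
    const video = parsed.msections.find(msection => msection.type === 'video');
    const audio = parsed.msections.find(msection => msection.type === 'audio');
    return {
        inputVideoCodec: video?.codec,
        inputAudioCodec: audio?.codec,
    };
}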
|
||||
async getMixinSettings(): Promise<Setting[]> {
|
||||
const settings: Setting[] = [];
|
||||
|
||||
@@ -272,10 +326,10 @@ class PrebufferSession {
|
||||
combobox: true,
|
||||
},
|
||||
{
|
||||
title: 'FFmpeg Output Arguments Prefix',
|
||||
title: 'FFmpeg Output Prefix',
|
||||
group,
|
||||
subgroup,
|
||||
description: 'Optional/Advanced: Additional output arguments to pass to the ffmpeg command. These will be placed before the input arguments.',
|
||||
description: 'Optional/Advanced: Additional output arguments to pass to the ffmpeg command. These will be placed before the output.',
|
||||
key: this.ffmpegOutputArgumentsKey,
|
||||
value: this.storage.getItem(this.ffmpegOutputArgumentsKey),
|
||||
choices: [
|
||||
@@ -338,8 +392,9 @@ class PrebufferSession {
|
||||
};
|
||||
|
||||
if (session) {
|
||||
const resolution = session.inputVideoResolution?.width && session.inputVideoResolution?.height
|
||||
? `${session.inputVideoResolution?.width}x${session.inputVideoResolution?.height}`
|
||||
const codecInfo = await this.parseCodecs();
|
||||
const resolution = codecInfo.inputVideoResolution?.width && codecInfo.inputVideoResolution?.height
|
||||
? `${codecInfo.inputVideoResolution?.width}x${codecInfo.inputVideoResolution?.height}`
|
||||
: 'unknown';
|
||||
|
||||
const idrInterval = this.getDetectedIdrInterval();
|
||||
@@ -359,7 +414,7 @@ class PrebufferSession {
|
||||
subgroup,
|
||||
title: 'Detected Video/Audio Codecs',
|
||||
readonly: true,
|
||||
value: (session?.inputVideoCodec?.toString() || 'unknown') + '/' + (session?.inputAudioCodec?.toString() || 'unknown'),
|
||||
value: (codecInfo?.inputVideoCodec?.toString() || 'unknown') + '/' + (codecInfo?.inputAudioCodec?.toString() || 'unknown'),
|
||||
description: 'Configuring your camera to H264 video, and audio to Opus or PCM-mulaw (G.711ulaw) is recommended.'
|
||||
},
|
||||
{
|
||||
@@ -470,7 +525,6 @@ class PrebufferSession {
|
||||
session = startRFC4571Parser(this.console, connectRFC4571Parser(url), sdp, mediaStreamOptions, {
|
||||
timeout: 10000,
|
||||
});
|
||||
this.sdp = session.sdp.then(buffers => Buffer.concat(buffers).toString());
|
||||
}
|
||||
else {
|
||||
const moBuffer = await mediaManager.convertMediaObjectToBuffer(mo, ScryptedMimeTypes.FFmpegInput);
|
||||
@@ -501,7 +555,6 @@ class PrebufferSession {
|
||||
audioSoftMuted,
|
||||
rtspRequestTimeout: 10000,
|
||||
});
|
||||
this.sdp = session.sdp.then(buffers => Buffer.concat(buffers).toString());
|
||||
}
|
||||
else {
|
||||
let acodec: string[];
|
||||
@@ -549,13 +602,18 @@ class PrebufferSession {
|
||||
// the rtsp parser should always stream copy unless audio is soft muted.
|
||||
acodec,
|
||||
});
|
||||
this.sdp = rtspParser.sdp;
|
||||
rbo.parsers.rtsp = rtspParser;
|
||||
|
||||
session = await startParserSession(ffmpegInput, rbo);
|
||||
}
|
||||
}
|
||||
|
||||
this.sdp = session.sdp;
|
||||
session.on('error', e => {
|
||||
if (!e.message?.startsWith('killed:'))
|
||||
console.error('rebroadcast error', e)
|
||||
});
|
||||
|
||||
if (this.usingScryptedParser && !isRfc4571) {
|
||||
// watch the stream for 10 seconds to see if an weird nalu is encountered.
|
||||
// if one is found and using scrypted parser as default, will need to restart rebroadcast to prevent
|
||||
@@ -615,31 +673,6 @@ class PrebufferSession {
|
||||
}
|
||||
|
||||
await session.sdp;
|
||||
|
||||
// complain to the user about the codec if necessary. upstream may send a audio
|
||||
// stream but report none exists (to request muting).
|
||||
if (!audioSoftMuted && advertisedAudioCodec && session.inputAudioCodec !== undefined
|
||||
&& session.inputAudioCodec !== advertisedAudioCodec) {
|
||||
this.console.warn('Audio codec plugin reported vs detected mismatch', advertisedAudioCodec, detectedAudioCodec);
|
||||
}
|
||||
|
||||
const advertisedVideoCodec = mso?.video?.codec;
|
||||
if (advertisedVideoCodec && session.inputVideoCodec !== undefined
|
||||
&& session.inputVideoCodec !== advertisedVideoCodec) {
|
||||
this.console.warn('Video codec plugin reported vs detected mismatch', advertisedVideoCodec, session.inputVideoCodec);
|
||||
}
|
||||
|
||||
if (!session.inputAudioCodec) {
|
||||
this.console.log('No audio stream detected.');
|
||||
}
|
||||
|
||||
// set/update the detected codec, set it to null if no audio was found.
|
||||
this.storage.setItem(this.lastDetectedAudioCodecKey, session.inputAudioCodec || 'null');
|
||||
|
||||
if (session.inputVideoCodec !== 'h264') {
|
||||
this.console.error(`Video codec is not h264. If there are errors, try changing your camera's encoder output.`);
|
||||
}
|
||||
|
||||
this.parserSession = session;
|
||||
session.killed.finally(() => {
|
||||
if (this.parserSession === session)
|
||||
@@ -647,6 +680,32 @@ class PrebufferSession {
|
||||
});
|
||||
session.killed.finally(() => clearTimeout(this.inactivityTimeout));
|
||||
|
||||
const codecInfo = await this.parseCodecs();
|
||||
|
||||
// complain to the user about the codec if necessary. upstream may send a audio
|
||||
// stream but report none exists (to request muting).
|
||||
if (!audioSoftMuted && advertisedAudioCodec && codecInfo.inputAudioCodec !== undefined
|
||||
&& codecInfo.inputAudioCodec !== advertisedAudioCodec) {
|
||||
this.console.warn('Audio codec plugin reported vs detected mismatch', advertisedAudioCodec, detectedAudioCodec);
|
||||
}
|
||||
|
||||
const advertisedVideoCodec = mso?.video?.codec;
|
||||
if (advertisedVideoCodec && codecInfo.inputVideoCodec !== undefined
|
||||
&& codecInfo.inputVideoCodec !== advertisedVideoCodec) {
|
||||
this.console.warn('Video codec plugin reported vs detected mismatch', advertisedVideoCodec, codecInfo.inputVideoCodec);
|
||||
}
|
||||
|
||||
if (!codecInfo.inputAudioCodec) {
|
||||
this.console.log('No audio stream detected.');
|
||||
}
|
||||
|
||||
// set/update the detected codec, set it to null if no audio was found.
|
||||
this.storage.setItem(this.lastDetectedAudioCodecKey, codecInfo.inputAudioCodec || 'null');
|
||||
|
||||
if (codecInfo.inputVideoCodec !== 'h264') {
|
||||
this.console.error(`Video codec is not h264. If there are errors, try changing your camera's encoder output.`);
|
||||
}
|
||||
|
||||
// settings ui refresh
|
||||
deviceManager.onMixinEvent(this.mixin.id, this.mixin, ScryptedInterface.Settings, undefined);
|
||||
|
||||
@@ -676,26 +735,26 @@ class PrebufferSession {
|
||||
session.killed.finally(() => clearTimeout(refreshTimeout));
|
||||
}
|
||||
|
||||
let shifts = 0;
|
||||
let prebufferContainer: PrebufferStreamChunk[] = this.rtspPrebuffer;
|
||||
let shifts = 0;
|
||||
let prebufferContainer: PrebufferStreamChunk[] = this.rtspPrebuffer;
|
||||
|
||||
session.on('rtsp', (chunk: PrebufferStreamChunk) => {
|
||||
const now = Date.now();
|
||||
session.on('rtsp', (chunk: PrebufferStreamChunk) => {
|
||||
const now = Date.now();
|
||||
|
||||
chunk.time = now;
|
||||
prebufferContainer.push(chunk);
|
||||
chunk.time = now;
|
||||
prebufferContainer.push(chunk);
|
||||
|
||||
while (prebufferContainer.length && prebufferContainer[0].time < now - prebufferDurationMs) {
|
||||
prebufferContainer.shift();
|
||||
shifts++;
|
||||
}
|
||||
while (prebufferContainer.length && prebufferContainer[0].time < now - prebufferDurationMs) {
|
||||
prebufferContainer.shift();
|
||||
shifts++;
|
||||
}
|
||||
|
||||
if (shifts > 100000) {
|
||||
prebufferContainer = prebufferContainer.slice();
|
||||
this.rtspPrebuffer = prebufferContainer;
|
||||
shifts = 0;
|
||||
}
|
||||
});
|
||||
if (shifts > 100000) {
|
||||
prebufferContainer = prebufferContainer.slice();
|
||||
this.rtspPrebuffer = prebufferContainer;
|
||||
shifts = 0;
|
||||
}
|
||||
});
|
||||
|
||||
session.start();
|
||||
return session;
|
||||
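The prebuffer handler above is a simple time-windowed buffer: every chunk is timestamped and pushed, anything older than the prebuffer duration is shifted off the front, and after a large number of shifts the array is copied with slice(), presumably so the engine can compact its backing storage. A standalone sketch of just that trimming logic:

interface TimedChunk { time: number; }

// Drop chunks older than durationMs; periodically re-copy the array after many shifts.
function trimPrebuffer<T extends TimedChunk>(prebuffer: T[], durationMs: number, shifts: number) {
    const now = Date.now();
    while (prebuffer.length && prebuffer[0].time < now - durationMs) {
        prebuffer.shift();
        shifts++;
    }
    if (shifts > 100000) {
        prebuffer = prebuffer.slice();
        shifts = 0;
    }
    return { prebuffer, shifts };
}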
@@ -725,7 +784,7 @@ class PrebufferSession {
|
||||
return;
|
||||
}
|
||||
this.console.log(this.streamName, 'terminating rebroadcast due to inactivity');
|
||||
session.kill(new Error('stream inactivity'));
|
||||
session.kill(new Error('killed: stream inactivity'));
|
||||
}, 10000);
|
||||
}
|
||||
|
||||
@@ -741,7 +800,7 @@ class PrebufferSession {
|
||||
if (!this.activeClients && this.parserSessionPromise) {
|
||||
this.console.log(this.streamName, 'terminating rebroadcast due to low battery or not charging')
|
||||
const session = await this.parserSessionPromise;
|
||||
session.kill(new Error('low battery or not charging'));
|
||||
session.kill(new Error('killed: low battery or not charging'));
|
||||
}
|
||||
} else {
|
||||
this.ensurePrebufferSession();
|
||||
@@ -913,7 +972,8 @@ class PrebufferSession {
|
||||
requestedPrebuffer = Math.min(defaultPrebuffer, this.getDetectedIdrInterval() || defaultPrebuffer);;
|
||||
}
|
||||
|
||||
const mediaStreamOptions: ResponseMediaStreamOptions = session.negotiateMediaStream(options);
|
||||
const codecInfo = await this.parseCodecs(true);
|
||||
const mediaStreamOptions: ResponseMediaStreamOptions = session.negotiateMediaStream(options, codecInfo.inputVideoCodec, codecInfo.inputAudioCodec);
|
||||
let sdp = await this.sdp;
|
||||
if (!mediaStreamOptions.video?.h264Info && this.usingScryptedParser) {
|
||||
mediaStreamOptions.video ||= {};
|
||||
@@ -1039,10 +1099,10 @@ class PrebufferSession {
|
||||
mediaStreamOptions.audio.sampleRate ||= audioSection.rtpmap.clock;
|
||||
}
|
||||
|
||||
if (session.inputVideoResolution?.width && session.inputVideoResolution?.height) {
|
||||
if (codecInfo.inputVideoResolution?.width && codecInfo.inputVideoResolution?.height) {
|
||||
// this may be an audio only request.
|
||||
if (mediaStreamOptions.video)
|
||||
Object.assign(mediaStreamOptions.video, session.inputVideoResolution);
|
||||
Object.assign(mediaStreamOptions.video, codecInfo.inputVideoResolution);
|
||||
}
|
||||
|
||||
const now = Date.now();
|
||||
@@ -1705,45 +1765,16 @@ export class RebroadcastPlugin extends AutoenableMixinProvider implements MixinP
|
||||
async getMixin(mixinDevice: any, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState: WritableDeviceState) {
|
||||
this.setHasEnabledMixin(mixinDeviceState.id);
|
||||
|
||||
// 8-11-2022
|
||||
// old scrypted had a bug where mixin device state was not exposing properties like id correctly
|
||||
// across rpc boundaries.
|
||||
let fork = false;
|
||||
try {
|
||||
const info = await systemManager.getComponent('info');
|
||||
const version = await info.getVersion();
|
||||
fork = semver.gte(version, '0.2.5');
|
||||
}
|
||||
catch (e) {
|
||||
}
|
||||
|
||||
const { id } = mixinDeviceState;
|
||||
|
||||
if (fork && sdk.fork && typeof mixinDeviceState.id === 'string') {
|
||||
const forked = sdk.fork<RebroadcastPluginFork>();
|
||||
const { worker } = forked;
|
||||
|
||||
try {
|
||||
const result = await forked.result;
|
||||
const mixin = await result.newPrebufferMixin(async () => this.transcodeStorageSettings.values, mixinDevice, mixinDeviceInterfaces, mixinDeviceState);
|
||||
this.currentMixins.set(mixin, {
|
||||
worker,
|
||||
id,
|
||||
});
|
||||
return mixin;
|
||||
}
|
||||
catch (e) {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
else {
|
||||
const ret = await newPrebufferMixin(async () => this.transcodeStorageSettings.values, mixinDevice, mixinDeviceInterfaces, mixinDeviceState);
|
||||
this.currentMixins.set(ret, {
|
||||
worker: undefined,
|
||||
id,
|
||||
});
|
||||
return ret;
|
||||
}
|
||||
const forked = sdk.fork<RebroadcastPluginFork>();
|
||||
const { worker } = forked;
|
||||
const result = await forked.result;
|
||||
const mixin = await result.newPrebufferMixin(async () => this.transcodeStorageSettings.values, mixinDevice, mixinDeviceInterfaces, mixinDeviceState);
|
||||
this.currentMixins.set(mixin, {
|
||||
worker,
|
||||
id,
|
||||
});
|
||||
return mixin;
|
||||
}
|
||||
|
||||
async releaseMixin(id: string, mixinDevice: PrebufferMixin) {
|
||||
|
||||
@@ -69,7 +69,7 @@ export function startRFC4571Parser(console: Console, socket: Readable, sdp: stri
|
||||
let isActive = true;
|
||||
const events = new EventEmitter();
|
||||
// need this to prevent kill from throwing due to uncaught Error during cleanup
|
||||
events.on('error', e => console.error('rebroadcast error', e));
|
||||
events.on('error', () => {});
|
||||
|
||||
const parsedSdp = parseSdp(sdp);
|
||||
const audioSection = parsedSdp.msections.find(msection => msection.type === 'audio');
|
||||
@@ -191,19 +191,14 @@ export function startRFC4571Parser(console: Console, socket: Readable, sdp: stri
|
||||
|
||||
return {
|
||||
start,
|
||||
sdp: Promise.resolve([Buffer.from(sdp)]),
|
||||
inputAudioCodec,
|
||||
inputVideoCodec,
|
||||
get inputVideoResolution() {
|
||||
return inputVideoResolution;
|
||||
},
|
||||
sdp: Promise.resolve(sdp),
|
||||
get isActive() { return isActive },
|
||||
kill(error?: Error) {
|
||||
kill(error);
|
||||
},
|
||||
killed,
|
||||
resetActivityTimer,
|
||||
negotiateMediaStream: (requestMediaStream) => {
|
||||
negotiateMediaStream: (requestMediaStream,inputVideoCodec, inputAudioCodec) => {
|
||||
return negotiateMediaStream(sdp, mediaStreamOptions, inputVideoCodec, inputAudioCodec, requestMediaStream);
|
||||
},
|
||||
emit(container: 'rtsp', chunk: StreamChunk) {
|
||||
|
||||
@@ -24,7 +24,7 @@ export async function startRtspSession(console: Console, url: string, mediaStrea
|
||||
let isActive = true;
|
||||
const events = new EventEmitter();
|
||||
// need this to prevent kill from throwing due to uncaught Error during cleanup
|
||||
events.on('error', e => console.error('rebroadcast error', e));
|
||||
events.on('error', () => {});
|
||||
|
||||
let servers: dgram.Socket[] = [];
|
||||
const rtspClient = new RtspClient(url);
|
||||
@@ -192,80 +192,22 @@ export async function startRtspSession(console: Console, url: string, mediaStrea
|
||||
|
||||
// this return block is intentional, to ensure that the remaining code happens sync.
|
||||
return (() => {
|
||||
const audioSection = parsedSdp.msections.find(msection => msection.type === 'audio');
|
||||
const videoSection = parsedSdp.msections.find(msection => msection.type === 'video');
|
||||
|
||||
if (!videoSection)
|
||||
throw new Error('SDP does not contain a video section!');
|
||||
|
||||
const inputAudioCodec = audioSection?.codec;
|
||||
const inputVideoCodec = videoSection.codec;
|
||||
|
||||
|
||||
let inputVideoResolution: {
|
||||
width: number;
|
||||
height: number;
|
||||
};
|
||||
|
||||
const probeStart = Date.now();
|
||||
const probe = (chunk: StreamChunk) => {
|
||||
if (Date.now() - probeStart > 6000)
|
||||
events.removeListener('rtsp', probe);
|
||||
const sps = findH264NaluType(chunk, H264_NAL_TYPE_SPS);
|
||||
if (sps) {
|
||||
try {
|
||||
const parsedSps = spsParse(sps);
|
||||
inputVideoResolution = getSpsResolution(parsedSps);
|
||||
// console.log(inputVideoResolution);
|
||||
console.log('parsed bitstream sps', inputVideoResolution);
|
||||
}
|
||||
catch (e) {
|
||||
console.warn('sps parsing failed');
|
||||
inputVideoResolution = {
|
||||
width: NaN,
|
||||
height: NaN,
|
||||
}
|
||||
}
|
||||
events.removeListener('rtsp', probe);
|
||||
}
|
||||
}
|
||||
|
||||
if (!inputVideoResolution)
|
||||
events.on('rtsp', probe);
|
||||
|
||||
const sprop = videoSection
|
||||
?.fmtp?.[0]?.parameters?.['sprop-parameter-sets'];
|
||||
const sdpSps = sprop?.split(',')?.[0];
|
||||
// const sdpPps = sprop?.split(',')?.[1];
|
||||
|
||||
if (sdpSps) {
|
||||
try {
|
||||
const sps = Buffer.from(sdpSps, 'base64');
|
||||
const parsedSps = spsParse(sps);
|
||||
inputVideoResolution = getSpsResolution(parsedSps);
|
||||
console.log('parsed sdp sps', inputVideoResolution);
|
||||
}
|
||||
catch (e) {
|
||||
console.warn('sdp sps parsing failed');
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
parserSpecific,
|
||||
start,
|
||||
sdp: Promise.resolve([Buffer.from(sdp)]),
|
||||
inputAudioCodec,
|
||||
inputVideoCodec,
|
||||
get inputVideoResolution() {
|
||||
return inputVideoResolution;
|
||||
},
|
||||
sdp: Promise.resolve(sdp),
|
||||
get isActive() { return isActive },
|
||||
kill(error?: Error) {
|
||||
kill(error);
|
||||
},
|
||||
killed,
|
||||
resetActivityTimer,
|
||||
negotiateMediaStream: (requestMediaStream) => {
|
||||
negotiateMediaStream: (requestMediaStream, inputVideoCodec, inputAudioCodec) => {
|
||||
return negotiateMediaStream(sdp, mediaStreamOptions, inputVideoCodec, inputAudioCodec, requestMediaStream);
|
||||
},
|
||||
emit(container: 'rtsp', chunk: StreamChunk) {
|
||||
|
||||
plugins/reolink/package-lock.json (15 changes, generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/reolink",
|
||||
"version": "0.0.66",
|
||||
"version": "0.0.71",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/reolink",
|
||||
"version": "0.0.66",
|
||||
"version": "0.0.71",
|
||||
"license": "Apache",
|
||||
"dependencies": {
|
||||
"@scrypted/common": "file:../../common",
|
||||
@@ -14,6 +14,7 @@
|
||||
"onvif": "file:../onvif/onvif"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk",
|
||||
"@types/node": "^18.16.18"
|
||||
}
|
||||
},
|
||||
@@ -24,23 +25,23 @@
|
||||
"dependencies": {
|
||||
"@scrypted/sdk": "file:../sdk",
|
||||
"@scrypted/server": "file:../server",
|
||||
"http-auth-utils": "^3.0.2",
|
||||
"node-fetch-commonjs": "^3.1.1",
|
||||
"http-auth-utils": "^5.0.1",
|
||||
"typescript": "^5.3.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.10.8",
|
||||
"@types/node": "^20.11.0",
|
||||
"ts-node": "^10.9.2"
|
||||
}
|
||||
},
|
||||
"../../sdk": {
|
||||
"name": "@scrypted/sdk",
|
||||
"version": "0.3.4",
|
||||
"version": "0.3.31",
|
||||
"dev": true,
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@babel/preset-typescript": "^7.18.6",
|
||||
"adm-zip": "^0.4.13",
|
||||
"axios": "^0.21.4",
|
||||
"axios": "^1.6.5",
|
||||
"babel-loader": "^9.1.0",
|
||||
"babel-plugin-const-enum": "^1.1.0",
|
||||
"esbuild": "^0.15.9",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/reolink",
|
||||
"version": "0.0.66",
|
||||
"version": "0.0.71",
|
||||
"description": "Reolink Plugin for Scrypted",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache",
|
||||
@@ -40,6 +40,7 @@
|
||||
"onvif": "file:../onvif/onvif"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^18.16.18"
|
||||
"@types/node": "^18.16.18",
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { sleep } from '@scrypted/common/src/sleep';
|
||||
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, Intercom, MediaObject, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, PanTiltZoom, PanTiltZoomCommand, PictureOptions, Reboot, RequestPictureOptions, ScryptedDeviceType, ScryptedInterface, Setting } from "@scrypted/sdk";
|
||||
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, DeviceProvider, Device, Intercom, MediaObject, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, OnOff, PanTiltZoom, PanTiltZoomCommand, PictureOptions, Reboot, RequestPictureOptions, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, Setting } from "@scrypted/sdk";
|
||||
import { StorageSettings } from '@scrypted/sdk/storage-settings';
|
||||
import { EventEmitter } from "stream";
|
||||
import { Destroyable, RtspProvider, RtspSmartCamera, UrlMediaStreamOptions } from "../../rtsp/src/rtsp";
|
||||
@@ -8,15 +8,50 @@ import { listenEvents } from './onvif-events';
|
||||
import { OnvifIntercom } from './onvif-intercom';
|
||||
import { AIState, DevInfo, Enc, ReolinkCameraClient } from './reolink-api';
|
||||
|
||||
class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom, ObjectDetector, PanTiltZoom {
|
||||
class ReolinkCameraSiren extends ScryptedDeviceBase implements OnOff {
|
||||
intervalId: NodeJS.Timeout;
|
||||
|
||||
constructor(public camera: ReolinkCamera, nativeId: string) {
|
||||
super(nativeId);
|
||||
}
|
||||
|
||||
async turnOff() {
|
||||
await this.setSiren(false);
|
||||
}
|
||||
|
||||
async turnOn() {
|
||||
await this.setSiren(true);
|
||||
}
|
||||
|
||||
private async setSiren(on: boolean) {
|
||||
// doorbell doesn't seem to support alarm_mode = 'manul', so let's pump the API every second and run the siren in timed mode.
|
||||
if (this.camera.storageSettings.values.doorbell) {
|
||||
if (!on) {
|
||||
clearInterval(this.intervalId);
|
||||
return;
|
||||
}
|
||||
this.intervalId = setInterval(async () => {
|
||||
const api = this.camera.getClient();
|
||||
await api.setSiren(on, 1);
|
||||
}, 1000);
|
||||
return;
|
||||
}
|
||||
const api = this.camera.getClient();
|
||||
await api.setSiren(on);
|
||||
}
|
||||
}
|
||||
|
||||
class ReolinkCamera extends RtspSmartCamera implements Camera, DeviceProvider, Reboot, Intercom, ObjectDetector, PanTiltZoom {
|
||||
client: ReolinkCameraClient;
|
||||
onvifClient: OnvifCameraAPI;
|
||||
onvifIntercom = new OnvifIntercom(this);
|
||||
videoStreamOptions: Promise<UrlMediaStreamOptions[]>;
|
||||
motionTimeout: NodeJS.Timeout;
|
||||
siren: ReolinkCameraSiren;
|
||||
|
||||
storageSettings = new StorageSettings(this, {
|
||||
doorbell: {
|
||||
hide: true,
|
||||
title: 'Doorbell',
|
||||
description: 'This camera is a Reolink Doorbell.',
|
||||
type: 'boolean',
|
||||
@@ -28,7 +63,7 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
type: 'number',
|
||||
},
|
||||
motionTimeout: {
|
||||
group: 'Advanced',
|
||||
subgroup: 'Advanced',
|
||||
title: 'Motion Timeout',
|
||||
defaultValue: 20,
|
||||
type: 'number',
|
||||
@@ -50,10 +85,24 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
this.updatePtzCaps();
|
||||
},
|
||||
},
|
||||
doorbellUseOnvifDetections: {
|
||||
hide: true,
|
||||
defaultValue: true,
|
||||
}
|
||||
deviceInfo: {
|
||||
json: true,
|
||||
hide: true
|
||||
},
|
||||
abilities: {
|
||||
json: true,
|
||||
hide: true
|
||||
},
|
||||
useOnvifDetections: {
|
||||
subgroup: 'Advanced',
|
||||
title: 'Use ONVIF for Object Detection',
|
||||
choices: [
|
||||
'Default',
|
||||
'Enabled',
|
||||
'Disabled',
|
||||
],
|
||||
defaultValue: 'Default',
|
||||
},
|
||||
});
|
||||
|
||||
constructor(nativeId: string, provider: RtspProvider) {
|
||||
@@ -61,8 +110,8 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
|
||||
this.updateDeviceInfo();
|
||||
this.updateDevice();
|
||||
|
||||
this.updatePtzCaps();
|
||||
this.updateAbilities();
|
||||
}
|
||||
|
||||
updatePtzCaps() {
|
||||
@@ -74,6 +123,26 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
}
|
||||
}
|
||||
|
||||
async updateAbilities() {
|
||||
const api = this.getClient();
|
||||
const abilities = await api.getAbility();
|
||||
this.console.log('getAbility', JSON.stringify(abilities));
|
||||
}
|
||||
|
||||
supportsOnvifDetections() {
|
||||
const onvif: string[] = [
|
||||
// wifi
|
||||
'CX410W',
|
||||
'Reolink Video Doorbell WiFi',
|
||||
|
||||
// poe
|
||||
'CX410',
|
||||
'CX810',
|
||||
'Reolink Video Doorbell PoE',
|
||||
];
|
||||
return onvif.includes(this.storageSettings.values.deviceInfo?.model);
|
||||
}
|
||||
|
||||
async getDetectionInput(detectionId: string, eventId?: any): Promise<MediaObject> {
|
||||
return;
|
||||
}
|
||||
@@ -84,14 +153,8 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
}
|
||||
|
||||
async getObjectTypes(): Promise<ObjectDetectionTypes> {
|
||||
if (this.storageSettings.values.doorbell && this.storageSettings.values.doorbellUseOnvifDetections) {
|
||||
return {
|
||||
classes: ['person'],
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const ai: AIState = this.storageSettings.values.hasObjectDetector[0]?.value;
|
||||
const ai: AIState = this.storageSettings.values.hasObjectDetector?.value;
|
||||
const classes: string[] = [];
|
||||
|
||||
for (const key of Object.keys(ai)) {
|
||||
@@ -140,9 +203,12 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
if (this.storageSettings.values.ptz?.length) {
|
||||
interfaces.push(ScryptedInterface.PanTiltZoom);
|
||||
}
|
||||
if (this.storageSettings.values.hasObjectDetector || (this.storageSettings.values.doorbellUseOnvifDetections && this.storageSettings.values.doorbell)) {
|
||||
if (this.storageSettings.values.hasObjectDetector) {
|
||||
interfaces.push(ScryptedInterface.ObjectDetector);
|
||||
}
|
||||
if (this.storageSettings.values.abilities?.Ability?.supportAudioAlarm?.ver && this.storageSettings.values.abilities?.Ability?.supportAudioAlarm?.ver !== 0) {
|
||||
interfaces.push(ScryptedInterface.DeviceProvider);
|
||||
}
|
||||
await this.provider.updateDevice(this.nativeId, name, interfaces, type);
|
||||
}
|
||||
|
||||
@@ -168,7 +234,6 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
return this.client;
|
||||
}
|
||||
|
||||
|
||||
async getOnvifClient() {
|
||||
if (!this.onvifClient)
|
||||
this.onvifClient = await this.createOnvifClient();
|
||||
@@ -176,7 +241,7 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
}
|
||||
|
||||
createOnvifClient() {
|
||||
return connectCameraAPI(this.getHttpAddress(), this.getUsername(), this.getPassword(), this.console, this.storage.getItem('onvifDoorbellEvent'));
|
||||
return connectCameraAPI(this.getHttpAddress(), this.getUsername(), this.getPassword(), this.console, this.storageSettings.values.doorbell ? this.storage.getItem('onvifDoorbellEvent') : undefined);
|
||||
}
|
||||
|
||||
async listenEvents() {
|
||||
@@ -186,6 +251,7 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
// reolink ai might not trigger motion if objects are detected, weird.
|
||||
const startAI = async (ret: Destroyable, triggerMotion: () => void) => {
|
||||
let hasSucceeded = false;
|
||||
let hasSet = false;
|
||||
while (!killed) {
|
||||
try {
|
||||
const ai = await client.getAiState();
|
||||
@@ -204,6 +270,12 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
if (!classes.length)
|
||||
return;
|
||||
|
||||
|
||||
if (!hasSet) {
|
||||
hasSet = true;
|
||||
this.storageSettings.values.hasObjectDetector = ai;
|
||||
}
|
||||
|
||||
hasSucceeded = true;
|
||||
const od: ObjectsDetected = {
|
||||
timestamp: Date.now(),
|
||||
@@ -232,34 +304,44 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
}
|
||||
}
|
||||
|
||||
if (this.storageSettings.values.doorbell) {
|
||||
const useOnvifDetections: boolean = (this.storageSettings.values.useOnvifDetections === 'Default' && this.supportsOnvifDetections()) || this.storageSettings.values.useOnvifDetections === 'Enabled';
|
||||
if (useOnvifDetections) {
|
||||
const ret = await listenEvents(this, await this.createOnvifClient(), this.storageSettings.values.motionTimeout * 1000);
|
||||
if (!this.storageSettings.values.doorbellUseOnvifDetections) {
|
||||
startAI(ret, ret.triggerMotion);
|
||||
}
|
||||
else {
|
||||
ret.on('onvifEvent', (eventTopic: string, dataValue: any) => {
|
||||
if (eventTopic.includes('PeopleDetect')) {
|
||||
if (dataValue) {
|
||||
ret.emit('event', OnvifEvent.MotionStart);
|
||||
ret.on('onvifEvent', (eventTopic: string, dataValue: any) => {
|
||||
let className: string;
|
||||
if (eventTopic.includes('PeopleDetect')) {
|
||||
className = 'people';
|
||||
}
|
||||
else if (eventTopic.includes('FaceDetect')) {
|
||||
className = 'face';
|
||||
}
|
||||
else if (eventTopic.includes('VehicleDetect')) {
|
||||
className = 'vehicle';
|
||||
}
|
||||
else if (eventTopic.includes('DogCatDetect')) {
|
||||
className = 'dog_cat';
|
||||
}
|
||||
else if (eventTopic.includes('Package')) {
|
||||
className = 'package';
|
||||
}
|
||||
if (className && dataValue) {
|
||||
ret.emit('event', OnvifEvent.MotionStart);
|
||||
|
||||
const od: ObjectsDetected = {
|
||||
timestamp: Date.now(),
|
||||
detections: [
|
||||
{
|
||||
className: 'person',
|
||||
score: 1,
|
||||
}
|
||||
],
|
||||
};
|
||||
sdk.deviceManager.onDeviceEvent(this.nativeId, ScryptedInterface.ObjectDetector, od);
|
||||
}
|
||||
else {
|
||||
ret.emit('event', OnvifEvent.MotionStop);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
const od: ObjectsDetected = {
|
||||
timestamp: Date.now(),
|
||||
detections: [
|
||||
{
|
||||
className,
|
||||
score: 1,
|
||||
}
|
||||
],
|
||||
};
|
||||
sdk.deviceManager.onDeviceEvent(this.nativeId, ScryptedInterface.ObjectDetector, od);
|
||||
}
|
||||
else {
|
||||
ret.emit('event', OnvifEvent.MotionStop);
|
||||
}
|
||||
});
|
||||
|
||||
ret.on('close', () => killed = true);
|
||||
ret.on('error', () => killed = true);
|
||||
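The handler above now maps Reolink's ONVIF event topics onto object-detection class names with an if/else chain before emitting MotionStart and an ObjectsDetected event. An equivalent, easier-to-extend table-driven shape (a sketch, not the plugin's code):

// Topic substring to Scrypted detection class, mirroring the chain above.
const onvifTopicClasses: [substring: string, className: string][] = [
    ['PeopleDetect', 'people'],
    ['FaceDetect', 'face'],
    ['VehicleDetect', 'vehicle'],
    ['DogCatDetect', 'dog_cat'],
    ['Package', 'package'],
];

function classNameForTopic(eventTopic: string): string | undefined {
    return onvifTopicClasses.find(([substring]) => eventTopic.includes(substring))?.[1];
}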
@@ -397,15 +479,18 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
}
|
||||
];
|
||||
|
||||
if (deviceInfo?.model?.replace(' ', '').includes('Duo2') || deviceInfo?.model?.replace(' ', '').includes('Duo3')) {
|
||||
// these models don't have rtmp main stream or any ext streams... need to filter those out.
|
||||
}
|
||||
|
||||
if (deviceInfo?.model == "Reolink TrackMix PoE") {
|
||||
streams.push({
|
||||
name: '',
|
||||
id: 'autotrack.bcs',
|
||||
container: 'rtmp',
|
||||
video: { width: 896, height: 512 },
|
||||
url: ''
|
||||
|
||||
})
|
||||
url: '',
|
||||
});
|
||||
}
|
||||
|
||||
for (const stream of streams) {
|
||||
@@ -452,7 +537,6 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
}
|
||||
|
||||
return streams;
|
||||
|
||||
}
|
||||
|
||||
async putSetting(key: string, value: string) {
|
||||
@@ -475,22 +559,53 @@ class ReolinkCamera extends RtspSmartCamera implements Camera, Reboot, Intercom,
|
||||
return [
|
||||
...await super.getRtspPortOverrideSettings(),
|
||||
...await this.storageSettings.getSettings(),
|
||||
{
|
||||
key: 'rtmpPort',
|
||||
subgroup: 'Advanced',
|
||||
title: 'RTMP Port Override',
|
||||
placeholder: '1935',
|
||||
value: this.storage.getItem('rtmpPort'),
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
getRtmpAddress() {
|
||||
return `${this.getIPAddress()}:${this.storage.getItem('rtmpPort') || 1935}`;
|
||||
}
|
||||
|
||||
createSiren() {
|
||||
const sirenNativeId = `${this.nativeId}-siren`;
|
||||
this.siren = new ReolinkCameraSiren(this, sirenNativeId);
|
||||
|
||||
const sirenDevice: Device = {
|
||||
providerNativeId: this.nativeId,
|
||||
name: 'Reolink Siren',
|
||||
nativeId: sirenNativeId,
|
||||
info: {
|
||||
manufacturer: 'Reolink',
|
||||
serialNumber: this.nativeId,
|
||||
},
|
||||
interfaces: [
|
||||
ScryptedInterface.OnOff
|
||||
],
|
||||
type: ScryptedDeviceType.Siren,
|
||||
};
|
||||
sdk.deviceManager.onDevicesChanged({
|
||||
providerNativeId: this.nativeId,
|
||||
devices: [sirenDevice]
|
||||
});
|
||||
|
||||
return sirenNativeId;
|
||||
}
|
||||
|
||||
async getDevice(nativeId: string): Promise<any> {
|
||||
if (nativeId.endsWith('-siren')) {
|
||||
return this.siren;
|
||||
}
|
||||
throw new Error(`${nativeId} is unknown`);
|
||||
}
|
||||
|
||||
async releaseDevice(id: string, nativeId: string) {
|
||||
if (nativeId.endsWith('-siren')) {
|
||||
delete this.siren;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class ReolinkProider extends RtspProvider {
|
||||
class ReolinkProvider extends RtspProvider {
|
||||
getAdditionalInterfaces() {
|
||||
return [
|
||||
ScryptedInterface.Reboot,
|
||||
@@ -505,23 +620,41 @@ class ReolinkProider extends RtspProvider {
|
||||
const httpAddress = `${settings.ip}:${settings.httpPort || 80}`;
|
||||
let info: DeviceInformation = {};
|
||||
|
||||
const skipValidate = settings.skipValidate?.toString() === 'true';
|
||||
const username = settings.username?.toString();
|
||||
const password = settings.password?.toString();
|
||||
const doorbell = settings.doorbell?.toString();
|
||||
const skipValidate = settings.skipValidate?.toString() === 'true';
|
||||
// verify password only has alphanumeric characters because reolink can't handle
|
||||
// url escaping.
|
||||
if (!skipValidate && !/^[a-zA-Z0-9]+$/.test(password))
|
||||
throw new Error('Change the password this Reolink device to be alphanumeric characters only. See https://docs.scrypted.app/camera-preparation.html#authentication-setup for more information.');
|
||||
let doorbell: boolean = false;
|
||||
let name: string = 'Reolink Camera';
|
||||
let deviceInfo: DevInfo;
|
||||
let ai;
|
||||
let abilities;
|
||||
const rtspChannel = parseInt(settings.rtspChannel?.toString()) || 0;
|
||||
if (!skipValidate) {
|
||||
const api = new ReolinkCameraClient(httpAddress, username, password, rtspChannel, this.console);
|
||||
try {
|
||||
const api = new ReolinkCameraClient(httpAddress, username, password, rtspChannel, this.console);
|
||||
await api.jpegSnapshot();
|
||||
// there doesn't seem to be a way to get the actual model number information out of their api.
|
||||
}
|
||||
catch (e) {
|
||||
this.console.error('Error adding Reolink camera', e);
|
||||
throw e;
|
||||
}
|
||||
|
||||
try {
|
||||
deviceInfo = await api.getDeviceInfo();
|
||||
doorbell = deviceInfo.type === 'BELL';
|
||||
name = deviceInfo.name ?? 'Reolink Camera';
|
||||
ai = await api.getAiState();
|
||||
abilities = await api.getAbility();
|
||||
}
|
||||
catch (e) {
|
||||
this.console.error('Reolink camera does not support AI events', e);
|
||||
}
|
||||
}
|
||||
settings.newCamera ||= 'Reolink Camera';
|
||||
settings.newCamera ||= name;
|
||||
|
||||
nativeId = await super.createDevice(settings, nativeId);
|
||||
|
||||
@@ -529,11 +662,20 @@ class ReolinkProider extends RtspProvider {
|
||||
device.info = info;
|
||||
device.putSetting('username', username);
|
||||
device.putSetting('password', password);
|
||||
device.putSetting('doorbell', doorbell)
|
||||
device.putSetting('doorbell', doorbell.toString())
|
||||
device.storageSettings.values.deviceInfo = deviceInfo;
|
||||
device.storageSettings.values.abilities = abilities;
|
||||
device.storageSettings.values.hasObjectDetector = ai;
|
||||
device.setIPAddress(settings.ip?.toString());
|
||||
device.putSetting('rtspChannel', settings.rtspChannel?.toString());
|
||||
device.setHttpPortOverride(settings.httpPort?.toString());
|
||||
device.updateDeviceInfo();
|
||||
|
||||
if (abilities?.Ability?.supportAudioAlarm?.ver !== 0) {
|
||||
const sirenNativeId = device.createSiren();
|
||||
this.devices.set(sirenNativeId, device.siren);
|
||||
}
|
||||
|
||||
return nativeId;
|
||||
}
|
||||
|
||||
@@ -553,12 +695,6 @@ class ReolinkProider extends RtspProvider {
|
||||
title: 'IP Address',
|
||||
placeholder: '192.168.2.222',
|
||||
},
|
||||
{
|
||||
key: 'doorbell',
|
||||
title: 'Doorbell',
|
||||
description: 'This camera is a Reolink Doorbell.',
|
||||
type: 'boolean',
|
||||
},
|
||||
{
|
||||
key: 'rtspChannel',
|
||||
title: 'Channel Number Override',
|
||||
@@ -582,8 +718,15 @@ class ReolinkProider extends RtspProvider {
|
||||
}
|
||||
|
||||
createCamera(nativeId: string) {
|
||||
if (nativeId.endsWith('-siren')) {
|
||||
const camera = this.devices.get(nativeId.replace(/-siren/, '')) as ReolinkCamera;
|
||||
if (!camera.siren) {
|
||||
camera.siren = new ReolinkCameraSiren(camera, nativeId);
|
||||
}
|
||||
return camera.siren;
|
||||
}
|
||||
return new ReolinkCamera(nativeId, this);
|
||||
}
|
||||
}
|
||||
|
||||
export default ReolinkProider;
|
||||
export default ReolinkProvider;
|
||||
|
||||
@@ -59,6 +59,10 @@ export type AIState = {
|
||||
channel: number;
|
||||
};
|
||||
|
||||
export type SirenResponse = {
|
||||
rspCode: number;
|
||||
}
|
||||
|
||||
export class ReolinkCameraClient {
|
||||
credential: AuthFetchCredentialState;
|
||||
|
||||
@@ -122,7 +126,24 @@ export class ReolinkCameraClient {
|
||||
responseType: 'json',
|
||||
});
|
||||
return {
|
||||
value: response.body?.[0]?.value as AIState,
|
||||
value: (response.body?.[0]?.value || response.body?.value) as AIState,
|
||||
data: response.body,
|
||||
};
|
||||
}
|
||||
|
||||
async getAbility() {
|
||||
const url = new URL(`http://${this.host}/api.cgi`);
|
||||
const params = url.searchParams;
|
||||
params.set('cmd', 'GetAbility');
|
||||
params.set('channel', this.channelId.toString());
|
||||
params.set('user', this.username);
|
||||
params.set('password', this.password);
|
||||
const response = await this.request({
|
||||
url,
|
||||
responseType: 'json',
|
||||
});
|
||||
return {
|
||||
value: response.body?.[0]?.value || response.body?.value,
|
||||
data: response.body,
|
||||
};
|
||||
}
|
||||
@@ -247,4 +268,51 @@ export class ReolinkCameraClient {
|
||||
await this.ptzOp(op);
|
||||
}
|
||||
}
|
||||
|
||||
async setSiren(on: boolean, duration?: number) {
|
||||
const url = new URL(`http://${this.host}/api.cgi`);
|
||||
const params = url.searchParams;
|
||||
params.set('cmd', 'AudioAlarmPlay');
|
||||
params.set('user', this.username);
|
||||
params.set('password', this.password);
|
||||
const createReadable = (data: any) => {
|
||||
const pt = new PassThrough();
|
||||
pt.write(Buffer.from(JSON.stringify(data)));
|
||||
pt.end();
|
||||
return pt;
|
||||
}
|
||||
|
||||
let alarmMode;
|
||||
if (duration) {
|
||||
alarmMode = {
|
||||
alarm_mode: 'times',
|
||||
times: duration
|
||||
};
|
||||
}
|
||||
else {
|
||||
alarmMode = {
|
||||
alarm_mode: 'manul',
|
||||
manual_switch: on? 1 : 0
|
||||
};
|
||||
}
|
||||
|
||||
const response = await this.request({
|
||||
url,
|
||||
method: 'POST',
|
||||
responseType: 'json',
|
||||
}, createReadable([
|
||||
{
|
||||
cmd: "AudioAlarmPlay",
|
||||
action: 0,
|
||||
param: {
|
||||
channel: this.channelId,
|
||||
...alarmMode
|
||||
}
|
||||
},
|
||||
]));
|
||||
return {
|
||||
value: (response.body?.[0]?.value || response.body?.value) as SirenResponse,
|
||||
data: response.body,
|
||||
};
|
||||
}
|
||||
}
|
||||
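setSiren above drives the siren through Reolink's AudioAlarmPlay command: with a duration it uses timed mode ('times'), otherwise it toggles manual mode (spelled 'manul' by the Reolink API). A sketch of just the request-body construction; the URL, authentication, and channel handling are as shown in the diff above.

// Build the AudioAlarmPlay payload used by setSiren.
function buildAudioAlarmPlay(channel: number, on: boolean, duration?: number) {
    const alarmMode = duration
        ? { alarm_mode: 'times', times: duration }
        : { alarm_mode: 'manul', manual_switch: on ? 1 : 0 };

    return [
        {
            cmd: 'AudioAlarmPlay',
            action: 0,
            param: {
                channel,
                ...alarmMode,
            },
        },
    ];
}

// Example: a one second chirp, as the doorbell workaround issues every second while "on".
const body = JSON.stringify(buildAudioAlarmPlay(0, true, 1));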
|
||||
plugins/rknn/package-lock.json (4 changes, generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/rknn",
|
||||
"version": "0.1.1",
|
||||
"version": "0.1.2",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/rknn",
|
||||
"version": "0.1.1",
|
||||
"version": "0.1.2",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
|
||||
@@ -46,5 +46,5 @@
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
},
|
||||
"version": "0.1.1"
|
||||
"version": "0.1.2"
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
https://github.com/airockchip/rknn-toolkit2/raw/v2.0.0-beta0/rknn-toolkit-lite2/packages/rknn_toolkit_lite2-2.0.0b0-cp310-cp310-linux_aarch64.whl
|
||||
numpy<2.0.0
|
||||
pillow==10.3.0
|
||||
six==1.16.0
|
||||
shapely== 2.0.4
|
||||
|
||||
plugins/snapshot/package-lock.json (4 changes, generated)
@@ -1,12 +1,12 @@
{
"name": "@scrypted/snapshot",
"version": "0.2.52",
"version": "0.2.53",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/snapshot",
"version": "0.2.52",
"version": "0.2.53",
"dependencies": {
"@types/node": "^20.10.6",
"sharp": "^0.33.1",
@@ -1,6 +1,6 @@
{
"name": "@scrypted/snapshot",
"version": "0.2.52",
"version": "0.2.53",
"description": "Snapshot Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
@@ -746,6 +746,7 @@ export class SnapshotPlugin extends AutoenableMixinProvider implements MixinProv
});
}
catch (e) {
this.debugConsole?.error('snapshot http request failed', e);
response.send('', {
code: 500,
});
4
plugins/tensorflow-lite/package-lock.json
generated
@@ -1,12 +1,12 @@
{
"name": "@scrypted/tensorflow-lite",
"version": "0.1.60",
"version": "0.1.62",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tensorflow-lite",
"version": "0.1.60",
"version": "0.1.62",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

@@ -53,5 +53,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.60"
"version": "0.1.62"
}
1
plugins/tensorflow-lite/src/detect
Symbolic link
@@ -0,0 +1 @@
../../openvino/src/detect/
1
plugins/tensorflow-lite/src/predict
Symbolic link
@@ -0,0 +1 @@
../../openvino/src/predict
@@ -1,8 +1,6 @@
--extra-index-url https://google-coral.github.io/py-repo/
# must ensure numpy is pinned to prevent dependencies with an unpinned numpy from pulling numpy>=2.0.
numpy==1.26.4
pycoral~=2.0
tflite-runtime==2.5.0.post1

# pillow-simd is available on x64 linux
# pillow-simd confirmed not building with arm64 linux or apple silicon
Pillow>=5.4.1; 'linux' not in sys_platform or platform_machine != 'x86_64'
pillow-simd; 'linux' in sys_platform and platform_machine == 'x86_64'
Pillow==10.3.0
26
plugins/unifi-protect/package-lock.json
generated
@@ -1,12 +1,12 @@
{
"name": "@scrypted/unifi-protect",
"version": "0.0.146",
"version": "0.0.153",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/unifi-protect",
"version": "0.0.146",
"version": "0.0.153",
"license": "Apache",
"dependencies": {
"@koush/unifi-protect": "file:../../external/unifi-protect",
@@ -27,12 +27,12 @@
"dependencies": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^3.0.2",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^4.4.3"
"http-auth-utils": "^5.0.1",
"typescript": "^5.3.3"
},
"devDependencies": {
"@types/node": "^16.9.0"
"@types/node": "^20.11.0",
"ts-node": "^10.9.2"
}
},
"../../external/unifi-protect": {
@@ -61,12 +61,12 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.103",
"version": "0.3.31",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"axios": "^1.6.5",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
@@ -260,10 +260,10 @@
"requires": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"@types/node": "^16.9.0",
"http-auth-utils": "^3.0.2",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^4.4.3"
"@types/node": "^20.11.0",
"http-auth-utils": "^5.0.1",
"ts-node": "^10.9.2",
"typescript": "^5.3.3"
}
},
"@scrypted/sdk": {
@@ -273,7 +273,7 @@
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"axios": "^1.6.5",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
@@ -1,6 +1,6 @@
{
"name": "@scrypted/unifi-protect",
"version": "0.0.146",
"version": "0.0.153",
"description": "Unifi Protect Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -157,10 +157,11 @@ export class UnifiProtect extends ScryptedDeviceBase implements Settings, Device
const payload = updatePacket.payload as ProtectNvrUpdatePayloadEventAdd;
if (!payload.camera)
return;
const unifiCamera = this.cameras.get(payload.camera);
const nativeId = this.getNativeId({ id: payload.camera }, false);
const unifiCamera = this.cameras.get(nativeId);

if (!unifiCamera) {
this.console.log('unknown device event, sync needed?', payload.camera);
this.console.log('unknown device event, sync needed?', payload, nativeId);
return;
}
@@ -195,7 +196,7 @@ export class UnifiProtect extends ScryptedDeviceBase implements Settings, Device
// id: '661d86bf03e69c03e408d62a',
// modelKey: 'event'
// }

if (payload.type === 'smartDetectZone' || payload.type === 'smartDetectLine') {
unifiCamera.resetDetectionTimeout();
@@ -602,12 +603,15 @@ export class UnifiProtect extends ScryptedDeviceBase implements Settings, Device
return this.storageSettings.values.idMaps.nativeId?.[nativeId] || nativeId;
}

getNativeId(device: any, update: boolean) {
getNativeId(device: { id?: string, mac?: string; anonymousDeviceId?: string }, update: boolean) {
const { id, mac, anonymousDeviceId } = device;
const idMaps = this.storageSettings.values.idMaps;

// try to find an existing nativeId given the mac and anonymous device id
const found = (mac && idMaps.mac[mac]) || (anonymousDeviceId && idMaps.anonymousDeviceId[anonymousDeviceId]);
const found = (mac && idMaps.mac[mac])
|| (anonymousDeviceId && idMaps.anonymousDeviceId[anonymousDeviceId])
|| (id && idMaps.id[id])
;

// use the found id if one exists (device got provisioned a new id), otherwise use the id provided by the device.
const nativeId = found || id;
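The reworked getNativeId falls back through three stable identifiers (mac, anonymous device id, then the Protect-reported id) before trusting the id from the event itself. A minimal sketch of that lookup, where the IdMaps shape is an assumption standing in for the plugin's stored maps:

// Minimal sketch, not the plugin's exact types: IdMaps and its fields are assumed here.
interface IdMaps {
    mac: Record<string, string>;
    anonymousDeviceId: Record<string, string>;
    id: Record<string, string>;
}

function resolveNativeId(device: { id?: string, mac?: string, anonymousDeviceId?: string }, idMaps: IdMaps) {
    const { id, mac, anonymousDeviceId } = device;
    // prefer a previously recorded mapping (mac, then anonymous device id, then raw id),
    // otherwise fall back to the id reported by the device itself.
    const found = (mac && idMaps.mac[mac])
        || (anonymousDeviceId && idMaps.anonymousDeviceId[anonymousDeviceId])
        || (id && idMaps.id[id]);
    return found || id;
}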
4
plugins/webrtc/package-lock.json
generated
@@ -1,12 +1,12 @@
{
"name": "@scrypted/webrtc",
"version": "0.2.24",
"version": "0.2.27",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/webrtc",
"version": "0.2.24",
"version": "0.2.27",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
@@ -1,6 +1,6 @@
{
"name": "@scrypted/webrtc",
"version": "0.2.24",
"version": "0.2.27",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",
@@ -42,14 +42,14 @@ const fullResolutionAllowList = [
export async function createTrackForwarder(options: {
timeStart: number,
isLocalNetwork: boolean, destinationId: string, ipv4: boolean,
isLocalNetwork: boolean, destinationId: string, ipv4: boolean, type: string,
requestMediaStream: RequestMediaStream,
videoTransceiver: RTCRtpTransceiver, audioTransceiver: RTCRtpTransceiver,
maximumCompatibilityMode: boolean, clientOptions: RTCSignalingOptions,
}) {
const {
timeStart,
isLocalNetwork, destinationId,
isLocalNetwork, destinationId, type,
requestMediaStream,
videoTransceiver, audioTransceiver,
maximumCompatibilityMode,
@@ -279,7 +279,17 @@ export async function createTrackForwarder(options: {
// better knowledge of network capabilities, and also mirrors
// from my cursory research into ipv6, the MTU is no lesser than ipv4, in fact
// the min mtu is larger.
const videoPacketSize = 1378;
// 2024/06/20: webrtc MTU is typically 1200 as seen in chrome:
// https://groups.google.com/g/discuss-webrtc/c/gH5ysR3SoZI
// https://bloggeek.me/webrtcglossary/mtu-size/
// apparently this is due to guaranteeing reliability for weird networks.
// most of these networks can be correctly configured with an increased MTU (wireguard, tailscale),
// but others can not, like iCloud Private Relay.
// iCloud Private Relay ends up coming through TURN, as do many other restrictive networks.
// so when a turn (aka relay) server is used, a smaller MTU must be used. Otherwise optimistically use
// the normal/larger default.
// After a bit of fiddling with iCloud Private Relay, 1246 was arrived at as the optimal value.
const videoPacketSize = type === 'relay' ? 1246 : 1378;
let h264Repacketizer: H264Repacketizer;
let spsPps: ReturnType<typeof getSpsPps>;
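The packet size now follows the nominated ICE candidate type reported by isLocalIceTransport: relayed (TURN) connections get the conservative 1246-byte payload, everything else keeps 1378. A small sketch of that selection, with the constants mirroring the values in the diff above:

// Sketch of the packet-size decision, assuming `type` is the nominated
// remote candidate type ('host', 'srflx', 'prflx', or 'relay').
function pickVideoPacketSize(type: string | undefined): number {
    const RELAY_PACKET_SIZE = 1246;   // fits restrictive paths like iCloud Private Relay (TURN)
    const DEFAULT_PACKET_SIZE = 1378; // optimistic default for direct connections
    return type === 'relay' ? RELAY_PACKET_SIZE : DEFAULT_PACKET_SIZE;
}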
@@ -6,7 +6,7 @@ import { createBrowserSignalingSession } from "@scrypted/common/src/rtc-connect"
import { legacyGetSignalingSessionOptions } from '@scrypted/common/src/rtc-signaling';
import { SettingsMixinDeviceBase, SettingsMixinDeviceOptions } from '@scrypted/common/src/settings-mixin';
import { createZygote } from '@scrypted/common/src/zygote';
import sdk, { BufferConverter, ConnectOptions, DeviceCreator, DeviceCreatorSettings, DeviceProvider, FFmpegInput, HttpRequest, Intercom, MediaObject, MediaObjectOptions, MixinProvider, RTCSessionControl, RTCSignalingChannel, RTCSignalingClient, RTCSignalingOptions, RTCSignalingSession, RequestMediaStream, RequestMediaStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, SettingValue, Settings, VideoCamera, WritableDeviceState } from '@scrypted/sdk';
import sdk, { BufferConverter, ConnectOptions, DeviceCreator, DeviceCreatorSettings, DeviceProvider, FFmpegInput, HttpRequest, Intercom, MediaObject, MediaObjectOptions, MixinProvider, RTCSessionControl, RTCSignalingChannel, RTCSignalingClient, RTCSignalingOptions, RTCSignalingSession, RequestMediaStream, RequestMediaStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, ScryptedNativeId, Setting, SettingValue, Settings, VideoCamera, WritableDeviceState } from '@scrypted/sdk';
import { StorageSettings } from '@scrypted/sdk/storage-settings';
import crypto from 'crypto';
import ip from 'ip';
@@ -19,7 +19,7 @@ import { waitClosed } from './peerconnection-util';
import { WebRTCCamera } from "./webrtc-camera";
import { MediaStreamTrack, PeerConfig, RTCPeerConnection, defaultPeerConfig } from './werift';
import { WeriftSignalingSession } from './werift-signaling-session';
import { createRTCPeerConnectionSource, getRTCMediaStreamOptions } from './wrtc-to-rtsp';
import { RTCPeerConnectionPipe, createRTCPeerConnectionSource, getRTCMediaStreamOptions } from './wrtc-to-rtsp';

const { mediaManager, systemManager, deviceManager } = sdk;
@@ -78,7 +78,7 @@ class WebRTCMixin extends SettingsMixinDeviceBase<RTCSignalingClient & VideoCame
timeStart: Date.now(),
videoTransceiver: undefined,
audioTransceiver,
isLocalNetwork: undefined, destinationId: undefined, ipv4: undefined,
isLocalNetwork: undefined, destinationId: undefined, ipv4: undefined, type: undefined,
requestMediaStream: async () => media,
maximumCompatibilityMode: false,
clientOptions: undefined,
@@ -154,15 +154,29 @@ class WebRTCMixin extends SettingsMixinDeviceBase<RTCSignalingClient & VideoCame
return this.mixinDevice.getVideoStream(options);
}

const { intercom, mediaObject, pcClose } = await createRTCPeerConnectionSource({
console: this.console,
const result = zygote();
this.plugin.activeConnections++;
result.worker.on('exit', () => {
this.plugin.activeConnections--;
});

const fork = await result.result;

const { getIntercom, mediaObject, pcClose } = await fork.createRTCPeerConnectionSource({
__json_copy_serialize_children: true,
nativeId: this.nativeId,
mixinId: this.id,
mediaStreamOptions: this.createVideoStreamOptions(),
channel: this.mixinDevice,
startRTCSignalingSession: (session) => this.mixinDevice.startRTCSignalingSession(session),
maximumCompatibilityMode: this.plugin.storageSettings.values.maximumCompatibilityMode,
});

this.webrtcIntercom = intercom;
pcClose.finally(() => this.webrtcIntercom = undefined);
this.webrtcIntercom = getIntercom();
const pcc = pcClose();
pcc.finally(() => {
this.webrtcIntercom = undefined;
result.worker.terminate();
});

return mediaObject;
}
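The stream setup now runs createRTCPeerConnectionSource inside a zygote-forked worker: the plugin counts the connection, decrements the count when the worker exits, and terminates the worker once the peer connection closes. A rough lifecycle sketch, where the zygote factory and fork API shapes are assumptions mirroring the code above rather than the plugin's exact types:

// Rough lifecycle sketch; the zygote/fork shapes here are assumptions, not the plugin's real types.
interface ZygoteResult {
    worker: { on(event: 'exit', cb: () => void): void; terminate(): void };
    result: Promise<{ createRTCPeerConnectionSource(options: any): Promise<any> }>;
}

async function openForkedSource(plugin: { activeConnections: number }, zygote: () => ZygoteResult, options: any) {
    const result = zygote();
    plugin.activeConnections++;
    result.worker.on('exit', () => plugin.activeConnections--);

    const fork = await result.result;
    const { mediaObject, getIntercom, pcClose } = await fork.createRTCPeerConnectionSource(options);

    // tear the worker down once the peer connection ends, however it ends.
    pcClose().finally(() => result.worker.terminate());
    return { mediaObject, getIntercom };
}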
@@ -594,6 +608,23 @@ function handleCleanupConnection(cleanup: Deferred<string>, connection: WebRTCCo
export async function fork() {
return {
async createRTCPeerConnectionSource(options: {
__json_copy_serialize_children: true,
mixinId: string,
nativeId: ScryptedNativeId,
mediaStreamOptions: ResponseMediaStreamOptions,
startRTCSignalingSession: (session: RTCSignalingSession) => Promise<RTCSessionControl | undefined>,
maximumCompatibilityMode: boolean,
}): Promise<RTCPeerConnectionPipe> {
return createRTCPeerConnectionSource({
nativeId: this.nativeId,
mixinId: options.mixinId,
mediaStreamOptions: options.mediaStreamOptions,
startRTCSignalingSession: (session) => options.startRTCSignalingSession(session),
maximumCompatibilityMode: options.maximumCompatibilityMode,
});
},

async createConnection(message: any,
port: number,
clientSession: RTCSignalingSession,
@@ -100,7 +100,6 @@ async function createTrackForwarders(console: Console, killDeferred: Deferred<vo
if (track.ssrc)
outputArguments.push('-ssrc', track.ssrc.toString());

attachTrackDgram(track, server);
}
}
|
];

if (useRtp) {
if (video?.bind?.server)
attachTrackDgram(video, video.bind.server);
if (audio?.bind?.server)
attachTrackDgram(audio, audio.bind.server);

args.push(
'-sdp_file', 'pipe:4',
);
|
const { videoSection, audioSection } = reportTranscodedSections(rtspServer.sdp);
await rtspServer.handleSetup();

attachTrackDgram(video, rtspServer.setupTracks[videoSection?.control]?.rtp);
attachTrackDgram(audio, rtspServer.setupTracks[audioSection?.control]?.rtp);
if (video)
attachTrackDgram(video, rtspServer.setupTracks[videoSection?.control]?.rtp);
if (audio)
attachTrackDgram(audio, rtspServer.setupTracks[audioSection?.control]?.rtp);

rtspServerDeferred.resolve(rtspServer);
@@ -29,15 +29,17 @@ export class WebRTCCamera extends ScryptedDeviceBase implements VideoCamera, RTC
async getVideoStream(options?: RequestMediaStreamOptions): Promise<MediaObject> {
const mediaStreamOptions = getRTCMediaStreamOptions('webrtc', 'WebRTC');

const { mediaObject, intercom } = await createRTCPeerConnectionSource({
console: this.console,
// todo: sdk.fork
const { mediaObject, getIntercom } = await createRTCPeerConnectionSource({
mixinId: undefined,
nativeId: this.nativeId,
mediaStreamOptions,
channel: this,
startRTCSignalingSession: session => this.startRTCSignalingSession(session),
maximumCompatibilityMode: this.plugin.storageSettings.values.maximumCompatibilityMode,
});

this.intercom?.then(intercom => intercom.stopIntercom());
this.intercom = intercom;
this.intercom = getIntercom();

return mediaObject;
}
@@ -53,9 +53,11 @@ function isPrivate(address: string) {
export function isLocalIceTransport(pc: RTCPeerConnection) {
let isLocalNetwork = true;
let destinationId: string;
let type: string;
for (const ice of pc.iceTransports) {
const { remoteAddr, localCandidate } = (ice.connection as any).nominated;
const { remoteAddr, localCandidate, remoteCandidate } = (ice.connection as any).nominated;
const [address, port] = remoteAddr;
type = remoteCandidate.type;
if (!destinationId)
destinationId = address;
@@ -72,6 +74,7 @@ export function isLocalIceTransport(pc: RTCPeerConnection) {
const ipv4 = ip.isV4Format(destinationId);
return {
ipv4,
type,
isLocalNetwork,
destinationId,
};
@@ -2,7 +2,7 @@ import { Deferred } from "@scrypted/common/src/deferred";
import { listenZeroSingleClient } from "@scrypted/common/src/listen-cluster";
import { getNaluTypesInNalu, RtspServer } from "@scrypted/common/src/rtsp-server";
import { createSdpInput, parseSdp } from '@scrypted/common/src/sdp-utils';
import sdk, { FFmpegInput, Intercom, MediaObject, MediaStreamUrl, ResponseMediaStreamOptions, RTCAVSignalingSetup, RTCSessionControl, RTCSignalingChannel, RTCSignalingOptions, RTCSignalingSendIceCandidate, RTCSignalingSession, ScryptedMimeTypes } from "@scrypted/sdk";
import sdk, { FFmpegInput, Intercom, MediaObject, MediaStreamUrl, ResponseMediaStreamOptions, RTCAVSignalingSetup, RTCSessionControl, RTCSignalingChannel, RTCSignalingOptions, RTCSignalingSendIceCandidate, RTCSignalingSession, ScryptedMimeTypes, ScryptedNativeId } from "@scrypted/sdk";
import { FullIntraRequest } from "../../../external/werift/packages/rtp/src/rtcp/psfb/fullIntraRequest";
import { logConnectionState, waitClosed, waitConnected, waitIceConnected } from "./peerconnection-util";
import { startRtpForwarderProcess } from "./rtp-forwarders";
@@ -13,9 +13,10 @@ import { createRawResponse, getWeriftIceServers, isPeerConnectionAlive, logIsLoc
const { mediaManager } = sdk;

export interface RTCPeerConnectionPipe {
__json_copy_serialize_children: true,
mediaObject: MediaObject;
intercom: Promise<Intercom>;
pcClose: Promise<unknown>;
getIntercom(): Promise<Intercom>;
pcClose(): Promise<unknown>;
}

function ignoreDeferred(...d: Deferred<any>[]) {
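With the pipe now returned across the fork boundary, the eager promise fields became methods and the object carries the __json_copy_serialize_children marker used elsewhere in this change for results that cross that boundary. A brief consumer-side sketch, assuming pipe matches the reshaped RTCPeerConnectionPipe above:

// Consumer-side sketch; `pipe` is assumed to match the reshaped RTCPeerConnectionPipe interface.
async function consumePipe(pipe: { mediaObject: unknown; getIntercom(): Promise<unknown>; pcClose(): Promise<unknown> }) {
    // the intercom promise is produced on demand rather than awaited eagerly at creation time.
    const intercom = pipe.getIntercom();
    pipe.pcClose().finally(() => {
        // release anything tied to the peer connection once it closes.
    });
    return { mediaObject: pipe.mediaObject, intercom };
}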
@@ -27,12 +28,14 @@ function ignorePromise(...p: Promise<any>[]) {
}

export async function createRTCPeerConnectionSource(options: {
console: Console,
mixinId: string,
nativeId: ScryptedNativeId,
mediaStreamOptions: ResponseMediaStreamOptions,
channel: RTCSignalingChannel,
startRTCSignalingSession: (session: RTCSignalingSession) => Promise<RTCSessionControl | undefined>,
maximumCompatibilityMode: boolean,
}): Promise<RTCPeerConnectionPipe> {
const { mediaStreamOptions, channel, console, maximumCompatibilityMode } = options;
const { mediaStreamOptions, startRTCSignalingSession, mixinId, nativeId, maximumCompatibilityMode } = options;
const console = mixinId ? sdk.deviceManager.getMixinConsole(mixinId, nativeId) : sdk.deviceManager.getDeviceConsole(nativeId);

const { clientPromise, port } = await listenZeroSingleClient();
@@ -45,9 +48,9 @@ export async function createRTCPeerConnectionSource(options: {
const cleanup = () => {
console.log('webrtc/rtsp cleaning up');
clientPromise.then(client => client.destroy()).catch(() => {});
sessionControl.promise.then(sc => sc.endSession()).catch(() => {});
peerConnection.promise.then(pc => pc.close()).catch(() => {});
clientPromise.then(client => client.destroy()).catch(() => { });
sessionControl.promise.then(sc => sc.endSession()).catch(() => { });
peerConnection.promise.then(pc => pc.close()).catch(() => { });
ignorePromise(intercom.promise.then(intercom => intercom.stopIntercom()));
};
@@ -74,6 +77,8 @@ export async function createRTCPeerConnectionSource(options: {
iceServers: getWeriftIceServers(setup.configuration),
});

waitClosed(ret).then(() => cleanup());

logConnectionState(console, ret);
peerConnection.resolve(ret);
@@ -101,6 +106,8 @@ export async function createRTCPeerConnectionSource(options: {
let gotVideo = false;

const pc = await peerConnection.promise;
const timeout = setTimeout(() => cleanup(), 2 * 60 * 1000);
waitClosed(pc).then(() => clearTimeout(timeout));

const setupAudioTranscevier = (transciever: RTCRtpTransceiver) => {
audioTransceiver = transciever;
@@ -135,6 +142,7 @@ export async function createRTCPeerConnectionSource(options: {
track.onReceiveRtcp.subscribe(rtp => rtspServer.sendTrack(videoTrack, rtp.serialize(), true));

track.onReceiveRtp.once(() => {
clearTimeout(timeout);
let firSequenceNumber = 0;
const pictureLossInterval = setInterval(() => {
// i think this is necessary for older clients like ring
@@ -266,7 +274,7 @@ export async function createRTCPeerConnectionSource(options: {
}

const session = new SignalingSession();
const sc = await channel.startRTCSignalingSession(session);
const sc = await startRTCSignalingSession(session);
sessionControl.resolve(sc);
console.log('waiting for peer connection');
const pc = await peerConnection.promise;
@@ -275,7 +283,8 @@ export async function createRTCPeerConnectionSource(options: {
let destroyProcess: () => void;

const ic: Intercom = {
const ic: Intercom & { __json_copy_serialize_children: true } = {
__json_copy_serialize_children: true,
async startIntercom(media: MediaObject) {
if (!isPeerConnectionAlive(pc))
throw new Error('peer connection is closed');
@@ -351,9 +360,10 @@ export async function createRTCPeerConnectionSource(options: {
};

return {
__json_copy_serialize_children: true,
mediaObject: await mediaManager.createMediaObject(mediaStreamUrl, ScryptedMimeTypes.MediaStreamUrl),
intercom: intercom.promise,
pcClose,
getIntercom: () => intercom.promise,
pcClose: () => pcClose,
};
}
Some files were not shown because too many files have changed in this diff.