Compare commits


1 Commit

Author         SHA1        Message      Date
Koushik Dutta  fe165295fb  postrelease  2024-03-23 12:34:03 -07:00
51 changed files with 291 additions and 802 deletions

View File

@@ -136,17 +136,12 @@ export async function readLine(readable: Readable) {
}
export async function readString(readable: Readable | Promise<Readable>) {
const buffer = await readBuffer(readable);
return buffer.toString();
}
export async function readBuffer(readable: Readable | Promise<Readable>) {
const buffers: Buffer[] = [];
let data = '';
readable = await readable;
readable.on('data', buffer => {
buffers.push(buffer);
data += buffer.toString();
});
readable.resume();
await once(readable, 'end')
return Buffer.concat(buffers);
return data;
}
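
For reference, a minimal self-contained sketch of the Buffer-based read helpers shown in this hunk (assuming only Node's built-in `stream` and `events` modules); it accumulates raw chunks and concatenates them only after the stream ends, so binary data is never round-tripped through a string:

```typescript
import { once } from 'events';
import { Readable } from 'stream';

// Accumulate every chunk the stream emits, then join them into one Buffer after 'end'.
export async function readBuffer(readable: Readable | Promise<Readable>): Promise<Buffer> {
    const buffers: Buffer[] = [];
    readable = await readable;
    readable.on('data', buffer => buffers.push(buffer));
    readable.resume();
    await once(readable, 'end');
    return Buffer.concat(buffers);
}

// readString then decodes the buffered bytes in one step.
export async function readString(readable: Readable | Promise<Readable>): Promise<string> {
    const buffer = await readBuffer(readable);
    return buffer.toString();
}
```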

View File

@@ -1,6 +1,6 @@
# Home Assistant Addon Configuration
name: Scrypted
version: "20-jammy-full.s6-v0.96.0"
version: "18-jammy-full.s6-v0.93.0"
slug: scrypted
description: Scrypted is a high performance home video integration and automation platform
url: "https://github.com/koush/scrypted"

View File

@@ -10,7 +10,7 @@ function readyn() {
}
cd /tmp
SCRYPTED_VERSION=v0.96.0
SCRYPTED_VERSION=v0.93.0
SCRYPTED_TAR_ZST=scrypted-$SCRYPTED_VERSION.tar.zst
if [ -z "$VMID" ]
then

View File

@@ -1,12 +1,12 @@
{
"name": "scrypted",
"version": "1.3.14",
"version": "1.3.13",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "scrypted",
"version": "1.3.14",
"version": "1.3.13",
"license": "ISC",
"dependencies": {
"@scrypted/client": "^1.3.3",

View File

@@ -1,6 +1,6 @@
{
"name": "scrypted",
"version": "1.3.14",
"version": "1.3.13",
"description": "",
"main": "./dist/packages/cli/src/main.js",
"bin": {

View File

@@ -133,7 +133,11 @@ export async function serveMain(installVersion?: string) {
await startServer(installDir);
if (fs.existsSync(UPDATE_FILE)) {
if (fs.existsSync(EXIT_FILE)) {
console.log('Exiting.');
process.exit(1);
}
else if (fs.existsSync(UPDATE_FILE)) {
console.log('Update requested. Installing.');
await runCommandEatError('npm', '--prefix', installDir, 'install', '--production', '@scrypted/server@latest').catch(e => {
console.error('Update failed', e);
@@ -141,10 +145,6 @@ export async function serveMain(installVersion?: string) {
console.log('Exiting.');
process.exit(1);
}
else if (fs.existsSync(EXIT_FILE)) {
console.log('Exiting.');
process.exit(1);
}
else {
console.log(`Service unexpectedly exited. Restarting momentarily.`);
await sleep(10000);

View File

@@ -10,7 +10,7 @@
"port": 10081,
"request": "attach",
"skipFiles": [
"**/plugin-console.*",
"**/plugin-remote-worker.*",
"<node_internals>/**"
],
"preLaunchTask": "scrypted: deploy+debug",

View File

@@ -1,21 +1,19 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.144",
"version": "0.0.135",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/amcrest",
"version": "0.0.144",
"version": "0.0.135",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"content-type": "^1.0.5"
"@scrypted/sdk": "file:../../sdk"
},
"devDependencies": {
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
"@types/node": "^20.10.8"
}
},
"../../common": {
@@ -25,22 +23,23 @@
"dependencies": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^5.0.1",
"http-auth-utils": "^3.0.2",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^5.3.3"
},
"devDependencies": {
"@types/node": "^20.11.0",
"@types/node": "^20.10.8",
"ts-node": "^10.9.2"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.29",
"version": "0.3.4",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^1.6.5",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
@@ -78,29 +77,15 @@
"resolved": "../../sdk",
"link": true
},
"node_modules/@types/content-type": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@types/content-type/-/content-type-1.1.8.tgz",
"integrity": "sha512-1tBhmVUeso3+ahfyaKluXe38p+94lovUZdoVfQ3OnJo9uJC42JT7CBoN3k9HYhAae+GwiBYmHu+N9FZhOG+2Pg==",
"dev": true
},
"node_modules/@types/node": {
"version": "20.11.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
"version": "20.10.8",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.8.tgz",
"integrity": "sha512-f8nQs3cLxbAFc00vEU59yf9UyGUftkPaLGfvbVOIDdx2i1b8epBqj2aNGyP19fiyXWvlmZ7qC1XLjAzw/OKIeA==",
"dev": true,
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.144",
"version": "0.0.135",
"description": "Amcrest Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -36,11 +36,9 @@
},
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"content-type": "^1.0.5"
"@scrypted/sdk": "file:../../sdk"
},
"devDependencies": {
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
"@types/node": "^20.10.8"
}
}

View File

@@ -1,74 +1,6 @@
import { AuthFetchCredentialState, HttpFetchOptions, authHttpFetch } from '@scrypted/common/src/http-auth-fetch';
import { readLine } from '@scrypted/common/src/read-stream';
import { parseHeaders, readBody, readMessage } from '@scrypted/common/src/rtsp-server';
import contentType from 'content-type';
import { IncomingMessage } from 'http';
import { EventEmitter, Readable } from 'stream';
import { Destroyable } from '../../rtsp/src/rtsp';
import { Readable } from 'stream';
import { getDeviceInfo } from './probe';
import { Point } from '@scrypted/sdk';
// {
// "Action" : "Cross",
// "Class" : "Normal",
// "CountInGroup" : 1,
// "DetectRegion" : [
// [ 455, 260 ],
// [ 3586, 260 ],
// [ 3768, 7580 ],
// [ 382, 7451 ]
// ],
// "Direction" : "Enter",
// "EventID" : 10181,
// "GroupID" : 0,
// "Name" : "Rule1",
// "Object" : {
// "Action" : "Appear",
// "BoundingBox" : [ 2856, 1280, 3880, 4880 ],
// "Center" : [ 3368, 3080 ],
// "Confidence" : 0,
// "LowerBodyColor" : [ 0, 0, 0, 0 ],
// "MainColor" : [ 0, 0, 0, 0 ],
// "ObjectID" : 863,
// "ObjectType" : "Human",
// "RelativeID" : 0,
// "Speed" : 0
// },
// "PTS" : 43380319830.0,
// "RuleID" : 2,
// "Track" : [],
// "UTC" : 1711446999,
// "UTCMS" : 701
// }
export interface AmcrestObjectDetails {
Action: string;
BoundingBox: Point;
Center: Point;
Confidence: number;
LowerBodyColor: [number, number, number, number];
MainColor: [number, number, number, number];
ObjectID: number;
ObjectType: string;
RelativeID: number;
Speed: number;
}
export interface AmcrestEventData {
Action: string;
Class: string;
CountInGroup: number;
DetectRegion: Point[];
Direction: string;
EventID: number;
GroupID: number;
Name: string;
Object: AmcrestObjectDetails;
PTS: number;
RuleID: number;
Track: any[];
UTC: number;
UTCMS: number;
}
export enum AmcrestEvent {
MotionStart = "Code=VideoMotion;action=Start",
@@ -86,10 +18,6 @@ export enum AmcrestEvent {
DahuaTalkHangup = "Code=PassiveHungup;action=Start",
DahuaCallDeny = "Code=HungupPhone;action=Pulse",
DahuaTalkPulse = "Code=_CallNoAnswer_;action=Pulse",
SmartMotionHuman = "Code=SmartMotionHuman;action=Start",
SmartMotionVehicle = "Code=Vehicle;action=Start",
CrossLineDetection = "Code=CrossLineDetection;action=Start",
CrossRegionDetection = "Code=CrossRegionDetection;action=Start",
}
export class AmcrestCameraClient {
@@ -150,8 +78,7 @@ export class AmcrestCameraClient {
return response.body;
}
async listenEvents(): Promise<Destroyable> {
const events = new EventEmitter();
async listenEvents() {
const url = `http://${this.ip}/cgi-bin/eventManager.cgi?action=attach&codes=[All]`;
console.log('preparing event listener', url);
@@ -159,102 +86,32 @@ export class AmcrestCameraClient {
url,
responseType: 'readable',
});
const stream: IncomingMessage = response.body;
(events as any).destroy = () => {
stream.destroy();
events.removeAllListeners();
};
stream.on('close', () => {
events.emit('close');
});
stream.on('end', () => {
events.emit('end');
});
stream.on('error', e => {
events.emit('error', e);
});
const stream = response.body;
stream.socket.setKeepAlive(true);
const ct = stream.headers['content-type'];
// make content type parsable as content disposition filename
const cd = contentType.parse(ct);
let { boundary } = cd.parameters;
boundary = `--${boundary}`;
const boundaryEnd = `${boundary}--`;
(async () => {
while (true) {
let ignore = await readLine(stream);
ignore = ignore.trim();
if (!ignore)
continue;
if (ignore === boundaryEnd)
continue;
if (ignore !== boundary) {
this.console.error('expected boundary but found', ignore);
throw new Error('expected boundary');
}
const message = await readMessage(stream);
events.emit('data', message);
message.unshift('');
const headers = parseHeaders(message);
const body = await readBody(stream, headers);
const data = body.toString();
events.emit('data', data);
const parts = data.split(';');
let index: string;
try {
for (const part of parts) {
if (part.startsWith('index')) {
index = part.split('=')[1]?.trim();
}
}
}
catch (e) {
this.console.error('error parsing index', data);
}
let jsonData: any;
try {
for (const part of parts) {
if (part.startsWith('data')) {
jsonData = JSON.parse(part.split('=')[1]?.trim());
}
}
}
catch (e) {
this.console.error('error parsing data', data);
}
for (const event of Object.values(AmcrestEvent)) {
if (data.indexOf(event) !== -1) {
events.emit('event', event, index, data);
if (event === AmcrestEvent.SmartMotionHuman) {
events.emit('smart', 'person', jsonData);
}
else if (event === AmcrestEvent.SmartMotionVehicle) {
events.emit('smart', 'car', jsonData);
}
else if (event === AmcrestEvent.CrossLineDetection || event === AmcrestEvent.CrossRegionDetection) {
const eventData: AmcrestEventData = jsonData;
if (eventData?.Object?.ObjectType === 'Human') {
events.emit('smart', 'person', eventData);
}
else if (eventData?.Object?.ObjectType === 'Vehicle') {
events.emit('smart', 'car', eventData);
}
}
stream.on('data', (buffer: Buffer) => {
const data = buffer.toString();
const parts = data.split(';');
let index: string;
try {
for (const part of parts) {
if (part.startsWith('index')) {
index = part.split('=')[1]?.trim();
}
}
}
})()
.catch(() => stream.destroy());
return events as any as Destroyable;
catch (e) {
this.console.error('error parsing index', data);
}
// this.console?.log('event', data);
for (const event of Object.values(AmcrestEvent)) {
if (data.indexOf(event) !== -1) {
stream.emit('event', event, index, data);
}
}
});
return stream;
}
async enableContinousRecording(channel: number) {
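
A hedged usage sketch for the event-emitter variant of `listenEvents()` above; the 'event' and 'smart' payload shapes follow this diff, while the constructor signature, import path, and the `on()` typing on `Destroyable` are assumptions:

```typescript
import { AmcrestCameraClient } from './amcrest-api';

async function watchCamera(ip: string, username: string, password: string) {
    // constructor arguments are assumed here; adjust to the real client signature.
    const client = new AmcrestCameraClient(ip, username, password, console);
    const events = await client.listenEvents();

    // raw event codes parsed out of the multipart alert stream
    events.on('event', (event: string, index: string, payload: string) => {
        console.log('event', event, 'index', index);
    });

    // classified detections emitted for SmartMotion / CrossLine / CrossRegion events
    events.on('smart', (className: string, data: any) => {
        console.log('smart detection', className, data?.Object?.ObjectType);
    });

    // Destroyable: tear down the underlying HTTP stream and listeners when done.
    setTimeout(() => events.destroy(), 60_000);
}
```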

View File

@@ -1,11 +1,11 @@
import { ffmpegLogInitialOutput } from '@scrypted/common/src/media-helpers';
import { readLength } from "@scrypted/common/src/read-stream";
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, Lock, MediaObject, MediaStreamOptions, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, Reboot, RequestPictureOptions, RequestRecordingStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, VideoCameraConfiguration, VideoRecorder } from "@scrypted/sdk";
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, Lock, MediaObject, MediaStreamOptions, Reboot, RequestPictureOptions, RequestRecordingStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, VideoCameraConfiguration, VideoRecorder } from "@scrypted/sdk";
import child_process, { ChildProcess } from 'child_process';
import { PassThrough, Readable, Stream } from "stream";
import { OnvifIntercom } from "../../onvif/src/onvif-intercom";
import { RtspProvider, RtspSmartCamera, UrlMediaStreamOptions } from "../../rtsp/src/rtsp";
import { AmcrestCameraClient, AmcrestEvent, AmcrestEventData } from "./amcrest-api";
import { AmcrestCameraClient, AmcrestEvent } from "./amcrest-api";
const { mediaManager } = sdk;
@@ -22,13 +22,12 @@ function findValue(blob: string, prefix: string, key: string) {
return parts[1];
}
class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration, Camera, Intercom, Lock, VideoRecorder, Reboot, ObjectDetector {
class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration, Camera, Intercom, Lock, VideoRecorder, Reboot {
eventStream: Stream;
cp: ChildProcess;
client: AmcrestCameraClient;
videoStreamOptions: Promise<UrlMediaStreamOptions[]>;
onvifIntercom = new OnvifIntercom(this);
hasSmartDetection: boolean;
constructor(nativeId: string, provider: RtspProvider) {
super(nativeId, provider);
@@ -37,7 +36,6 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
this.storage.removeItem('amcrestDoorbell');
}
this.hasSmartDetection = this.storage.getItem('hasSmartDetection') === 'true';
this.updateDevice();
this.updateDeviceInfo();
}
@@ -186,11 +184,7 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
if (idx.toString() !== channelNumber)
return;
}
if (event === AmcrestEvent.MotionStart
|| event === AmcrestEvent.SmartMotionHuman
|| event === AmcrestEvent.SmartMotionVehicle
|| event === AmcrestEvent.CrossLineDetection
|| event === AmcrestEvent.CrossRegionDetection) {
if (event === AmcrestEvent.MotionStart) {
this.motionDetected = true;
resetMotionTimeout();
}
@@ -237,42 +231,9 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
});
events.on('smart', (className: string, data: AmcrestEventData) => {
if (!this.hasSmartDetection) {
this.hasSmartDetection = true;
this.storage.setItem('hasSmartDetection', 'true');
this.updateDevice();
}
const detected: ObjectsDetected = {
timestamp: Date.now(),
detections: [
{
score: 1,
className,
}
],
};
this.onDeviceEvent(ScryptedInterface.ObjectDetector, detected);
});
return events;
}
async getDetectionInput(detectionId: string, eventId?: any): Promise<MediaObject> {
return;
}
async getObjectTypes(): Promise<ObjectDetectionTypes> {
return {
classes: [
'person',
'car',
],
}
}
async getOtherSettings(): Promise<Setting[]> {
const ret = await super.getOtherSettings();
ret.push(
@@ -511,19 +472,13 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
if (isDoorbell || twoWayAudio) {
interfaces.push(ScryptedInterface.Intercom);
}
const enableDahuaLock = this.storage.getItem('enableDahuaLock') === 'true';
if (isDoorbell && doorbellType === DAHUA_DOORBELL_TYPE && enableDahuaLock) {
interfaces.push(ScryptedInterface.Lock);
}
const continuousRecording = this.storage.getItem('continuousRecording') === 'true';
if (continuousRecording)
interfaces.push(ScryptedInterface.VideoRecorder);
if (this.hasSmartDetection)
interfaces.push(ScryptedInterface.ObjectDetector);
this.provider.updateDevice(this.nativeId, this.name, interfaces, type);
}
@@ -566,7 +521,7 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
const doorbellType = this.storage.getItem('doorbellType');
// not sure if this all works, since i don't actually have a doorbell.
// good luck!
const channel = this.getRtspChannel() || '1';
@@ -593,22 +548,12 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
else {
args.push(
"-vn",
'-acodec', 'aac',
'-f', 'adts',
'pipe:3',
);
"-vn",
'-acodec', 'aac',
'-f', 'adts',
'pipe:3',
);
contentType = 'Audio/AAC';
// args.push(
// "-vn",
// '-acodec', 'pcm_mulaw',
// '-ac', '1',
// '-ar', '8000',
// '-sample_fmt', 's16',
// '-f', 'mulaw',
// 'pipe:3',
// );
// contentType = 'Audio/G.711A';
}
this.console.log('ffmpeg intercom', args);
@@ -628,19 +573,15 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
// seems the dahua doorbells preferred 1024 chunks. should investigate adts
// parsing and sending multipart chunks instead.
const passthrough = new PassThrough();
const abortController = new AbortController();
this.getClient().request({
url,
method: 'POST',
headers: {
'Content-Type': contentType,
'Content-Length': '9999999',
'Content-Length': '9999999'
},
signal: abortController.signal,
responseType: 'readable',
}, passthrough)
.catch(() => { })
.finally(() => this.console.log('request finished'))
}, passthrough);
try {
while (true) {
@@ -652,8 +593,7 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
finally {
this.console.log('audio finished');
passthrough.destroy();
abortController.abort();
passthrough.end();
}
this.stopIntercom();

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/chromecast",
"version": "0.1.58",
"version": "0.1.57",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/chromecast",
"version": "0.1.58",
"version": "0.1.57",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/chromecast",
"version": "0.1.58",
"version": "0.1.57",
"description": "Send video, audio, and text to speech notifications to Chromecast and Google Home devices",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -183,7 +183,7 @@ class CastDevice extends ScryptedDeviceBase implements MediaPlayer, Refresh, Eng
media = await mediaManager.createMediaObjectFromUrl(media);
}
}
else if (options?.mimeType?.startsWith('image/') || options?.mimeType?.startsWith('audio/')) {
else if (options?.mimeType?.startsWith('image/')) {
url = await mediaManager.convertMediaObjectToInsecureLocalUrl(media, options?.mimeType);
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/core",
"version": "0.3.19",
"version": "0.3.18",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/core",
"version": "0.3.19",
"version": "0.3.18",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/core",
"version": "0.3.19",
"version": "0.3.18",
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -11,7 +11,7 @@ export async function checkLxcDependencies() {
let needRestart = false;
if (!process.version.startsWith('v20.')) {
const cp = child_process.spawn('sh', ['-c', 'apt update -y && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && apt install -y nodejs']);
const cp = child_process.spawn('sh', ['-c', 'curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && apt install -y nodejs']);
const [exitCode] = await once(cp, 'exit');
if (exitCode !== 0)
sdk.log.a('Failed to install Node.js 20.x.');

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/coreml",
"version": "0.1.30",
"version": "0.1.29",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/coreml",
"version": "0.1.30",
"version": "0.1.29",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -41,5 +41,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.30"
"version": "0.1.29"
}

View File

@@ -12,14 +12,13 @@ from PIL import Image
from scrypted_sdk import Setting, SettingValue
import yolo
import ast
from predict import Prediction, PredictPlugin, Rectangle
predictExecutor = concurrent.futures.ThreadPoolExecutor(8, "CoreML-Predict")
def parse_label_contents(contents: str):
lines = contents.split(',')
lines = contents.splitlines()
ret = {}
for row_number, content in enumerate(lines):
pair = re.split(r"[:\s]+", content.strip(), maxsplit=1)
@@ -30,20 +29,6 @@ def parse_label_contents(contents: str):
return ret
def parse_labels(userDefined):
yolo = userDefined.get("names") or userDefined.get("yolo.names")
if yolo:
j = ast.literal_eval(yolo)
ret = {}
for k, v in j.items():
ret[int(k)] = v
return ret
classes = userDefined.get("classes")
if not classes:
raise Exception("no classes found in model metadata")
return parse_label_contents(classes)
class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Settings):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
@@ -54,23 +39,30 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
self.yolo = "yolo" in model
self.yolov8 = "yolov8" in model
self.yolov9 = "yolov9" in model
self.scrypted_model = "scrypted" in model
model_version = "v3"
mlmodel = 'model' if self.yolov8 or self.yolov9 else model
model_version = "v2"
print(f"model: {model}")
if not self.yolo:
# todo convert these to mlpackage
labelsFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/coco_labels.txt",
"coco_labels.txt",
)
modelFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/{mlmodel}.mlmodel",
f"https://github.com/koush/coreml-models/raw/main/{model}/{model}.mlmodel",
f"{model}.mlmodel",
)
else:
if self.yolov8 or self.yolov9:
if self.yolov8:
modelFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/{model}.mlmodel",
f"{model}.mlmodel",
)
elif self.yolov9:
files = [
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/weights/weight.bin",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{mlmodel}.mlmodel",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{model}.mlmodel",
f"{model}/{model}.mlpackage/Manifest.json",
]
@@ -85,7 +77,7 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/FeatureDescriptions.json",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/Metadata.json",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/weights/weight.bin",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{mlmodel}.mlmodel",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{model}.mlmodel",
f"{model}/{model}.mlpackage/Manifest.json",
]
@@ -96,6 +88,11 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
)
modelFile = os.path.dirname(p)
labelsFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/coco_80cl.txt",
f"{model_version}/{model}/coco_80cl.txt",
)
self.model = ct.models.MLModel(modelFile)
self.modelspec = self.model.get_spec()
@@ -103,7 +100,10 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
self.inputheight = self.inputdesc.type.imageType.height
self.inputwidth = self.inputdesc.type.imageType.width
self.labels = parse_labels(self.modelspec.description.metadata.userDefined)
labels_contents = open(labelsFile, "r").read()
self.labels = parse_label_contents(labels_contents)
# csv in mobilenet model
# self.modelspec.description.metadata.userDefined['classes']
self.loop = asyncio.get_event_loop()
self.minThreshold = 0.2
@@ -116,13 +116,11 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
"description": "The detection model used to find objects.",
"choices": [
"Default",
"scrypted_yolov8n_320",
"ssdlite_mobilenet_v2",
"yolov4-tiny",
"yolov8n",
"yolov8n_320",
"yolov9c_320",
"ssdlite_mobilenet_v2",
"yolov8n",
"yolov9c",
"yolov4-tiny",
],
"value": model,
},

View File

@@ -1,23 +1,22 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.146",
"version": "0.0.137",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/hikvision",
"version": "0.0.146",
"version": "0.0.137",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/xml2js": "^0.4.14",
"content-type": "^1.0.5",
"xml2js": "^0.6.2"
"@types/xml2js": "^0.4.11",
"lodash": "^4.17.21",
"xml2js": "^0.6.0"
},
"devDependencies": {
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
"@types/node": "^18.15.11"
}
},
"../../common": {
@@ -28,16 +27,17 @@
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^5.0.1",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^5.3.3"
},
"devDependencies": {
"@types/node": "^20.11.0",
"@types/node": "^20.10.8",
"ts-node": "^10.9.2"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.29",
"version": "0.3.4",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -83,50 +83,33 @@
"resolved": "../../sdk",
"link": true
},
"node_modules/@types/content-type": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@types/content-type/-/content-type-1.1.8.tgz",
"integrity": "sha512-1tBhmVUeso3+ahfyaKluXe38p+94lovUZdoVfQ3OnJo9uJC42JT7CBoN3k9HYhAae+GwiBYmHu+N9FZhOG+2Pg==",
"dev": true
},
"node_modules/@types/node": {
"version": "20.11.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
"dependencies": {
"undici-types": "~5.26.4"
}
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"node_modules/@types/xml2js": {
"version": "0.4.14",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
"integrity": "sha512-JdigeAKmCyoJUiQljjr7tQG3if9NkqGUgwEUqBvV0N7LM4HyQk7UXCnusRa1lnvXAEYJ8mw8GtZWioagNztOwA==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"engines": {
"node": ">= 0.6"
}
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
},
"node_modules/xml2js": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.0.tgz",
"integrity": "sha512-eLTh0kA8uHceqesPqSE+VvO1CDDJWMwlQfB6LuN6T8w6MaDJ8Txm8P7s5cHD0miF0V+GGTZrDQfxPZQVsur33w==",
"dependencies": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"
@@ -150,8 +133,9 @@
"requires": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"@types/node": "^20.11.0",
"@types/node": "^20.10.8",
"http-auth-utils": "^5.0.1",
"node-fetch-commonjs": "^3.1.1",
"ts-node": "^10.9.2",
"typescript": "^5.3.3"
}
@@ -180,47 +164,33 @@
"webpack-bundle-analyzer": "^4.5.0"
}
},
"@types/content-type": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@types/content-type/-/content-type-1.1.8.tgz",
"integrity": "sha512-1tBhmVUeso3+ahfyaKluXe38p+94lovUZdoVfQ3OnJo9uJC42JT7CBoN3k9HYhAae+GwiBYmHu+N9FZhOG+2Pg==",
"dev": true
},
"@types/node": {
"version": "20.11.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
"requires": {
"undici-types": "~5.26.4"
}
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"@types/xml2js": {
"version": "0.4.14",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
"integrity": "sha512-JdigeAKmCyoJUiQljjr7tQG3if9NkqGUgwEUqBvV0N7LM4HyQk7UXCnusRa1lnvXAEYJ8mw8GtZWioagNztOwA==",
"requires": {
"@types/node": "*"
}
},
"content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
},
"xml2js": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.0.tgz",
"integrity": "sha512-eLTh0kA8uHceqesPqSE+VvO1CDDJWMwlQfB6LuN6T8w6MaDJ8Txm8P7s5cHD0miF0V+GGTZrDQfxPZQVsur33w==",
"requires": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.146",
"version": "0.0.137",
"description": "Hikvision Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -37,12 +37,11 @@
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/xml2js": "^0.4.14",
"content-type": "^1.0.5",
"xml2js": "^0.6.2"
"@types/xml2js": "^0.4.11",
"lodash": "^4.17.21",
"xml2js": "^0.6.0"
},
"devDependencies": {
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
"@types/node": "^18.15.11"
}
}

View File

@@ -1,17 +1,8 @@
import { AuthFetchCredentialState, HttpFetchOptions, authHttpFetch } from '@scrypted/common/src/http-auth-fetch';
import { readLine } from '@scrypted/common/src/read-stream';
import { parseHeaders, readBody, readMessage } from '@scrypted/common/src/rtsp-server';
import contentType from 'content-type';
import { IncomingMessage } from 'http';
import { EventEmitter, Readable } from 'stream';
import { Destroyable } from '../../rtsp/src/rtsp';
import { Readable } from 'stream';
import { getDeviceInfo } from './probe';
export const detectionMap = {
human: 'person',
vehicle: 'car',
}
export function getChannel(channel: string) {
return channel || '101';
}
@@ -24,8 +15,6 @@ export enum HikvisionCameraEvent {
// <eventType>linedetection</eventType>
// <eventState>inactive</eventState>
LineDetection = "<eventType>linedetection</eventType>",
RegionEntrance = "<eventType>regionEntrance</eventType>",
RegionExit = "<eventType>regionExit</eventType>",
// <eventType>fielddetection</eventType>
// <eventState>active</eventState>
// <eventType>fielddetection</eventType>
@@ -42,7 +31,7 @@ export interface HikvisionCameraStreamSetup {
export class HikvisionCameraAPI {
credential: AuthFetchCredentialState;
deviceModel: Promise<string>;
listenerPromise: Promise<Destroyable>;
listenerPromise: Promise<IncomingMessage>;
constructor(public ip: string, username: string, password: string, public console: Console) {
this.credential = {
@@ -140,106 +129,35 @@ export class HikvisionCameraAPI {
return response.body;
}
async listenEvents(): Promise<Destroyable> {
const events = new EventEmitter();
(events as any).destroy = () => { };
async listenEvents() {
// support multiple cameras listening to a single stream
if (!this.listenerPromise) {
const url = `http://${this.ip}/ISAPI/Event/notification/alertStream`;
let lastSmartDetection: string;
this.listenerPromise = this.request({
url,
responseType: 'readable',
}).then(response => {
const stream: IncomingMessage = response.body;
(events as any).destroy = () => {
stream.destroy();
events.removeAllListeners();
};
stream.on('close', () => {
this.listenerPromise = undefined;
events.emit('close');
});
stream.on('end', () => {
this.listenerPromise = undefined;
events.emit('end');
});
stream.on('error', e => {
this.listenerPromise = undefined;
events.emit('error', e);
});
const stream = response.body;
stream.socket.setKeepAlive(true);
const ct = stream.headers['content-type'];
// make content type parsable as content disposition filename
const cd = contentType.parse(ct);
let { boundary } = cd.parameters;
boundary = `--${boundary}`;
const boundaryEnd = `${boundary}--`;
(async () => {
while (true) {
let ignore = await readLine(stream);
ignore = ignore.trim();
if (!ignore)
continue;
if (ignore === boundaryEnd)
continue;
if (ignore !== boundary) {
this.console.error('expected boundary but found', ignore);
throw new Error('expected boundary');
}
const message = await readMessage(stream);
events.emit('data', message);
message.unshift('');
const headers = parseHeaders(message);
const body = await readBody(stream, headers);
try {
if (!headers['content-type'].includes('application/xml') && lastSmartDetection) {
if (!headers['content-type']?.startsWith('image/jpeg')) {
continue;
}
events.emit('smart', lastSmartDetection, body);
lastSmartDetection = undefined;
continue;
}
}
finally {
// is it possible that smart detections are sent without images?
// if so, flush this detection.
if (lastSmartDetection) {
events.emit('smart', lastSmartDetection);
}
}
const data = body.toString();
events.emit('data', data);
for (const event of Object.values(HikvisionCameraEvent)) {
if (data.indexOf(event) !== -1) {
const cameraNumber = data.match(/<channelID>(.*?)</)?.[1] || data.match(/<dynChannelID>(.*?)</)?.[1];
const inactive = data.indexOf('<eventState>inactive</eventState>') !== -1;
events.emit('event', event, cameraNumber, inactive, data);
if (event === HikvisionCameraEvent.LineDetection
|| event === HikvisionCameraEvent.RegionEntrance
|| event === HikvisionCameraEvent.RegionExit
|| event === HikvisionCameraEvent.FieldDetection) {
lastSmartDetection = data;
}
}
stream.on('data', (buffer: Buffer) => {
const data = buffer.toString();
for (const event of Object.values(HikvisionCameraEvent)) {
if (data.indexOf(event) !== -1) {
const cameraNumber = data.match(/<channelID>(.*?)</)?.[1] || data.match(/<dynChannelID>(.*?)</)?.[1];
const inactive = data.indexOf('<eventState>inactive</eventState>') !== -1;
stream.emit('event', event, cameraNumber, inactive, data);
}
}
})()
.catch(() => stream.destroy());
return events as any as Destroyable;
});
return stream;
});
this.listenerPromise.catch(() => this.listenerPromise = undefined);
this.listenerPromise.then(stream => {
stream.on('close', () => this.listenerPromise = undefined);
stream.on('end', () => this.listenerPromise = undefined);
});
}
return this.listenerPromise;
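
The `listenerPromise` caching above is what lets several camera instances share one alert stream: the promise is memoized, cleared on failure, and cleared again when the stream closes or ends so the next caller reconnects. A generic sketch of that pattern (class and method names here are illustrative, not from the plugin):

```typescript
import { EventEmitter } from 'events';

class SharedListener<T extends EventEmitter> {
    private listenerPromise?: Promise<T>;

    constructor(private readonly connect: () => Promise<T>) { }

    listen(): Promise<T> {
        if (!this.listenerPromise) {
            const promise = this.connect();
            this.listenerPromise = promise;
            // drop the cached promise on failure or once the stream finishes,
            // so the next listen() call establishes a fresh connection.
            promise.then(stream => {
                stream.on('close', () => this.listenerPromise = undefined);
                stream.on('end', () => this.listenerPromise = undefined);
            }, () => this.listenerPromise = undefined);
            return promise;
        }
        return this.listenerPromise;
    }
}
```

Each consumer then awaits `listen()` instead of opening its own connection to the alert endpoint.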

View File

@@ -1,12 +1,11 @@
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, MediaObject, MediaStreamOptions, ObjectDetectionResult, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, Reboot, RequestPictureOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting } from "@scrypted/sdk";
import crypto from 'crypto';
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, MediaObject, MediaStreamOptions, Reboot, RequestPictureOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting } from "@scrypted/sdk";
import { PassThrough } from "stream";
import xml2js from 'xml2js';
import { RtpPacket } from '../../../external/werift/packages/rtp/src/rtp/rtp';
import { OnvifIntercom } from "../../onvif/src/onvif-intercom";
import { RtspProvider, RtspSmartCamera, UrlMediaStreamOptions } from "../../rtsp/src/rtsp";
import { startRtpForwarderProcess } from '../../webrtc/src/rtp-forwarders';
import { HikvisionCameraAPI, HikvisionCameraEvent, detectionMap } from "./hikvision-camera-api";
import { HikvisionCameraAPI, HikvisionCameraEvent } from "./hikvision-camera-api";
const { mediaManager } = sdk;
@@ -16,17 +15,15 @@ function channelToCameraNumber(channel: string) {
return channel.substring(0, channel.length - 2);
}
class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboot, ObjectDetector {
class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboot {
detectedChannels: Promise<Map<string, MediaStreamOptions>>;
client: HikvisionCameraAPI;
onvifIntercom = new OnvifIntercom(this);
activeIntercom: Awaited<ReturnType<typeof startRtpForwarderProcess>>;
hasSmartDetection: boolean;
constructor(nativeId: string, provider: RtspProvider) {
super(nativeId, provider);
this.hasSmartDetection = this.storage.getItem('hasSmartDetection') === 'true';
this.updateDevice();
this.updateDeviceInfo();
}
@@ -66,52 +63,41 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
let ignoreCameraNumber: boolean;
const motionTimeoutDuration = 20000;
// check if the camera+channel field is in use, and filter events.
const checkCameraNumber = async (cameraNumber: string) => {
// check if the camera+channel field is in use, and filter events.
if (this.getRtspChannel()) {
// it is possible to set it up to use a camera number
// on an nvr IP (which gives RTSP urls through the NVR), but then use a http port
// that gives a filtered event stream from only that camera.
// in this case, the camera numbers will not
// match as they will always be "1".
// to detect that a camera specific endpoint is being used
// can look at the channel ids, and see if that camera number is found.
// this is different from the use case where the NVR or camera
// is using a port other than 80 (the default).
// could add a setting to have the user explicitly denote nvr usage
// but that is error prone.
const userCameraNumber = this.getCameraNumber();
if (ignoreCameraNumber === undefined && this.detectedChannels) {
const channelIds = (await this.detectedChannels).keys();
ignoreCameraNumber = true;
for (const id of channelIds) {
if (channelToCameraNumber(id) === userCameraNumber) {
ignoreCameraNumber = false;
break;
}
}
}
if (!ignoreCameraNumber && cameraNumber !== userCameraNumber) {
// this.console.error(`### Skipping motion event ${cameraNumber} != ${this.getCameraNumber()}`);
return false;
}
}
return true;
};
events.on('event', async (event: HikvisionCameraEvent, cameraNumber: string, inactive: boolean) => {
if (event === HikvisionCameraEvent.MotionDetected
|| event === HikvisionCameraEvent.LineDetection
|| event === HikvisionCameraEvent.RegionEntrance
|| event === HikvisionCameraEvent.RegionExit
|| event === HikvisionCameraEvent.FieldDetection) {
if (!await checkCameraNumber(cameraNumber))
return;
// check if the camera+channel field is in use, and filter events.
if (this.getRtspChannel()) {
// it is possible to set it up to use a camera number
// on an nvr IP (which gives RTSP urls through the NVR), but then use a http port
// that gives a filtered event stream from only that camera.
// in this case, the camera numbers will not
// match as they will always be "1".
// to detect that a camera specific endpoint is being used
// can look at the channel ids, and see if that camera number is found.
// this is different from the use case where the NVR or camera
// is using a port other than 80 (the default).
// could add a setting to have the user explicitly denote nvr usage
// but that is error prone.
const userCameraNumber = this.getCameraNumber();
if (ignoreCameraNumber === undefined && this.detectedChannels) {
const channelIds = (await this.detectedChannels).keys();
ignoreCameraNumber = true;
for (const id of channelIds) {
if (channelToCameraNumber(id) === userCameraNumber) {
ignoreCameraNumber = false;
break;
}
}
}
if (!ignoreCameraNumber && cameraNumber !== userCameraNumber) {
// this.console.error(`### Skipping motion event ${cameraNumber} != ${this.getCameraNumber()}`);
return;
}
}
this.motionDetected = true;
clearTimeout(motionTimeout);
@@ -120,107 +106,11 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
this.motionDetected = false;
}, motionTimeoutDuration);
}
});
let inputDimensions: [number, number];
events.on('smart', async (data: string, image: Buffer) => {
if (!this.hasSmartDetection) {
this.hasSmartDetection = true;
this.storage.setItem('hasSmartDetection', 'true');
this.updateDevice();
}
const xml = await xml2js.parseStringPromise(data);
const [channelId] = xml.EventNotificationAlert.channelID;
if (!await checkCameraNumber(channelId)) {
this.console.warn('chann fail')
return;
}
const now = Date.now();
let detections: ObjectDetectionResult[] = xml.EventNotificationAlert?.DetectionRegionList?.map(region => {
const { DetectionRegionEntry } = region;
const dre = DetectionRegionEntry[0];
if (!DetectionRegionEntry)
return;
const { detectionTarget } = dre;
// const { TargetRect } = dre;
// const { X, Y, width, height } = TargetRect[0];
const [name] = detectionTarget;
return {
score: 1,
className: detectionMap[name] || name,
// boundingBox: [
// parseInt(X),
// parseInt(Y),
// parseInt(width),
// parseInt(height),
// ],
// movement: {
// moving: true,
// firstSeen: now,
// lastSeen: now,
// }
} as ObjectDetectionResult;
});
detections = detections?.filter(d => d);
if (!detections?.length)
return;
// if (inputDimensions === undefined && loadSharp()) {
// try {
// const { image: i, metadata } = await loadVipsMetadata(image);
// i.destroy();
// inputDimensions = [metadata.width, metadata.height];
// }
// catch (e) {
// inputDimensions = null;
// }
// finally {
// }
// }
let detectionId: string;
if (image) {
detectionId = crypto.randomBytes(4).toString('hex');
this.recentDetections.set(detectionId, image);
setTimeout(() => this.recentDetections.delete(detectionId), 10000);
}
const detected: ObjectsDetected = {
inputDimensions,
detectionId,
timestamp: now,
detections,
};
this.onDeviceEvent(ScryptedInterface.ObjectDetector, detected);
});
})
return events;
}
recentDetections = new Map<string, Buffer>();
async getDetectionInput(detectionId: string, eventId?: any): Promise<MediaObject> {
const image = this.recentDetections.get(detectionId);
if (!image)
return;
return mediaManager.createMediaObject(image, 'image/jpeg');
}
async getObjectTypes(): Promise<ObjectDetectionTypes> {
return {
classes: [
...Object.values(detectionMap),
]
}
}
createClient() {
return new HikvisionCameraAPI(this.getHttpAddress(), this.getUsername(), this.getPassword(), this.console);
}
@@ -394,9 +284,6 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
interfaces.push(ScryptedInterface.Intercom);
}
if (this.hasSmartDetection)
interfaces.push(ScryptedInterface.ObjectDetector);
this.provider.updateDevice(this.nativeId, this.name, interfaces, type);
}
@@ -521,7 +408,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
const put = this.getClient().request({
url,
method: 'PUT',
responseType: 'text',
responseType: 'readable',
headers: {
'Content-Type': 'application/octet-stream',
// 'Connection': 'close',
@@ -553,12 +440,6 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
forwarder.killPromise.finally(() => {
this.console.log('audio finished');
passthrough.end();
setTimeout(() => {
this.stopIntercom();
}, 1000);
});
put.finally(() => {
this.stopIntercom();
});
@@ -567,7 +448,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
if (response.statusCode !== 200)
forwarder.kill();
})
.catch(() => forwarder.kill());
.catch(() => forwarder.kill());
}
async stopIntercom(): Promise<void> {
@@ -700,4 +581,4 @@ class HikvisionProvider extends RtspProvider {
}
}
export default HikvisionProvider;
export default new HikvisionProvider();

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/onvif",
"version": "0.1.13",
"version": "0.1.10",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/onvif",
"version": "0.1.13",
"version": "0.1.10",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/onvif",
"version": "0.1.13",
"version": "0.1.10",
"description": "ONVIF Camera Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",

View File

@@ -97,7 +97,7 @@ export class OnvifIntercom implements Intercom {
this.camera.console.log('backchannel transport', transportDict);
const availableMatches = audioBackchannel.rtpmaps.filter(rtpmap => rtpmap.ffmpegEncoder);
const defaultMatch = audioBackchannel.rtpmaps.find(rtpmap => rtpmap.ffmpegEncoder === 'pcm_mulaw') || audioBackchannel.rtpmaps.find(rtpmap => rtpmap.ffmpegEncoder);
const defaultMatch = audioBackchannel.rtpmaps.find(rtpmap => rtpmap.ffmpegEncoder);
if (!defaultMatch)
throw new Error('no supported codec was found for back channel');
@@ -151,7 +151,7 @@ export class OnvifIntercom implements Intercom {
}
const elapsedRtpTimeMs = Math.abs(pending.header.timestamp - p.header.timestamp) / 8000 * 1000;
if (elapsedRtpTimeMs <= 160) {
if (elapsedRtpTimeMs <= 60) {
pending.payload = Buffer.concat([pending.payload, p.payload]);
return;
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/openvino",
"version": "0.1.55",
"version": "0.1.54",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/openvino",
"version": "0.1.55",
"version": "0.1.54",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -41,5 +41,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.55"
"version": "0.1.54"
}

View File

@@ -79,19 +79,14 @@ class OpenVINOPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.S
self.yolo = 'yolo' in model
self.yolov8 = "yolov8" in model
self.yolov9 = "yolov9" in model
self.scrypted_model = "scrypted" in model
self.sigmoid = model == 'yolo-v4-tiny-tf'
print(f'model/mode/precision: {model}/{mode}/{precision}')
ovmodel = 'best' if self.scrypted_model else model
model_version = 'v4'
xmlFile = self.downloadFile(f'https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.xml', f'{model_version}/{precision}/{ovmodel}.xml')
binFile = self.downloadFile(f'https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.bin', f'{model_version}/{precision}/{ovmodel}.bin')
if self.scrypted_model:
labelsFile = self.downloadFile('https://raw.githubusercontent.com/koush/openvino-models/main/scrypted_labels.txt', 'scrypted_labels.txt')
elif self.yolo:
xmlFile = self.downloadFile(f'https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{model}.xml', f'{model_version}/{precision}/{model}.xml')
binFile = self.downloadFile(f'https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{model}.bin', f'{model_version}/{precision}/{model}.bin')
if self.yolo:
labelsFile = self.downloadFile('https://raw.githubusercontent.com/koush/openvino-models/main/coco_80cl.txt', 'coco_80cl.txt')
else:
labelsFile = self.downloadFile('https://raw.githubusercontent.com/koush/openvino-models/main/coco_labels.txt', 'coco_labels.txt')
@@ -137,14 +132,13 @@ class OpenVINOPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.S
'description': 'The detection model used to find objects.',
'choices': [
'Default',
'scrypted_yolov8n_320',
'yolov8n_320',
'yolov9c_320',
'ssd_mobilenet_v1_coco',
'ssdlite_mobilenet_v2',
'yolo-v3-tiny-tf',
'yolo-v4-tiny-tf',
'yolov8n',
'yolov8n_320',
'yolov9c_320',
],
'value': model,
},

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/python-codecs",
"version": "0.1.96",
"version": "0.1.95",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/python-codecs",
"version": "0.1.96",
"version": "0.1.95",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/python-codecs",
"version": "0.1.96",
"version": "0.1.95",
"description": "Python Codecs for Scrypted",
"keywords": [
"scrypted",

View File

@@ -1,7 +1,6 @@
import asyncio
import time
import traceback
import os
from typing import Any, AsyncGenerator, List, Union
import scrypted_sdk
@@ -203,7 +202,7 @@ def multiprocess_exit():
class CodecFork:
def timeoutExit(self):
def timeoutExit():
print("Frame yield timed out, exiting pipeline.")
multiprocess_exit()

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/reolink",
"version": "0.0.65",
"version": "0.0.63",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/reolink",
"version": "0.0.65",
"version": "0.0.63",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/reolink",
"version": "0.0.65",
"version": "0.0.63",
"description": "Reolink Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/snapshot",
"version": "0.2.43",
"version": "0.2.40",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/snapshot",
"version": "0.2.43",
"version": "0.2.40",
"dependencies": {
"@types/node": "^20.10.6",
"sharp": "^0.33.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/snapshot",
"version": "0.2.43",
"version": "0.2.40",
"description": "Snapshot Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",

View File

@@ -1,6 +1,5 @@
import sdk, { BufferConverter, Image, ImageOptions, MediaObject, MediaObjectOptions, ScryptedDeviceBase, ScryptedMimeTypes } from "@scrypted/sdk";
import type sharp from 'sharp';
import type { KernelEnum } from "sharp";
let hasLoadedSharp = false;
let sharpInstance: typeof sharp;
@@ -9,6 +8,8 @@ export function loadSharp() {
hasLoadedSharp = true;
try {
sharpInstance = require('sharp');
// not exposed by sharp but it exists.
(sharpInstance.kernel as any).linear = 'linear';
console.log('sharp loaded');
}
catch (e) {
@@ -59,8 +60,11 @@ export class VipsImage implements Image {
});
}
if (options?.resize) {
let kernel: keyof KernelEnum;
let kernel: string;
switch (options?.resize.filter) {
case 'bilinear':
kernel = 'linear';
break;
case 'lanczos':
kernel = 'lanczos2';
break;
@@ -71,12 +75,12 @@ export class VipsImage implements Image {
kernel = 'nearest';
break;
default:
kernel = 'cubic';
kernel = 'linear';
break
}
transformed.resize(typeof options.resize.width === 'number' ? Math.floor(options.resize.width) : undefined, typeof options.resize.height === 'number' ? Math.floor(options.resize.height) : undefined, {
fit: "cover",
kernel: kernel,
kernel: kernel as any,
});
}
@@ -128,21 +132,13 @@ export class VipsImage implements Image {
}
}
export async function loadVipsMetadata(data: Buffer | string) {
export async function loadVipsImage(data: Buffer | string, sourceId: string) {
loadSharp();
const image = sharpInstance(data, {
failOn: 'none'
});
const metadata = await image.metadata();
return {
image,
metadata,
}
}
export async function loadVipsImage(data: Buffer | string, sourceId: string) {
loadSharp();
const { image, metadata } = await loadVipsMetadata(data);
const vipsImage = new VipsImage(image, metadata, sourceId);
return vipsImage;
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/tensorflow-lite",
"version": "0.1.49",
"version": "0.1.48",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tensorflow-lite",
"version": "0.1.49",
"version": "0.1.48",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -53,5 +53,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.49"
"version": "0.1.48"
}

View File

@@ -79,16 +79,10 @@ class TensorFlowLitePlugin(
model = "efficientdet_lite0_320_ptq"
self.yolo = "yolo" in model
self.yolov8 = "yolov8" in model
self.scrypted_model = "scrypted" in model
print(f'model: {model}')
if self.scrypted_model:
labelsFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/tflite-models/{branch}/scrypted_labels.txt",
f"{model_version}/scrypted_labels.txt",
)
elif self.yolo:
if self.yolo:
labelsFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/tflite-models/{branch}/coco_80cl.txt",
f"{model_version}/coco_80cl.txt",
@@ -106,10 +100,9 @@ class TensorFlowLitePlugin(
self.interpreter_count = 0
def downloadModel():
tflite_model = "best_full_integer_quant" if self.scrypted_model else model
return self.downloadFile(
f"https://github.com/koush/tflite-models/raw/{branch}/{model}/{tflite_model}{suffix}.tflite",
f"{model_version}/{tflite_model}{suffix}.tflite",
f"https://github.com/koush/tflite-models/raw/{branch}/{model}/{model}{suffix}.tflite",
f"{model_version}/{model}{suffix}.tflite",
)
try:
@@ -182,7 +175,6 @@ class TensorFlowLitePlugin(
"ssdlite_mobiledet_coco_qat_postprocess",
"yolov8n_full_integer_quant",
"yolov8n_full_integer_quant_320",
"scrypted_yolov8n_320",
"efficientdet_lite0_320_ptq",
"efficientdet_lite1_384_ptq",
"efficientdet_lite2_448_ptq",

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/webrtc",
"version": "0.2.18",
"version": "0.2.17",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/webrtc",
"version": "0.2.18",
"version": "0.2.17",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/webrtc",
"version": "0.2.18",
"version": "0.2.17",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -406,7 +406,6 @@ class WebRTCTrack implements RTCOutputMediaObjectTrack, RTCInputMediaObjectTrack
constructor(public connectionManagement: WebRTCConnectionManagement, public video: RTCRtpTransceiver, public audio: RTCRtpTransceiver, intercom: Intercom) {
this.control = new ScryptedSessionControl(intercom, audio);
this.connectionManagement.activeTracks.add(this);
}
async onStop(): Promise<void> {
@@ -624,7 +623,7 @@ export class WebRTCConnectionManagement implements RTCConnectionManagement {
async close(): Promise<void> {
for (const track of this.activeTracks) {
track.cleanup(false);
track.cleanup(true);
}
this.activeTracks.clear();
this.pc.close();
@@ -663,7 +662,7 @@ export async function createRTCPeerConnectionSink(
});
track.control.killed.promise.then(() => {
track.cleanup(false);
track.cleanup(true);
connection.pc.close();
});

View File

@@ -627,10 +627,9 @@ export async function fork() {
const cleanup = new Deferred<string>();
cleanup.promise.catch(e => this.console.log('cleaning up rtc connection:', e.message));
cleanup.promise.finally(() => setTimeout(() => process.exit(), 30000));
cleanup.promise.finally(() => setTimeout(() => process.exit(), 10000));
const connection = new WebRTCConnectionManagement(console, clientSession, maximumCompatibilityMode, clientOptions, options);
cleanup.promise.finally(() => connection.close().catch(() => { }));
const { pc } = connection;
waitClosed(pc).then(() => cleanup.resolve('peer connection closed'));

View File

@@ -3,7 +3,7 @@ const { PortablePython } = require('py')
const { once } = require('events');
module.exports = {
version: '3.10',
version: '3.11',
}
async function pipInstall(python, pkg) {

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/server",
"version": "0.97.0",
"version": "0.94.44",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/server",
"version": "0.97.0",
"version": "0.94.44",
"hasInstallScript": true,
"license": "ISC",
"dependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/server",
"version": "0.97.0",
"version": "0.95.0",
"description": "",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.11",

View File

@@ -56,7 +56,7 @@ class DeviceProxy(object):
def __init__(self, systemManager: SystemManager, id: str):
self.systemManager = systemManager
self.id = id
self.device: asyncio.Future[rpc.RpcProxy] = None
self.device: asyncio.Future[rpc.RpcPeer] = None
def __getattr__(self, name):
if name == 'id':
@@ -81,7 +81,7 @@ class DeviceProxy(object):
def __apply__(self, method: str, args: list):
if not self.device:
self.device = asyncio.ensure_future(self.systemManager.api.getDeviceById(self.id))
self.device = self.systemManager.api.getDeviceById(self.id)
async def apply():
device = await self.device

View File

@@ -93,21 +93,11 @@ export async function httpFetch<T extends HttpFetchOptions<Readable>>(options: T
}
}
let controller: AbortController;
let timeout: NodeJS.Timeout;
if (options.timeout) {
controller = new AbortController();
timeout = setTimeout(() => controller.abort(), options.timeout);
options.signal?.addEventListener('abort', () => controller.abort('abort'));
}
const request = proto.request(url, {
method: getFetchMethod(options),
rejectUnauthorized: options.rejectUnauthorized,
family: options.family,
headers: nodeHeaders,
signal: controller?.signal || options.signal,
timeout: options.timeout,
});
@@ -115,36 +105,30 @@ export async function httpFetch<T extends HttpFetchOptions<Readable>>(options: T
body.pipe(request);
else
request.end();
const [response] = await once(request, 'response') as [IncomingMessage];
try {
const [response] = await once(request, 'response') as [IncomingMessage];
if (!options?.ignoreStatusCode) {
try {
checkStatus(response.statusCode);
}
catch (e) {
readMessageBuffer(response).catch(() => { });
throw e;
}
if (!options?.ignoreStatusCode) {
try {
checkStatus(response.statusCode);
}
const incomingHeaders = new Headers();
for (const [k, v] of Object.entries(response.headers)) {
for (const vv of (typeof v === 'string' ? [v] : v)) {
incomingHeaders.append(k, vv)
}
catch (e) {
readMessageBuffer(response).catch(() => { });
throw e;
}
}
return {
statusCode: response.statusCode,
headers: incomingHeaders,
body: await httpFetchParseIncomingMessage(response, options.responseType),
};
}
finally {
clearTimeout(timeout);
const incomingHeaders = new Headers();
for (const [k, v] of Object.entries(response.headers)) {
for (const vv of (typeof v === 'string' ? [v] : v)) {
incomingHeaders.append(k, vv)
}
}
return {
statusCode: response.statusCode,
headers: incomingHeaders,
body: await httpFetchParseIncomingMessage(response, options.responseType),
};
}
function ensureType<T>(v: T) {
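
The node-side fetch above funnels an optional caller-supplied `AbortSignal` and an optional timeout into a single `AbortController`, then clears the timer once the request settles. A minimal sketch of that pattern, with an illustrative helper name:

```typescript
// Combine an optional external signal with an optional timeout into one AbortSignal.
function combineSignal(timeout?: number, signal?: AbortSignal) {
    if (!timeout)
        return { signal, cleanup: () => { } };
    const controller = new AbortController();
    const timer = setTimeout(() => controller.abort(), timeout);
    // an external abort is forwarded into the combined controller
    signal?.addEventListener('abort', () => controller.abort('abort'));
    return { signal: controller.signal, cleanup: () => clearTimeout(timer) };
}

// usage: pass the returned signal to the request, and call cleanup() in a
// finally block so the timer cannot keep the event loop alive afterwards.
```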

View File

@@ -5,7 +5,6 @@ export interface HttpFetchOptionsBase<B> {
family?: 4 | 6;
method?: string;
headers?: HeadersInit;
signal?: AbortSignal,
timeout?: number;
rejectUnauthorized?: boolean;
ignoreStatusCode?: boolean;
@@ -124,44 +123,30 @@ export async function domFetch<T extends HttpFetchOptions<BodyInit>>(options: T)
body = createStringOrBufferBody(headers, body);
}
let controller: AbortController;
let timeout: NodeJS.Timeout;
if (options.timeout) {
controller = new AbortController();
timeout = setTimeout(() => controller.abort(), options.timeout);
const { url } = options;
const response = await fetch(url, {
method: getFetchMethod(options),
credentials: options.withCredentials ? 'include' : undefined,
headers,
signal: options.timeout ? AbortSignal.timeout(options.timeout) : undefined,
body,
});
options.signal?.addEventListener('abort', () => controller.abort('abort'));
}
try {
const { url } = options;
const response = await fetch(url, {
method: getFetchMethod(options),
credentials: options.withCredentials ? 'include' : undefined,
headers,
signal: controller?.signal || options.signal,
body,
});
if (!options?.ignoreStatusCode) {
try {
checkStatus(response.status);
}
catch (e) {
response.arrayBuffer().catch(() => { });
throw e;
}
if (!options?.ignoreStatusCode) {
try {
checkStatus(response.status);
}
catch (e) {
response.arrayBuffer().catch(() => { });
throw e;
}
}
return {
statusCode: response.status,
headers: response.headers,
body: await domFetchParseIncomingMessage(response, options.responseType),
};
}
finally {
clearTimeout(timeout);
}
return {
statusCode: response.status,
headers: response.headers,
body: await domFetchParseIncomingMessage(response, options.responseType),
};
}
function ensureType<T>(v: T) {