Compare commits

28 Commits

Author SHA1 Message Date
Koushik Dutta
0b26f4df39 snapshot: avoid internal api 2024-03-30 20:06:02 -07:00
Koushik Dutta
be98083557 webrtc: fix ffmpeg leaks? 2024-03-29 23:32:15 -07:00
Koushik Dutta
f4dcb8e662 openvino: publish new models 2024-03-29 23:16:23 -07:00
Koushik Dutta
45186316a6 tensorflow-lite: publish new models 2024-03-29 23:03:12 -07:00
Koushik Dutta
c6e6c881fe coreml: update models. publish. 2024-03-29 22:52:44 -07:00
Koushik Dutta
62b07ea609 core: fix node upgrade 2024-03-29 13:08:41 -07:00
Koushik Dutta
a00ae60ab0 ha/proxmox: bump versions 2024-03-29 12:53:44 -07:00
Brett Jia
878753a526 server: treat self.device as future (#1401)
* server: treat self.device as future

* simplify

* modify annotation

* modify annotation
2024-03-28 19:36:14 -07:00
Koushik Dutta
3c1801ad01 cameras: fix signal + timeout combined usage 2024-03-27 22:05:05 -07:00
Koushik Dutta
30f9e358b7 cameras: fix fetch timeout bugs 2024-03-27 22:02:26 -07:00
Koushik Dutta
456faea1fd amcrest: fix two way audio termination 2024-03-27 15:39:42 -07:00
Koushik Dutta
5e58b1426e amcrest: fix two way audio termination 2024-03-27 15:39:21 -07:00
Koushik Dutta
ec6d617c09 cameras: update onvif two way 2024-03-27 12:21:41 -07:00
Koushik Dutta
1238abedb1 hikvision: fix events crossing streams in camera channels 2024-03-27 10:02:33 -07:00
Koushik Dutta
3e18b9e6aa amcrest: fix vehicle detection class 2024-03-26 19:29:58 -07:00
Koushik Dutta
dce76b5d87 amcrest: fix object detector types 2024-03-26 18:13:16 -07:00
Koushik Dutta
de645dfacb hikvision: add vehicle support 2024-03-26 14:08:34 -07:00
Koushik Dutta
6fd66db896 amcrest/hikvision: add support for smart detections. publish. 2024-03-26 10:41:32 -07:00
Koushik Dutta
62850163d7 hikvision: implement smart events 2024-03-25 23:22:02 -07:00
Koushik Dutta
b46a385a81 onvif: increase 2 way audio buffer to reduce stutter. 2024-03-25 13:28:29 -07:00
Koushik Dutta
c94fb231c6 cli: fix updater 2024-03-25 12:45:10 -07:00
Koushik Dutta
a3df934a88 chromecast: fix audio playback 2024-03-25 12:44:25 -07:00
Koushik Dutta
a6143e103e postrelease 2024-03-25 12:09:11 -07:00
Koushik Dutta
df705cb0e7 server: rollback portable python to 3.10 2024-03-25 12:09:00 -07:00
Koushik Dutta
6e7f291f81 hikvision: fix two way audio duration 2024-03-25 11:02:58 -07:00
Koushik Dutta
fa5b9f66db python-codecs: Fix process exit leak 2024-03-23 17:45:38 -07:00
Koushik Dutta
f760840a6d ha: publish 2024-03-23 12:48:30 -07:00
Koushik Dutta
f36ee6ccb5 postrelease 2024-03-23 12:34:40 -07:00
51 changed files with 802 additions and 291 deletions

View File

@@ -136,12 +136,17 @@ export async function readLine(readable: Readable) {
}
export async function readString(readable: Readable | Promise<Readable>) {
let data = '';
const buffer = await readBuffer(readable);
return buffer.toString();
}
export async function readBuffer(readable: Readable | Promise<Readable>) {
const buffers: Buffer[] = [];
readable = await readable;
readable.on('data', buffer => {
data += buffer.toString();
buffers.push(buffer);
});
readable.resume();
await once(readable, 'end')
return data;
return Buffer.concat(buffers);
}
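
A note on the hunk above: readString now delegates to a Buffer-accumulating readBuffer, so binary payloads and multi-byte characters split across 'data' chunks are no longer corrupted by per-chunk string decoding. A minimal usage sketch of the updated helpers, assuming the @scrypted/common/src/read-stream import path and the spawned commands (neither is part of this diff):

import { spawn } from 'child_process';
import { readBuffer, readString } from '@scrypted/common/src/read-stream';

async function example() {
    // readString decodes once, after the stream has ended.
    const text = await readString(spawn('ls', ['-la']).stdout);
    console.log(text);

    // readBuffer returns the raw concatenated Buffer, suitable for binary data.
    const raw = await readBuffer(spawn('cat', ['snapshot.jpg']).stdout);
    console.log('bytes:', raw.length);
}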

View File

@@ -1,6 +1,6 @@
# Home Assistant Addon Configuration
name: Scrypted
version: "18-jammy-full.s6-v0.93.0"
version: "20-jammy-full.s6-v0.96.0"
slug: scrypted
description: Scrypted is a high performance home video integration and automation platform
url: "https://github.com/koush/scrypted"

View File

@@ -10,7 +10,7 @@ function readyn() {
}
cd /tmp
SCRYPTED_VERSION=v0.93.0
SCRYPTED_VERSION=v0.96.0
SCRYPTED_TAR_ZST=scrypted-$SCRYPTED_VERSION.tar.zst
if [ -z "$VMID" ]
then

View File

@@ -1,12 +1,12 @@
{
"name": "scrypted",
"version": "1.3.13",
"version": "1.3.14",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "scrypted",
"version": "1.3.13",
"version": "1.3.14",
"license": "ISC",
"dependencies": {
"@scrypted/client": "^1.3.3",

View File

@@ -1,6 +1,6 @@
{
"name": "scrypted",
"version": "1.3.13",
"version": "1.3.14",
"description": "",
"main": "./dist/packages/cli/src/main.js",
"bin": {

View File

@@ -133,11 +133,7 @@ export async function serveMain(installVersion?: string) {
await startServer(installDir);
if (fs.existsSync(EXIT_FILE)) {
console.log('Exiting.');
process.exit(1);
}
else if (fs.existsSync(UPDATE_FILE)) {
if (fs.existsSync(UPDATE_FILE)) {
console.log('Update requested. Installing.');
await runCommandEatError('npm', '--prefix', installDir, 'install', '--production', '@scrypted/server@latest').catch(e => {
console.error('Update failed', e);
@@ -145,6 +141,10 @@ export async function serveMain(installVersion?: string) {
console.log('Exiting.');
process.exit(1);
}
else if (fs.existsSync(EXIT_FILE)) {
console.log('Exiting.');
process.exit(1);
}
else {
console.log(`Service unexpectedly exited. Restarting momentarily.`);
await sleep(10000);

View File

@@ -10,7 +10,7 @@
"port": 10081,
"request": "attach",
"skipFiles": [
"**/plugin-remote-worker.*",
"**/plugin-console.*",
"<node_internals>/**"
],
"preLaunchTask": "scrypted: deploy+debug",

View File

@@ -1,19 +1,21 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.135",
"version": "0.0.144",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/amcrest",
"version": "0.0.135",
"version": "0.0.144",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk"
"@scrypted/sdk": "file:../../sdk",
"content-type": "^1.0.5"
},
"devDependencies": {
"@types/node": "^20.10.8"
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
}
},
"../../common": {
@@ -23,23 +25,22 @@
"dependencies": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^3.0.2",
"node-fetch-commonjs": "^3.1.1",
"http-auth-utils": "^5.0.1",
"typescript": "^5.3.3"
},
"devDependencies": {
"@types/node": "^20.10.8",
"@types/node": "^20.11.0",
"ts-node": "^10.9.2"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.4",
"version": "0.3.29",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"axios": "^1.6.5",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
@@ -77,15 +78,29 @@
"resolved": "../../sdk",
"link": true
},
"node_modules/@types/content-type": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@types/content-type/-/content-type-1.1.8.tgz",
"integrity": "sha512-1tBhmVUeso3+ahfyaKluXe38p+94lovUZdoVfQ3OnJo9uJC42JT7CBoN3k9HYhAae+GwiBYmHu+N9FZhOG+2Pg==",
"dev": true
},
"node_modules/@types/node": {
"version": "20.10.8",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.8.tgz",
"integrity": "sha512-f8nQs3cLxbAFc00vEU59yf9UyGUftkPaLGfvbVOIDdx2i1b8epBqj2aNGyP19fiyXWvlmZ7qC1XLjAzw/OKIeA==",
"version": "20.11.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
"dev": true,
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.135",
"version": "0.0.144",
"description": "Amcrest Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -36,9 +36,11 @@
},
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk"
"@scrypted/sdk": "file:../../sdk",
"content-type": "^1.0.5"
},
"devDependencies": {
"@types/node": "^20.10.8"
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
}
}

View File

@@ -1,6 +1,74 @@
import { AuthFetchCredentialState, HttpFetchOptions, authHttpFetch } from '@scrypted/common/src/http-auth-fetch';
import { Readable } from 'stream';
import { readLine } from '@scrypted/common/src/read-stream';
import { parseHeaders, readBody, readMessage } from '@scrypted/common/src/rtsp-server';
import contentType from 'content-type';
import { IncomingMessage } from 'http';
import { EventEmitter, Readable } from 'stream';
import { Destroyable } from '../../rtsp/src/rtsp';
import { getDeviceInfo } from './probe';
import { Point } from '@scrypted/sdk';
// {
// "Action" : "Cross",
// "Class" : "Normal",
// "CountInGroup" : 1,
// "DetectRegion" : [
// [ 455, 260 ],
// [ 3586, 260 ],
// [ 3768, 7580 ],
// [ 382, 7451 ]
// ],
// "Direction" : "Enter",
// "EventID" : 10181,
// "GroupID" : 0,
// "Name" : "Rule1",
// "Object" : {
// "Action" : "Appear",
// "BoundingBox" : [ 2856, 1280, 3880, 4880 ],
// "Center" : [ 3368, 3080 ],
// "Confidence" : 0,
// "LowerBodyColor" : [ 0, 0, 0, 0 ],
// "MainColor" : [ 0, 0, 0, 0 ],
// "ObjectID" : 863,
// "ObjectType" : "Human",
// "RelativeID" : 0,
// "Speed" : 0
// },
// "PTS" : 43380319830.0,
// "RuleID" : 2,
// "Track" : [],
// "UTC" : 1711446999,
// "UTCMS" : 701
// }
export interface AmcrestObjectDetails {
Action: string;
BoundingBox: Point;
Center: Point;
Confidence: number;
LowerBodyColor: [number, number, number, number];
MainColor: [number, number, number, number];
ObjectID: number;
ObjectType: string;
RelativeID: number;
Speed: number;
}
export interface AmcrestEventData {
Action: string;
Class: string;
CountInGroup: number;
DetectRegion: Point[];
Direction: string;
EventID: number;
GroupID: number;
Name: string;
Object: AmcrestObjectDetails;
PTS: number;
RuleID: number;
Track: any[];
UTC: number;
UTCMS: number;
}
export enum AmcrestEvent {
MotionStart = "Code=VideoMotion;action=Start",
@@ -18,6 +86,10 @@ export enum AmcrestEvent {
DahuaTalkHangup = "Code=PassiveHungup;action=Start",
DahuaCallDeny = "Code=HungupPhone;action=Pulse",
DahuaTalkPulse = "Code=_CallNoAnswer_;action=Pulse",
SmartMotionHuman = "Code=SmartMotionHuman;action=Start",
SmartMotionVehicle = "Code=Vehicle;action=Start",
CrossLineDetection = "Code=CrossLineDetection;action=Start",
CrossRegionDetection = "Code=CrossRegionDetection;action=Start",
}
export class AmcrestCameraClient {
@@ -78,7 +150,8 @@ export class AmcrestCameraClient {
return response.body;
}
async listenEvents() {
async listenEvents(): Promise<Destroyable> {
const events = new EventEmitter();
const url = `http://${this.ip}/cgi-bin/eventManager.cgi?action=attach&codes=[All]`;
console.log('preparing event listener', url);
@@ -86,32 +159,102 @@ export class AmcrestCameraClient {
url,
responseType: 'readable',
});
const stream = response.body;
const stream: IncomingMessage = response.body;
(events as any).destroy = () => {
stream.destroy();
events.removeAllListeners();
};
stream.on('close', () => {
events.emit('close');
});
stream.on('end', () => {
events.emit('end');
});
stream.on('error', e => {
events.emit('error', e);
});
stream.socket.setKeepAlive(true);
stream.on('data', (buffer: Buffer) => {
const data = buffer.toString();
const parts = data.split(';');
let index: string;
try {
for (const part of parts) {
if (part.startsWith('index')) {
index = part.split('=')[1]?.trim();
const ct = stream.headers['content-type'];
// make content type parsable as content disposition filename
const cd = contentType.parse(ct);
let { boundary } = cd.parameters;
boundary = `--${boundary}`;
const boundaryEnd = `${boundary}--`;
(async () => {
while (true) {
let ignore = await readLine(stream);
ignore = ignore.trim();
if (!ignore)
continue;
if (ignore === boundaryEnd)
continue;
if (ignore !== boundary) {
this.console.error('expected boundary but found', ignore);
throw new Error('expected boundary');
}
const message = await readMessage(stream);
events.emit('data', message);
message.unshift('');
const headers = parseHeaders(message);
const body = await readBody(stream, headers);
const data = body.toString();
events.emit('data', data);
const parts = data.split(';');
let index: string;
try {
for (const part of parts) {
if (part.startsWith('index')) {
index = part.split('=')[1]?.trim();
}
}
}
catch (e) {
this.console.error('error parsing index', data);
}
let jsonData: any;
try {
for (const part of parts) {
if (part.startsWith('data')) {
jsonData = JSON.parse(part.split('=')[1]?.trim());
}
}
}
catch (e) {
this.console.error('error parsing data', data);
}
for (const event of Object.values(AmcrestEvent)) {
if (data.indexOf(event) !== -1) {
events.emit('event', event, index, data);
if (event === AmcrestEvent.SmartMotionHuman) {
events.emit('smart', 'person', jsonData);
}
else if (event === AmcrestEvent.SmartMotionVehicle) {
events.emit('smart', 'car', jsonData);
}
else if (event === AmcrestEvent.CrossLineDetection || event === AmcrestEvent.CrossRegionDetection) {
const eventData: AmcrestEventData = jsonData;
if (eventData?.Object?.ObjectType === 'Human') {
events.emit('smart', 'person', eventData);
}
else if (eventData?.Object?.ObjectType === 'Vehicle') {
events.emit('smart', 'car', eventData);
}
}
}
}
}
catch (e) {
this.console.error('error parsing index', data);
}
// this.console?.log('event', data);
for (const event of Object.values(AmcrestEvent)) {
if (data.indexOf(event) !== -1) {
stream.emit('event', event, index, data);
}
}
});
return stream;
})()
.catch(() => stream.destroy());
return events as any as Destroyable;
}
async enableContinousRecording(channel: number) {
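
The rewritten listener above walks the camera's multipart event stream boundary by boundary and re-emits parsed events on a dedicated emitter returned as a Destroyable, instead of handing back the raw HTTP stream. A hedged consumption sketch; the client construction and import path are assumptions, only the event names and payload types come from this diff:

import { AmcrestCameraClient, AmcrestEvent, AmcrestEventData } from './amcrest-api';

async function watchAmcrest(client: AmcrestCameraClient) {
    // listenEvents() now resolves to a Destroyable emitter rather than the HTTP response stream.
    const events = await client.listenEvents();

    events.on('event', (event: AmcrestEvent, index: string, payload: string) => {
        console.log('event', event, 'channel index', index);
    });

    // 'smart' carries the normalized class name plus the parsed JSON payload from the camera.
    events.on('smart', (className: string, data: AmcrestEventData) => {
        console.log('smart detection:', className, data?.Object?.ObjectType);
    });

    // the Destroyable contract allows tearing down the underlying stream on error.
    events.on('error', () => events.destroy());
}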

View File

@@ -1,11 +1,11 @@
import { ffmpegLogInitialOutput } from '@scrypted/common/src/media-helpers';
import { readLength } from "@scrypted/common/src/read-stream";
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, Lock, MediaObject, MediaStreamOptions, Reboot, RequestPictureOptions, RequestRecordingStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, VideoCameraConfiguration, VideoRecorder } from "@scrypted/sdk";
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, Lock, MediaObject, MediaStreamOptions, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, Reboot, RequestPictureOptions, RequestRecordingStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, VideoCameraConfiguration, VideoRecorder } from "@scrypted/sdk";
import child_process, { ChildProcess } from 'child_process';
import { PassThrough, Readable, Stream } from "stream";
import { OnvifIntercom } from "../../onvif/src/onvif-intercom";
import { RtspProvider, RtspSmartCamera, UrlMediaStreamOptions } from "../../rtsp/src/rtsp";
import { AmcrestCameraClient, AmcrestEvent } from "./amcrest-api";
import { AmcrestCameraClient, AmcrestEvent, AmcrestEventData } from "./amcrest-api";
const { mediaManager } = sdk;
@@ -22,12 +22,13 @@ function findValue(blob: string, prefix: string, key: string) {
return parts[1];
}
class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration, Camera, Intercom, Lock, VideoRecorder, Reboot {
class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration, Camera, Intercom, Lock, VideoRecorder, Reboot, ObjectDetector {
eventStream: Stream;
cp: ChildProcess;
client: AmcrestCameraClient;
videoStreamOptions: Promise<UrlMediaStreamOptions[]>;
onvifIntercom = new OnvifIntercom(this);
hasSmartDetection: boolean;
constructor(nativeId: string, provider: RtspProvider) {
super(nativeId, provider);
@@ -36,6 +37,7 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
this.storage.removeItem('amcrestDoorbell');
}
this.hasSmartDetection = this.storage.getItem('hasSmartDetection') === 'true';
this.updateDevice();
this.updateDeviceInfo();
}
@@ -184,7 +186,11 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
if (idx.toString() !== channelNumber)
return;
}
if (event === AmcrestEvent.MotionStart) {
if (event === AmcrestEvent.MotionStart
|| event === AmcrestEvent.SmartMotionHuman
|| event === AmcrestEvent.SmartMotionVehicle
|| event === AmcrestEvent.CrossLineDetection
|| event === AmcrestEvent.CrossRegionDetection) {
this.motionDetected = true;
resetMotionTimeout();
}
@@ -231,9 +237,42 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
});
events.on('smart', (className: string, data: AmcrestEventData) => {
if (!this.hasSmartDetection) {
this.hasSmartDetection = true;
this.storage.setItem('hasSmartDetection', 'true');
this.updateDevice();
}
const detected: ObjectsDetected = {
timestamp: Date.now(),
detections: [
{
score: 1,
className,
}
],
};
this.onDeviceEvent(ScryptedInterface.ObjectDetector, detected);
});
return events;
}
async getDetectionInput(detectionId: string, eventId?: any): Promise<MediaObject> {
return;
}
async getObjectTypes(): Promise<ObjectDetectionTypes> {
return {
classes: [
'person',
'car',
],
}
}
async getOtherSettings(): Promise<Setting[]> {
const ret = await super.getOtherSettings();
ret.push(
@@ -472,13 +511,19 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
if (isDoorbell || twoWayAudio) {
interfaces.push(ScryptedInterface.Intercom);
}
const enableDahuaLock = this.storage.getItem('enableDahuaLock') === 'true';
if (isDoorbell && doorbellType === DAHUA_DOORBELL_TYPE && enableDahuaLock) {
interfaces.push(ScryptedInterface.Lock);
}
const continuousRecording = this.storage.getItem('continuousRecording') === 'true';
if (continuousRecording)
interfaces.push(ScryptedInterface.VideoRecorder);
if (this.hasSmartDetection)
interfaces.push(ScryptedInterface.ObjectDetector);
this.provider.updateDevice(this.nativeId, this.name, interfaces, type);
}
@@ -521,7 +566,7 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
const doorbellType = this.storage.getItem('doorbellType');
// not sure if this all works, since i don't actually have a doorbell.
// good luck!
const channel = this.getRtspChannel() || '1';
@@ -548,12 +593,22 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
else {
args.push(
"-vn",
'-acodec', 'aac',
'-f', 'adts',
'pipe:3',
);
"-vn",
'-acodec', 'aac',
'-f', 'adts',
'pipe:3',
);
contentType = 'Audio/AAC';
// args.push(
// "-vn",
// '-acodec', 'pcm_mulaw',
// '-ac', '1',
// '-ar', '8000',
// '-sample_fmt', 's16',
// '-f', 'mulaw',
// 'pipe:3',
// );
// contentType = 'Audio/G.711A';
}
this.console.log('ffmpeg intercom', args);
@@ -573,15 +628,19 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
// seems the dahua doorbells preferred 1024 chunks. should investigate adts
// parsing and sending multipart chunks instead.
const passthrough = new PassThrough();
const abortController = new AbortController();
this.getClient().request({
url,
method: 'POST',
headers: {
'Content-Type': contentType,
'Content-Length': '9999999'
'Content-Length': '9999999',
},
signal: abortController.signal,
responseType: 'readable',
}, passthrough);
}, passthrough)
.catch(() => { })
.finally(() => this.console.log('request finished'))
try {
while (true) {
@@ -593,7 +652,8 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
finally {
this.console.log('audio finished');
passthrough.end();
passthrough.destroy();
abortController.abort();
}
this.stopIntercom();

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/chromecast",
"version": "0.1.57",
"version": "0.1.58",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/chromecast",
"version": "0.1.57",
"version": "0.1.58",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/chromecast",
"version": "0.1.57",
"version": "0.1.58",
"description": "Send video, audio, and text to speech notifications to Chromecast and Google Home devices",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -183,7 +183,7 @@ class CastDevice extends ScryptedDeviceBase implements MediaPlayer, Refresh, Eng
media = await mediaManager.createMediaObjectFromUrl(media);
}
}
else if (options?.mimeType?.startsWith('image/')) {
else if (options?.mimeType?.startsWith('image/') || options?.mimeType?.startsWith('audio/')) {
url = await mediaManager.convertMediaObjectToInsecureLocalUrl(media, options?.mimeType);
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/core",
"version": "0.3.18",
"version": "0.3.19",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/core",
"version": "0.3.18",
"version": "0.3.19",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/core",
"version": "0.3.18",
"version": "0.3.19",
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -11,7 +11,7 @@ export async function checkLxcDependencies() {
let needRestart = false;
if (!process.version.startsWith('v20.')) {
const cp = child_process.spawn('sh', ['-c', 'curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && apt install -y nodejs']);
const cp = child_process.spawn('sh', ['-c', 'apt update -y && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && apt install -y nodejs']);
const [exitCode] = await once(cp, 'exit');
if (exitCode !== 0)
sdk.log.a('Failed to install Node.js 20.x.');

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/coreml",
"version": "0.1.29",
"version": "0.1.30",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/coreml",
"version": "0.1.29",
"version": "0.1.30",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -41,5 +41,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.29"
"version": "0.1.30"
}

View File

@@ -12,13 +12,14 @@ from PIL import Image
from scrypted_sdk import Setting, SettingValue
import yolo
import ast
from predict import Prediction, PredictPlugin, Rectangle
predictExecutor = concurrent.futures.ThreadPoolExecutor(8, "CoreML-Predict")
def parse_label_contents(contents: str):
lines = contents.splitlines()
lines = contents.split(',')
ret = {}
for row_number, content in enumerate(lines):
pair = re.split(r"[:\s]+", content.strip(), maxsplit=1)
@@ -29,6 +30,20 @@ def parse_label_contents(contents: str):
return ret
def parse_labels(userDefined):
yolo = userDefined.get("names") or userDefined.get("yolo.names")
if yolo:
j = ast.literal_eval(yolo)
ret = {}
for k, v in j.items():
ret[int(k)] = v
return ret
classes = userDefined.get("classes")
if not classes:
raise Exception("no classes found in model metadata")
return parse_label_contents(classes)
class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Settings):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
@@ -39,30 +54,23 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
self.yolo = "yolo" in model
self.yolov8 = "yolov8" in model
self.yolov9 = "yolov9" in model
model_version = "v2"
self.scrypted_model = "scrypted" in model
model_version = "v3"
mlmodel = 'model' if self.yolov8 or self.yolov9 else model
print(f"model: {model}")
if not self.yolo:
# todo convert these to mlpackage
labelsFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/coco_labels.txt",
"coco_labels.txt",
)
modelFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/{model}.mlmodel",
f"https://github.com/koush/coreml-models/raw/main/{model}/{mlmodel}.mlmodel",
f"{model}.mlmodel",
)
else:
if self.yolov8:
modelFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/{model}.mlmodel",
f"{model}.mlmodel",
)
elif self.yolov9:
if self.yolov8 or self.yolov9:
files = [
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/weights/weight.bin",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{model}.mlmodel",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{mlmodel}.mlmodel",
f"{model}/{model}.mlpackage/Manifest.json",
]
@@ -77,7 +85,7 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/FeatureDescriptions.json",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/Metadata.json",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/weights/weight.bin",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{model}.mlmodel",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{mlmodel}.mlmodel",
f"{model}/{model}.mlpackage/Manifest.json",
]
@@ -88,11 +96,6 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
)
modelFile = os.path.dirname(p)
labelsFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/coco_80cl.txt",
f"{model_version}/{model}/coco_80cl.txt",
)
self.model = ct.models.MLModel(modelFile)
self.modelspec = self.model.get_spec()
@@ -100,10 +103,7 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
self.inputheight = self.inputdesc.type.imageType.height
self.inputwidth = self.inputdesc.type.imageType.width
labels_contents = open(labelsFile, "r").read()
self.labels = parse_label_contents(labels_contents)
# csv in mobilenet model
# self.modelspec.description.metadata.userDefined['classes']
self.labels = parse_labels(self.modelspec.description.metadata.userDefined)
self.loop = asyncio.get_event_loop()
self.minThreshold = 0.2
@@ -116,11 +116,13 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
"description": "The detection model used to find objects.",
"choices": [
"Default",
"ssdlite_mobilenet_v2",
"yolov4-tiny",
"yolov8n",
"scrypted_yolov8n_320",
"yolov8n_320",
"yolov9c_320",
"ssdlite_mobilenet_v2",
"yolov8n",
"yolov9c",
"yolov4-tiny",
],
"value": model,
},

View File

@@ -1,22 +1,23 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.137",
"version": "0.0.146",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/hikvision",
"version": "0.0.137",
"version": "0.0.146",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/xml2js": "^0.4.11",
"lodash": "^4.17.21",
"xml2js": "^0.6.0"
"@types/xml2js": "^0.4.14",
"content-type": "^1.0.5",
"xml2js": "^0.6.2"
},
"devDependencies": {
"@types/node": "^18.15.11"
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
}
},
"../../common": {
@@ -27,17 +28,16 @@
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^5.0.1",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^5.3.3"
},
"devDependencies": {
"@types/node": "^20.10.8",
"@types/node": "^20.11.0",
"ts-node": "^10.9.2"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.4",
"version": "0.3.29",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -83,33 +83,50 @@
"resolved": "../../sdk",
"link": true
},
"node_modules/@types/content-type": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@types/content-type/-/content-type-1.1.8.tgz",
"integrity": "sha512-1tBhmVUeso3+ahfyaKluXe38p+94lovUZdoVfQ3OnJo9uJC42JT7CBoN3k9HYhAae+GwiBYmHu+N9FZhOG+2Pg==",
"dev": true
},
"node_modules/@types/node": {
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
"version": "20.11.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/@types/xml2js": {
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
"integrity": "sha512-JdigeAKmCyoJUiQljjr7tQG3if9NkqGUgwEUqBvV0N7LM4HyQk7UXCnusRa1lnvXAEYJ8mw8GtZWioagNztOwA==",
"version": "0.4.14",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
"node_modules/content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
},
"node_modules/xml2js": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.0.tgz",
"integrity": "sha512-eLTh0kA8uHceqesPqSE+VvO1CDDJWMwlQfB6LuN6T8w6MaDJ8Txm8P7s5cHD0miF0V+GGTZrDQfxPZQVsur33w==",
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
"dependencies": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"
@@ -133,9 +150,8 @@
"requires": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"@types/node": "^20.10.8",
"@types/node": "^20.11.0",
"http-auth-utils": "^5.0.1",
"node-fetch-commonjs": "^3.1.1",
"ts-node": "^10.9.2",
"typescript": "^5.3.3"
}
@@ -164,33 +180,47 @@
"webpack-bundle-analyzer": "^4.5.0"
}
},
"@types/content-type": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@types/content-type/-/content-type-1.1.8.tgz",
"integrity": "sha512-1tBhmVUeso3+ahfyaKluXe38p+94lovUZdoVfQ3OnJo9uJC42JT7CBoN3k9HYhAae+GwiBYmHu+N9FZhOG+2Pg==",
"dev": true
},
"@types/node": {
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
"version": "20.11.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
"requires": {
"undici-types": "~5.26.4"
}
},
"@types/xml2js": {
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
"integrity": "sha512-JdigeAKmCyoJUiQljjr7tQG3if9NkqGUgwEUqBvV0N7LM4HyQk7UXCnusRa1lnvXAEYJ8mw8GtZWioagNztOwA==",
"version": "0.4.14",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
"requires": {
"@types/node": "*"
}
},
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
"content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="
},
"sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
},
"xml2js": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.0.tgz",
"integrity": "sha512-eLTh0kA8uHceqesPqSE+VvO1CDDJWMwlQfB6LuN6T8w6MaDJ8Txm8P7s5cHD0miF0V+GGTZrDQfxPZQVsur33w==",
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
"requires": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.137",
"version": "0.0.146",
"description": "Hikvision Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -37,11 +37,12 @@
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/xml2js": "^0.4.11",
"lodash": "^4.17.21",
"xml2js": "^0.6.0"
"@types/xml2js": "^0.4.14",
"content-type": "^1.0.5",
"xml2js": "^0.6.2"
},
"devDependencies": {
"@types/node": "^18.15.11"
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
}
}

View File

@@ -1,8 +1,17 @@
import { AuthFetchCredentialState, HttpFetchOptions, authHttpFetch } from '@scrypted/common/src/http-auth-fetch';
import { readLine } from '@scrypted/common/src/read-stream';
import { parseHeaders, readBody, readMessage } from '@scrypted/common/src/rtsp-server';
import contentType from 'content-type';
import { IncomingMessage } from 'http';
import { Readable } from 'stream';
import { EventEmitter, Readable } from 'stream';
import { Destroyable } from '../../rtsp/src/rtsp';
import { getDeviceInfo } from './probe';
export const detectionMap = {
human: 'person',
vehicle: 'car',
}
export function getChannel(channel: string) {
return channel || '101';
}
@@ -15,6 +24,8 @@ export enum HikvisionCameraEvent {
// <eventType>linedetection</eventType>
// <eventState>inactive</eventState>
LineDetection = "<eventType>linedetection</eventType>",
RegionEntrance = "<eventType>regionEntrance</eventType>",
RegionExit = "<eventType>regionExit</eventType>",
// <eventType>fielddetection</eventType>
// <eventState>active</eventState>
// <eventType>fielddetection</eventType>
@@ -31,7 +42,7 @@ export interface HikvisionCameraStreamSetup {
export class HikvisionCameraAPI {
credential: AuthFetchCredentialState;
deviceModel: Promise<string>;
listenerPromise: Promise<IncomingMessage>;
listenerPromise: Promise<Destroyable>;
constructor(public ip: string, username: string, password: string, public console: Console) {
this.credential = {
@@ -129,35 +140,106 @@ export class HikvisionCameraAPI {
return response.body;
}
async listenEvents() {
async listenEvents(): Promise<Destroyable> {
const events = new EventEmitter();
(events as any).destroy = () => { };
// support multiple cameras listening to a single stream
if (!this.listenerPromise) {
const url = `http://${this.ip}/ISAPI/Event/notification/alertStream`;
let lastSmartDetection: string;
this.listenerPromise = this.request({
url,
responseType: 'readable',
}).then(response => {
const stream = response.body;
const stream: IncomingMessage = response.body;
(events as any).destroy = () => {
stream.destroy();
events.removeAllListeners();
};
stream.on('close', () => {
this.listenerPromise = undefined;
events.emit('close');
});
stream.on('end', () => {
this.listenerPromise = undefined;
events.emit('end');
});
stream.on('error', e => {
this.listenerPromise = undefined;
events.emit('error', e);
});
stream.socket.setKeepAlive(true);
stream.on('data', (buffer: Buffer) => {
const data = buffer.toString();
for (const event of Object.values(HikvisionCameraEvent)) {
if (data.indexOf(event) !== -1) {
const cameraNumber = data.match(/<channelID>(.*?)</)?.[1] || data.match(/<dynChannelID>(.*?)</)?.[1];
const inactive = data.indexOf('<eventState>inactive</eventState>') !== -1;
stream.emit('event', event, cameraNumber, inactive, data);
const ct = stream.headers['content-type'];
// make content type parsable as content disposition filename
const cd = contentType.parse(ct);
let { boundary } = cd.parameters;
boundary = `--${boundary}`;
const boundaryEnd = `${boundary}--`;
(async () => {
while (true) {
let ignore = await readLine(stream);
ignore = ignore.trim();
if (!ignore)
continue;
if (ignore === boundaryEnd)
continue;
if (ignore !== boundary) {
this.console.error('expected boundary but found', ignore);
throw new Error('expected boundary');
}
const message = await readMessage(stream);
events.emit('data', message);
message.unshift('');
const headers = parseHeaders(message);
const body = await readBody(stream, headers);
try {
if (!headers['content-type'].includes('application/xml') && lastSmartDetection) {
if (!headers['content-type']?.startsWith('image/jpeg')) {
continue;
}
events.emit('smart', lastSmartDetection, body);
lastSmartDetection = undefined;
continue;
}
}
finally {
// is it possible that smart detections are sent without images?
// if so, flush this detection.
if (lastSmartDetection) {
events.emit('smart', lastSmartDetection);
}
}
const data = body.toString();
events.emit('data', data);
for (const event of Object.values(HikvisionCameraEvent)) {
if (data.indexOf(event) !== -1) {
const cameraNumber = data.match(/<channelID>(.*?)</)?.[1] || data.match(/<dynChannelID>(.*?)</)?.[1];
const inactive = data.indexOf('<eventState>inactive</eventState>') !== -1;
events.emit('event', event, cameraNumber, inactive, data);
if (event === HikvisionCameraEvent.LineDetection
|| event === HikvisionCameraEvent.RegionEntrance
|| event === HikvisionCameraEvent.RegionExit
|| event === HikvisionCameraEvent.FieldDetection) {
lastSmartDetection = data;
}
}
}
}
});
return stream;
})()
.catch(() => stream.destroy());
return events as any as Destroyable;
});
this.listenerPromise.catch(() => this.listenerPromise = undefined);
this.listenerPromise.then(stream => {
stream.on('close', () => this.listenerPromise = undefined);
stream.on('end', () => this.listenerPromise = undefined);
});
}
return this.listenerPromise;
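
Smart events on the Hikvision alert stream now arrive as two multipart parts, an XML alert followed (usually) by a JPEG frame, which the loop above pairs via lastSmartDetection before emitting 'smart'. A hedged listener sketch; the import paths are assumptions, and the XML field access mirrors the main.ts hunk further below:

import xml2js from 'xml2js';
import { HikvisionCameraAPI } from './hikvision-camera-api';

async function watchHikvision(api: HikvisionCameraAPI) {
    const events = await api.listenEvents();

    // data is the raw XML alert; image is the JPEG part the camera attached, if any.
    events.on('smart', async (data: string, image?: Buffer) => {
        const xml = await xml2js.parseStringPromise(data);
        const [channelId] = xml.EventNotificationAlert.channelID;
        console.log('smart event on channel', channelId, 'snapshot bytes:', image?.length ?? 0);
    });

    events.on('error', () => events.destroy());
}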

View File

@@ -1,11 +1,12 @@
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, MediaObject, MediaStreamOptions, Reboot, RequestPictureOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting } from "@scrypted/sdk";
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, MediaObject, MediaStreamOptions, ObjectDetectionResult, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, Reboot, RequestPictureOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting } from "@scrypted/sdk";
import crypto from 'crypto';
import { PassThrough } from "stream";
import xml2js from 'xml2js';
import { RtpPacket } from '../../../external/werift/packages/rtp/src/rtp/rtp';
import { OnvifIntercom } from "../../onvif/src/onvif-intercom";
import { RtspProvider, RtspSmartCamera, UrlMediaStreamOptions } from "../../rtsp/src/rtsp";
import { startRtpForwarderProcess } from '../../webrtc/src/rtp-forwarders';
import { HikvisionCameraAPI, HikvisionCameraEvent } from "./hikvision-camera-api";
import { HikvisionCameraAPI, HikvisionCameraEvent, detectionMap } from "./hikvision-camera-api";
const { mediaManager } = sdk;
@@ -15,15 +16,17 @@ function channelToCameraNumber(channel: string) {
return channel.substring(0, channel.length - 2);
}
class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboot {
class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboot, ObjectDetector {
detectedChannels: Promise<Map<string, MediaStreamOptions>>;
client: HikvisionCameraAPI;
onvifIntercom = new OnvifIntercom(this);
activeIntercom: Awaited<ReturnType<typeof startRtpForwarderProcess>>;
hasSmartDetection: boolean;
constructor(nativeId: string, provider: RtspProvider) {
super(nativeId, provider);
this.hasSmartDetection = this.storage.getItem('hasSmartDetection') === 'true';
this.updateDevice();
this.updateDeviceInfo();
}
@@ -63,41 +66,52 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
let ignoreCameraNumber: boolean;
const motionTimeoutDuration = 20000;
// check if the camera+channel field is in use, and filter events.
const checkCameraNumber = async (cameraNumber: string) => {
// check if the camera+channel field is in use, and filter events.
if (this.getRtspChannel()) {
// it is possible to set it up to use a camera number
// on an nvr IP (which gives RTSP urls through the NVR), but then use a http port
// that gives a filtered event stream from only that camera.
// in this case, the camera numbers will not
// match as they will always be "1".
// to detect that a camera specific endpoint is being used
// can look at the channel ids, and see if that camera number is found.
// this is different from the use case where the NVR or camera
// is using a port other than 80 (the default).
// could add a setting to have the user explicitly denote nvr usage
// but that is error prone.
const userCameraNumber = this.getCameraNumber();
if (ignoreCameraNumber === undefined && this.detectedChannels) {
const channelIds = (await this.detectedChannels).keys();
ignoreCameraNumber = true;
for (const id of channelIds) {
if (channelToCameraNumber(id) === userCameraNumber) {
ignoreCameraNumber = false;
break;
}
}
}
if (!ignoreCameraNumber && cameraNumber !== userCameraNumber) {
// this.console.error(`### Skipping motion event ${cameraNumber} != ${this.getCameraNumber()}`);
return false;
}
}
return true;
};
events.on('event', async (event: HikvisionCameraEvent, cameraNumber: string, inactive: boolean) => {
if (event === HikvisionCameraEvent.MotionDetected
|| event === HikvisionCameraEvent.LineDetection
|| event === HikvisionCameraEvent.RegionEntrance
|| event === HikvisionCameraEvent.RegionExit
|| event === HikvisionCameraEvent.FieldDetection) {
// check if the camera+channel field is in use, and filter events.
if (this.getRtspChannel()) {
// it is possible to set it up to use a camera number
// on an nvr IP (which gives RTSP urls through the NVR), but then use a http port
// that gives a filtered event stream from only that camera.
// in this case, the camera numbers will not
// match as they will always be "1".
// to detect that a camera specific endpoint is being used
// can look at the channel ids, and see if that camera number is found.
// this is different from the use case where the NVR or camera
// is using a port other than 80 (the default).
// could add a setting to have the user explicitly denote nvr usage
// but that is error prone.
const userCameraNumber = this.getCameraNumber();
if (ignoreCameraNumber === undefined && this.detectedChannels) {
const channelIds = (await this.detectedChannels).keys();
ignoreCameraNumber = true;
for (const id of channelIds) {
if (channelToCameraNumber(id) === userCameraNumber) {
ignoreCameraNumber = false;
break;
}
}
}
if (!ignoreCameraNumber && cameraNumber !== userCameraNumber) {
// this.console.error(`### Skipping motion event ${cameraNumber} != ${this.getCameraNumber()}`);
return;
}
}
if (!await checkCameraNumber(cameraNumber))
return;
this.motionDetected = true;
clearTimeout(motionTimeout);
@@ -106,11 +120,107 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
this.motionDetected = false;
}, motionTimeoutDuration);
}
})
});
let inputDimensions: [number, number];
events.on('smart', async (data: string, image: Buffer) => {
if (!this.hasSmartDetection) {
this.hasSmartDetection = true;
this.storage.setItem('hasSmartDetection', 'true');
this.updateDevice();
}
const xml = await xml2js.parseStringPromise(data);
const [channelId] = xml.EventNotificationAlert.channelID;
if (!await checkCameraNumber(channelId)) {
this.console.warn('chann fail')
return;
}
const now = Date.now();
let detections: ObjectDetectionResult[] = xml.EventNotificationAlert?.DetectionRegionList?.map(region => {
const { DetectionRegionEntry } = region;
const dre = DetectionRegionEntry[0];
if (!DetectionRegionEntry)
return;
const { detectionTarget } = dre;
// const { TargetRect } = dre;
// const { X, Y, width, height } = TargetRect[0];
const [name] = detectionTarget;
return {
score: 1,
className: detectionMap[name] || name,
// boundingBox: [
// parseInt(X),
// parseInt(Y),
// parseInt(width),
// parseInt(height),
// ],
// movement: {
// moving: true,
// firstSeen: now,
// lastSeen: now,
// }
} as ObjectDetectionResult;
});
detections = detections?.filter(d => d);
if (!detections?.length)
return;
// if (inputDimensions === undefined && loadSharp()) {
// try {
// const { image: i, metadata } = await loadVipsMetadata(image);
// i.destroy();
// inputDimensions = [metadata.width, metadata.height];
// }
// catch (e) {
// inputDimensions = null;
// }
// finally {
// }
// }
let detectionId: string;
if (image) {
detectionId = crypto.randomBytes(4).toString('hex');
this.recentDetections.set(detectionId, image);
setTimeout(() => this.recentDetections.delete(detectionId), 10000);
}
const detected: ObjectsDetected = {
inputDimensions,
detectionId,
timestamp: now,
detections,
};
this.onDeviceEvent(ScryptedInterface.ObjectDetector, detected);
});
return events;
}
recentDetections = new Map<string, Buffer>();
async getDetectionInput(detectionId: string, eventId?: any): Promise<MediaObject> {
const image = this.recentDetections.get(detectionId);
if (!image)
return;
return mediaManager.createMediaObject(image, 'image/jpeg');
}
async getObjectTypes(): Promise<ObjectDetectionTypes> {
return {
classes: [
...Object.values(detectionMap),
]
}
}
createClient() {
return new HikvisionCameraAPI(this.getHttpAddress(), this.getUsername(), this.getPassword(), this.console);
}
@@ -284,6 +394,9 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
interfaces.push(ScryptedInterface.Intercom);
}
if (this.hasSmartDetection)
interfaces.push(ScryptedInterface.ObjectDetector);
this.provider.updateDevice(this.nativeId, this.name, interfaces, type);
}
@@ -408,7 +521,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
const put = this.getClient().request({
url,
method: 'PUT',
responseType: 'readable',
responseType: 'text',
headers: {
'Content-Type': 'application/octet-stream',
// 'Connection': 'close',
@@ -440,6 +553,12 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
forwarder.killPromise.finally(() => {
this.console.log('audio finished');
passthrough.end();
setTimeout(() => {
this.stopIntercom();
}, 1000);
});
put.finally(() => {
this.stopIntercom();
});
@@ -448,7 +567,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
if (response.statusCode !== 200)
forwarder.kill();
})
.catch(() => forwarder.kill());
.catch(() => forwarder.kill());
}
async stopIntercom(): Promise<void> {
@@ -581,4 +700,4 @@ class HikvisionProvider extends RtspProvider {
}
}
export default new HikvisionProvider();
export default HikvisionProvider;
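
With the change above, the camera caches the JPEG that accompanied each smart detection for ten seconds under a random detectionId, and getDetectionInput serves it back on request. A hedged sketch of how a consumer might fetch that snapshot; the systemManager/mediaManager calls are ordinary Scrypted SDK usage assumed here, not shown in this diff:

import sdk, { ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedInterface } from '@scrypted/sdk';

const { systemManager, mediaManager } = sdk;

function watchDetections(id: string) {
    const camera = systemManager.getDeviceById<ObjectDetector & ScryptedDevice>(id);
    camera.listen(ScryptedInterface.ObjectDetector, async (source, details, detected: ObjectsDetected) => {
        if (!detected?.detectionId)
            return;
        // getDetectionInput returns the cached JPEG while it is still inside the 10 second window.
        const mo = await camera.getDetectionInput(detected.detectionId);
        if (!mo)
            return;
        const jpeg = await mediaManager.convertMediaObjectToBuffer(mo, 'image/jpeg');
        console.log('detection snapshot bytes:', jpeg.length);
    });
}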

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/onvif",
"version": "0.1.10",
"version": "0.1.13",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/onvif",
"version": "0.1.10",
"version": "0.1.13",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/onvif",
"version": "0.1.10",
"version": "0.1.13",
"description": "ONVIF Camera Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",

View File

@@ -97,7 +97,7 @@ export class OnvifIntercom implements Intercom {
this.camera.console.log('backchannel transport', transportDict);
const availableMatches = audioBackchannel.rtpmaps.filter(rtpmap => rtpmap.ffmpegEncoder);
const defaultMatch = audioBackchannel.rtpmaps.find(rtpmap => rtpmap.ffmpegEncoder);
const defaultMatch = audioBackchannel.rtpmaps.find(rtpmap => rtpmap.ffmpegEncoder === 'pcm_mulaw') || audioBackchannel.rtpmaps.find(rtpmap => rtpmap.ffmpegEncoder);
if (!defaultMatch)
throw new Error('no supported codec was found for back channel');
@@ -151,7 +151,7 @@ export class OnvifIntercom implements Intercom {
}
const elapsedRtpTimeMs = Math.abs(pending.header.timestamp - p.header.timestamp) / 8000 * 1000;
if (elapsedRtpTimeMs <= 60) {
if (elapsedRtpTimeMs <= 160) {
pending.payload = Buffer.concat([pending.payload, p.payload]);
return;
}
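
The two edits above prefer a pcm_mulaw backchannel when the camera offers one and widen the RTP aggregation window from 60 ms to 160 ms. Since G.711 backchannel audio runs on an 8 kHz timestamp clock, the elapsed-time check works out as in this small illustration (not code from the plugin):

// G.711/mulaw RTP uses an 8 kHz timestamp clock: 8 ticks per millisecond.
function elapsedRtpMs(a: number, b: number) {
    return Math.abs(a - b) / 8000 * 1000;
}

// A typical 20 ms G.711 packet advances the timestamp by 160 ticks, so the wider
// window batches several more 20 ms frames per write, which is what reduces stutter.
console.log(elapsedRtpMs(1280, 0)); // 160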

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/openvino",
"version": "0.1.54",
"version": "0.1.55",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/openvino",
"version": "0.1.54",
"version": "0.1.55",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -41,5 +41,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.54"
"version": "0.1.55"
}

View File

@@ -79,14 +79,19 @@ class OpenVINOPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.S
self.yolo = 'yolo' in model
self.yolov8 = "yolov8" in model
self.yolov9 = "yolov9" in model
self.scrypted_model = "scrypted" in model
self.sigmoid = model == 'yolo-v4-tiny-tf'
print(f'model/mode/precision: {model}/{mode}/{precision}')
ovmodel = 'best' if self.scrypted_model else model
model_version = 'v4'
xmlFile = self.downloadFile(f'https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{model}.xml', f'{model_version}/{precision}/{model}.xml')
binFile = self.downloadFile(f'https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{model}.bin', f'{model_version}/{precision}/{model}.bin')
if self.yolo:
xmlFile = self.downloadFile(f'https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.xml', f'{model_version}/{precision}/{ovmodel}.xml')
binFile = self.downloadFile(f'https://raw.githubusercontent.com/koush/openvino-models/main/{model}/{precision}/{ovmodel}.bin', f'{model_version}/{precision}/{ovmodel}.bin')
if self.scrypted_model:
labelsFile = self.downloadFile('https://raw.githubusercontent.com/koush/openvino-models/main/scrypted_labels.txt', 'scrypted_labels.txt')
elif self.yolo:
labelsFile = self.downloadFile('https://raw.githubusercontent.com/koush/openvino-models/main/coco_80cl.txt', 'coco_80cl.txt')
else:
labelsFile = self.downloadFile('https://raw.githubusercontent.com/koush/openvino-models/main/coco_labels.txt', 'coco_labels.txt')
@@ -132,13 +137,14 @@ class OpenVINOPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.S
'description': 'The detection model used to find objects.',
'choices': [
'Default',
'scrypted_yolov8n_320',
'yolov8n_320',
'yolov9c_320',
'ssd_mobilenet_v1_coco',
'ssdlite_mobilenet_v2',
'yolo-v3-tiny-tf',
'yolo-v4-tiny-tf',
'yolov8n',
'yolov8n_320',
'yolov9c_320',
],
'value': model,
},

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/python-codecs",
"version": "0.1.95",
"version": "0.1.96",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/python-codecs",
"version": "0.1.95",
"version": "0.1.96",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/python-codecs",
"version": "0.1.95",
"version": "0.1.96",
"description": "Python Codecs for Scrypted",
"keywords": [
"scrypted",

View File

@@ -1,6 +1,7 @@
import asyncio
import time
import traceback
import os
from typing import Any, AsyncGenerator, List, Union
import scrypted_sdk
@@ -202,7 +203,7 @@ def multiprocess_exit():
class CodecFork:
def timeoutExit():
def timeoutExit(self):
print("Frame yield timed out, exiting pipeline.")
multiprocess_exit()

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/reolink",
"version": "0.0.63",
"version": "0.0.65",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/reolink",
"version": "0.0.63",
"version": "0.0.65",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/reolink",
"version": "0.0.63",
"version": "0.0.65",
"description": "Reolink Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/snapshot",
"version": "0.2.40",
"version": "0.2.43",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/snapshot",
"version": "0.2.40",
"version": "0.2.43",
"dependencies": {
"@types/node": "^20.10.6",
"sharp": "^0.33.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/snapshot",
"version": "0.2.40",
"version": "0.2.43",
"description": "Snapshot Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",

View File

@@ -1,5 +1,6 @@
import sdk, { BufferConverter, Image, ImageOptions, MediaObject, MediaObjectOptions, ScryptedDeviceBase, ScryptedMimeTypes } from "@scrypted/sdk";
import type sharp from 'sharp';
import type { KernelEnum } from "sharp";
let hasLoadedSharp = false;
let sharpInstance: typeof sharp;
@@ -8,8 +9,6 @@ export function loadSharp() {
hasLoadedSharp = true;
try {
sharpInstance = require('sharp');
// not exposed by sharp but it exists.
(sharpInstance.kernel as any).linear = 'linear';
console.log('sharp loaded');
}
catch (e) {
@@ -60,11 +59,8 @@ export class VipsImage implements Image {
});
}
if (options?.resize) {
let kernel: string;
let kernel: keyof KernelEnum;
switch (options?.resize.filter) {
case 'bilinear':
kernel = 'linear';
break;
case 'lanczos':
kernel = 'lanczos2';
break;
@@ -75,12 +71,12 @@ export class VipsImage implements Image {
kernel = 'nearest';
break;
default:
kernel = 'linear';
kernel = 'cubic';
break
}
transformed.resize(typeof options.resize.width === 'number' ? Math.floor(options.resize.width) : undefined, typeof options.resize.height === 'number' ? Math.floor(options.resize.height) : undefined, {
fit: "cover",
kernel: kernel as any,
kernel: kernel,
});
}
@@ -132,13 +128,21 @@ export class VipsImage implements Image {
}
}
export async function loadVipsImage(data: Buffer | string, sourceId: string) {
loadSharp();
export async function loadVipsMetadata(data: Buffer | string) {
const image = sharpInstance(data, {
failOn: 'none'
});
const metadata = await image.metadata();
return {
image,
metadata,
}
}
export async function loadVipsImage(data: Buffer | string, sourceId: string) {
loadSharp();
const { image, metadata } = await loadVipsMetadata(data);
const vipsImage = new VipsImage(image, metadata, sourceId);
return vipsImage;
}
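
The resize change above stops patching a nonexistent 'linear' kernel onto sharp and instead maps the requested filter onto kernels sharp actually exports, falling back to 'cubic'. A hedged standalone sketch of that mapping against the sharp API; the loose filter parameter type is an assumption, only the kernel names appear in the diff:

import sharp, { KernelEnum } from 'sharp';

function pickKernel(filter?: string): keyof KernelEnum {
    switch (filter) {
        case 'lanczos':
            return 'lanczos2';
        case 'nearest':
            return 'nearest';
        default:
            // 'bilinear' no longer maps to a patched-in 'linear' kernel; cubic is the fallback.
            return 'cubic';
    }
}

async function resizeExample(input: Buffer) {
    return sharp(input, { failOn: 'none' })
        .resize(1280, undefined, { fit: 'cover', kernel: pickKernel('lanczos') })
        .toBuffer();
}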

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/tensorflow-lite",
"version": "0.1.48",
"version": "0.1.49",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tensorflow-lite",
"version": "0.1.48",
"version": "0.1.49",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -53,5 +53,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.48"
"version": "0.1.49"
}

View File

@@ -79,10 +79,16 @@ class TensorFlowLitePlugin(
model = "efficientdet_lite0_320_ptq"
self.yolo = "yolo" in model
self.yolov8 = "yolov8" in model
self.scrypted_model = "scrypted" in model
print(f'model: {model}')
if self.yolo:
if self.scrypted_model:
labelsFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/tflite-models/{branch}/scrypted_labels.txt",
f"{model_version}/scrypted_labels.txt",
)
elif self.yolo:
labelsFile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/tflite-models/{branch}/coco_80cl.txt",
f"{model_version}/coco_80cl.txt",
@@ -100,9 +106,10 @@ class TensorFlowLitePlugin(
self.interpreter_count = 0
def downloadModel():
tflite_model = "best_full_integer_quant" if self.scrypted_model else model
return self.downloadFile(
f"https://github.com/koush/tflite-models/raw/{branch}/{model}/{model}{suffix}.tflite",
f"{model_version}/{model}{suffix}.tflite",
f"https://github.com/koush/tflite-models/raw/{branch}/{model}/{tflite_model}{suffix}.tflite",
f"{model_version}/{tflite_model}{suffix}.tflite",
)
try:
@@ -175,6 +182,7 @@ class TensorFlowLitePlugin(
"ssdlite_mobiledet_coco_qat_postprocess",
"yolov8n_full_integer_quant",
"yolov8n_full_integer_quant_320",
"scrypted_yolov8n_320",
"efficientdet_lite0_320_ptq",
"efficientdet_lite1_384_ptq",
"efficientdet_lite2_448_ptq",

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/webrtc",
"version": "0.2.17",
"version": "0.2.18",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/webrtc",
"version": "0.2.17",
"version": "0.2.18",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/webrtc",
"version": "0.2.17",
"version": "0.2.18",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -406,6 +406,7 @@ class WebRTCTrack implements RTCOutputMediaObjectTrack, RTCInputMediaObjectTrack
constructor(public connectionManagement: WebRTCConnectionManagement, public video: RTCRtpTransceiver, public audio: RTCRtpTransceiver, intercom: Intercom) {
this.control = new ScryptedSessionControl(intercom, audio);
this.connectionManagement.activeTracks.add(this);
}
async onStop(): Promise<void> {
@@ -623,7 +624,7 @@ export class WebRTCConnectionManagement implements RTCConnectionManagement {
async close(): Promise<void> {
for (const track of this.activeTracks) {
track.cleanup(true);
track.cleanup(false);
}
this.activeTracks.clear();
this.pc.close();
@@ -662,7 +663,7 @@ export async function createRTCPeerConnectionSink(
});
track.control.killed.promise.then(() => {
track.cleanup(true);
track.cleanup(false);
connection.pc.close();
});

View File

@@ -627,9 +627,10 @@ export async function fork() {
const cleanup = new Deferred<string>();
cleanup.promise.catch(e => this.console.log('cleaning up rtc connection:', e.message));
cleanup.promise.finally(() => setTimeout(() => process.exit(), 10000));
cleanup.promise.finally(() => setTimeout(() => process.exit(), 30000));
const connection = new WebRTCConnectionManagement(console, clientSession, maximumCompatibilityMode, clientOptions, options);
cleanup.promise.finally(() => connection.close().catch(() => { }));
const { pc } = connection;
waitClosed(pc).then(() => cleanup.resolve('peer connection closed'));

View File

@@ -3,7 +3,7 @@ const { PortablePython } = require('py')
const { once } = require('events');
module.exports = {
version: '3.11',
version: '3.10',
}
async function pipInstall(python, pkg) {

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/server",
"version": "0.94.44",
"version": "0.97.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/server",
"version": "0.94.44",
"version": "0.97.0",
"hasInstallScript": true,
"license": "ISC",
"dependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/server",
"version": "0.95.0",
"version": "0.97.0",
"description": "",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.11",

View File

@@ -56,7 +56,7 @@ class DeviceProxy(object):
def __init__(self, systemManager: SystemManager, id: str):
self.systemManager = systemManager
self.id = id
self.device: asyncio.Future[rpc.RpcPeer] = None
self.device: asyncio.Future[rpc.RpcProxy] = None
def __getattr__(self, name):
if name == 'id':
@@ -81,7 +81,7 @@ class DeviceProxy(object):
def __apply__(self, method: str, args: list):
if not self.device:
self.device = self.systemManager.api.getDeviceById(self.id)
self.device = asyncio.ensure_future(self.systemManager.api.getDeviceById(self.id))
async def apply():
device = await self.device

View File

@@ -93,11 +93,21 @@ export async function httpFetch<T extends HttpFetchOptions<Readable>>(options: T
}
}
let controller: AbortController;
let timeout: NodeJS.Timeout;
if (options.timeout) {
controller = new AbortController();
timeout = setTimeout(() => controller.abort(), options.timeout);
options.signal?.addEventListener('abort', () => controller.abort('abort'));
}
const request = proto.request(url, {
method: getFetchMethod(options),
rejectUnauthorized: options.rejectUnauthorized,
family: options.family,
headers: nodeHeaders,
signal: controller?.signal || options.signal,
timeout: options.timeout,
});
@@ -105,30 +115,36 @@ export async function httpFetch<T extends HttpFetchOptions<Readable>>(options: T
body.pipe(request);
else
request.end();
const [response] = await once(request, 'response') as [IncomingMessage];
if (!options?.ignoreStatusCode) {
try {
checkStatus(response.statusCode);
try {
const [response] = await once(request, 'response') as [IncomingMessage];
if (!options?.ignoreStatusCode) {
try {
checkStatus(response.statusCode);
}
catch (e) {
readMessageBuffer(response).catch(() => { });
throw e;
}
}
catch (e) {
readMessageBuffer(response).catch(() => { });
throw e;
const incomingHeaders = new Headers();
for (const [k, v] of Object.entries(response.headers)) {
for (const vv of (typeof v === 'string' ? [v] : v)) {
incomingHeaders.append(k, vv)
}
}
return {
statusCode: response.statusCode,
headers: incomingHeaders,
body: await httpFetchParseIncomingMessage(response, options.responseType),
};
}
const incomingHeaders = new Headers();
for (const [k, v] of Object.entries(response.headers)) {
for (const vv of (typeof v === 'string' ? [v] : v)) {
incomingHeaders.append(k, vv)
}
finally {
clearTimeout(timeout);
}
return {
statusCode: response.statusCode,
headers: incomingHeaders,
body: await httpFetchParseIncomingMessage(response, options.responseType),
};
}
function ensureType<T>(v: T) {
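
The change above drops the socket-level timeout in favor of an AbortController that fires on a timer or when the caller's own signal aborts, and clears the timer in a finally block so a completed request does not leave a stray timeout behind; the domFetch hunk in the next file applies the same idea to the browser fetch path. A minimal standalone sketch of the pattern (names are illustrative, not this module's API):

import { once } from 'events';
import { request } from 'https';

async function requestWithTimeout(url: string, timeoutMs: number, outerSignal?: AbortSignal) {
    const controller = new AbortController();
    // trip the abort on our own timer, or when the caller's signal fires first.
    const timer = setTimeout(() => controller.abort('timeout'), timeoutMs);
    outerSignal?.addEventListener('abort', () => controller.abort('abort'));
    try {
        const req = request(url, { signal: controller.signal });
        req.end();
        const [response] = await once(req, 'response');
        return response;
    }
    finally {
        // always clear the timer so finished requests do not leave a pending timeout.
        clearTimeout(timer);
    }
}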

View File

@@ -5,6 +5,7 @@ export interface HttpFetchOptionsBase<B> {
family?: 4 | 6;
method?: string;
headers?: HeadersInit;
signal?: AbortSignal,
timeout?: number;
rejectUnauthorized?: boolean;
ignoreStatusCode?: boolean;
@@ -123,30 +124,44 @@ export async function domFetch<T extends HttpFetchOptions<BodyInit>>(options: T)
body = createStringOrBufferBody(headers, body);
}
const { url } = options;
const response = await fetch(url, {
method: getFetchMethod(options),
credentials: options.withCredentials ? 'include' : undefined,
headers,
signal: options.timeout ? AbortSignal.timeout(options.timeout) : undefined,
body,
});
let controller: AbortController;
let timeout: NodeJS.Timeout;
if (options.timeout) {
controller = new AbortController();
timeout = setTimeout(() => controller.abort(), options.timeout);
if (!options?.ignoreStatusCode) {
try {
checkStatus(response.status);
}
catch (e) {
response.arrayBuffer().catch(() => { });
throw e;
}
options.signal?.addEventListener('abort', () => controller.abort('abort'));
}
return {
statusCode: response.status,
headers: response.headers,
body: await domFetchParseIncomingMessage(response, options.responseType),
};
try {
const { url } = options;
const response = await fetch(url, {
method: getFetchMethod(options),
credentials: options.withCredentials ? 'include' : undefined,
headers,
signal: controller?.signal || options.signal,
body,
});
if (!options?.ignoreStatusCode) {
try {
checkStatus(response.status);
}
catch (e) {
response.arrayBuffer().catch(() => { });
throw e;
}
}
return {
statusCode: response.status,
headers: response.headers,
body: await domFetchParseIncomingMessage(response, options.responseType),
};
}
finally {
clearTimeout(timeout);
}
}
function ensureType<T>(v: T) {