Compare commits


66 Commits

Author SHA1 Message Date
Koushik Dutta e5fb65d75e prerelease 2023-03-17 23:42:47 -07:00
Koushik Dutta 290b73f3d9 python-codecs: fix hw acceleration 2023-03-17 23:42:33 -07:00
Koushik Dutta f717e87306 snapshot: include ffmpeg path 2023-03-17 23:37:20 -07:00
Koushik Dutta b80ac7c60d prebeta 2023-03-17 23:21:33 -07:00
Koushik Dutta 997a4732ec server: additional python rpc transport fixes 2023-03-17 23:21:07 -07:00
Koushik Dutta 6e08f11578 snapshot/python-codecs: move high performance native image library to larger package as optional dependency 2023-03-17 22:58:29 -07:00
Koushik Dutta 87c4814e6f prebeta 2023-03-17 22:22:39 -07:00
Koushik Dutta 2e0e009719 server: update publish scripts 2023-03-17 22:22:30 -07:00
Koushik Dutta 77399038e9 server: clean up python rpc transports 2023-03-17 22:21:07 -07:00
Koushik Dutta fae66619fb prepublish 2023-03-17 19:16:50 -07:00
Koushik Dutta d979b9ec0c server: connection.poll should provide None to block forever 2023-03-17 19:16:15 -07:00
Koushik Dutta 975319a65d motion: implement a default inclusion zone that prevents on screen clocks from triggering motion 2023-03-17 16:19:50 -07:00
Koushik Dutta 7b5aa4ba2d python-codecs: remove erroneous libav from gstreamer settings 2023-03-17 16:19:20 -07:00
Koushik Dutta 670739c82b python-codecs: restructure, add gstreamer decoder option 2023-03-17 10:28:41 -07:00
Koushik Dutta 8511bd15a8 server: update package lock 2023-03-16 23:59:19 -07:00
Koushik Dutta 06d3c89274 prepublish 2023-03-16 23:59:10 -07:00
Koushik Dutta e13f3eb2f1 server: add python forked processes to stats 2023-03-16 23:59:01 -07:00
Koushik Dutta 001918d613 predict: fix detections from webui 2023-03-16 23:58:45 -07:00
Koushik Dutta c859c3aa40 detect: publish plugins with new video pipeline support 2023-03-16 23:40:33 -07:00
Koushik Dutta 2bce019677 predict: make models a separate download 2023-03-16 23:29:02 -07:00
Koushik Dutta 6ba3386157 detect: fix peer kill causing exception inside finally handler 2023-03-16 22:10:25 -07:00
Koushik Dutta 51e66d98f9 videoanalysis: changing motion detect mode should restart motion detection 2023-03-16 22:09:56 -07:00
Koushik Dutta 6484804649 server: update package lock 2023-03-16 20:37:54 -07:00
Koushik Dutta b2a05c099d prepublish 2023-03-16 20:37:42 -07:00
Koushik Dutta 898331da4c Merge branch 'main' of github.com:koush/scrypted 2023-03-16 20:37:33 -07:00
Koushik Dutta 9044e782b2 python-codecs: add gray decoding support 2023-03-16 20:37:28 -07:00
Koushik Dutta aedb985941 detect: support motion on new pipeline 2023-03-16 20:37:12 -07:00
Koushik Dutta 9ba22e4058 server: fix python rpc kill handling 2023-03-16 20:33:09 -07:00
Alex Leeds ab0afb61ae ring: add video clips support (#635) 2023-03-16 18:40:36 -07:00
  * ring: add video clips support
  * fix merge
Alex Leeds bf00ba0adc ring: add support for locks (#634) 2023-03-16 18:32:20 -07:00
Koushik Dutta d564cf1b62 server: update package lock 2023-03-16 11:13:24 -07:00
Koushik Dutta 544dfb3b24 Update rtsp-proxy.ts 2023-03-16 10:40:19 -07:00
Koushik Dutta cf9af910be rtsp: rtsp proxy example 2023-03-16 10:03:24 -07:00
Koushik Dutta e2e65f93af prepublish 2023-03-16 09:37:34 -07:00
Koushik Dutta b271567428 server: Fix device initialization on first report 2023-03-16 09:37:25 -07:00
Koushik Dutta a88a295d9a server: fixup project file 2023-03-15 23:09:16 -07:00
Koushik Dutta 38ba31ca7d tensorflow-lite: use multiple tpu 2023-03-15 23:08:48 -07:00
Koushik Dutta 1c8ff2493b coreml: move prediction onto background thread 2023-03-15 23:04:45 -07:00
Koushik Dutta 5c9f62e6b6 videoanalysis: add snapshot pipeline 2023-03-15 23:04:13 -07:00
Koushik Dutta 6fd8018c52 python-codecs: fix nre 2023-03-15 23:02:50 -07:00
Koushik Dutta d900ddf5f1 mac: fix erroneous typing installation 2023-03-15 21:54:17 -07:00
Koushik Dutta e3a8d311ce python-codecs: add libav support 2023-03-15 20:33:44 -07:00
Koushik Dutta 8bbc3d5470 videoanalysis: generator cleanup 2023-03-15 17:18:28 -07:00
Koushik Dutta 00cf987cec videoanalysis: reimplemnet snapshots for new pipeline 2023-03-15 17:03:34 -07:00
Koushik Dutta 7e5dcae64a webrtc/alexa: add option to disable TURN on peers that already have externally reachable addresses 2023-03-15 10:31:25 -07:00
Koushik Dutta cb67237d7c server: update package lock 2023-03-15 01:28:39 -07:00
Koushik Dutta 4be848c440 prepublish 2023-03-15 01:28:05 -07:00
Koushik Dutta b33422b066 server: fix python fork hangs 2023-03-15 01:28:01 -07:00
Koushik Dutta 77418684da server: publish 2023-03-14 23:50:22 -07:00
Koushik Dutta 08cf9f7774 prepublish 2023-03-14 23:49:51 -07:00
Koushik Dutta 9f2fabf9c0 Merge branch 'main' of github.com:koush/scrypted 2023-03-14 23:47:24 -07:00
Koushik Dutta e2e1c7be44 server: remove python log statement 2023-03-14 23:47:05 -07:00
Koushik Dutta ba030ba197 server: fix multiprocessing blocking read on linux 2023-03-14 23:45:06 -07:00
Koushik Dutta a4f37bdc16 snapshot: publish 2023-03-14 23:42:33 -07:00
Koushik Dutta f6c7b00562 tensorflow-lite: fix numpy serialization issue 2023-03-14 23:41:55 -07:00
Koushik Dutta b951614f7c Merge branch 'main' of github.com:koush/scrypted 2023-03-14 20:13:28 -07:00
Koushik Dutta f1dfdb3494 coreml: revert tracker dependency removal 2023-03-14 20:13:22 -07:00
Nick Berardi ffbd25b13b alexa: set screen ratio to 720p (#625) 2023-03-14 18:40:47 -07:00
Koushik Dutta 4f03fe2420 docker: fix pyvips cffi mismatch 2023-03-14 18:00:10 -07:00
Koushik Dutta ffdb386afa mac: include libvips in installer 2023-03-14 17:25:47 -07:00
Koushik Dutta 9eeeaa79d0 docker: include libvips 2023-03-14 16:08:22 -07:00
Koushik Dutta 4163142d1e Merge branch 'main' of github.com:koush/scrypted 2023-03-14 15:45:35 -07:00
Koushik Dutta 71cddc67e0 predict: publish new pipeline support 2023-03-14 15:45:30 -07:00
Alex Leeds 2cbc4eb54f eufy: support multiple p2p streams (#624) 2023-03-14 15:26:46 -07:00
Koushik Dutta fc94fb4221 core: republish 2023-03-14 15:21:13 -07:00
Koushik Dutta 85ed41c590 server: publish 2023-03-14 15:15:09 -07:00
68 changed files with 1484 additions and 4540 deletions

common/test/rtsp-proxy.ts Normal file
View File

@@ -0,0 +1,56 @@
import net from 'net';
import { listenZero } from '../src/listen-cluster';
import { RtspClient, RtspServer } from '../src/rtsp-server';
async function main() {
const server = net.createServer(async serverSocket => {
const client = new RtspClient('rtsp://localhost:57594/911db962087f904d');
await client.options();
const describeResponse = await client.describe();
const sdp = describeResponse.body.toString();
const server = new RtspServer(serverSocket, sdp, true);
const setupResponse = await server.handlePlayback();
if (setupResponse !== 'play') {
serverSocket.destroy();
client.client.destroy();
return;
}
console.log('playback handled');
let channel = 0;
for (const track of Object.keys(server.setupTracks)) {
const setupTrack = server.setupTracks[track];
await client.setup({
// type: 'udp',
type: 'tcp',
port: channel,
path: setupTrack.control,
onRtp(rtspHeader, rtp) {
server.sendTrack(setupTrack.control, rtp, false);
},
});
channel += 2;
}
await client.play();
console.log('client playing');
await client.readLoop();
});
let port: number;
if (false) {
port = await listenZero(server);
}
else {
port = 5555;
server.listen(5555)
}
console.log(`rtsp://127.0.0.1:${port}`);
}
main();
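
The rtsp-proxy.ts example above chains an RtspClient pulling from an upstream RTSP URL into an RtspServer that re-serves each track over interleaved TCP, then prints the local URL. One way to spot-check it is to open that URL with any RTSP-capable client; a minimal sketch using OpenCV's FFmpeg-backed capture, assuming the hard-coded fallback port 5555 is in use (a hypothetical check, not part of the commit):

```python
# Hypothetical smoke test for the RTSP proxy above: pull a few frames from the
# proxied stream with OpenCV's FFmpeg backend. Assumes opencv-python is installed
# and the proxy printed rtsp://127.0.0.1:5555 (the hard-coded fallback port).
import cv2

cap = cv2.VideoCapture("rtsp://127.0.0.1:5555", cv2.CAP_FFMPEG)
if not cap.isOpened():
    raise SystemExit("could not connect to the RTSP proxy")

for i in range(30):
    ok, frame = cap.read()          # blocks until the next decoded frame arrives
    if not ok:
        break
    print(f"frame {i}: {frame.shape[1]}x{frame.shape[0]}")

cap.release()
```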

View File

@@ -33,7 +33,8 @@ RUN apt-get -y install \
gcc \
libgirepository1.0-dev \
libglib2.0-dev \
pkg-config
pkg-config \
libvips
# ffmpeg
RUN apt-get -y install \
@@ -61,6 +62,9 @@ RUN apt-get -y install \
# python pip
RUN python3 -m pip install --upgrade pip
# pyvips is broken on x86 due to a libffi version mismatch
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
RUN pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install aiofiles debugpy typing_extensions typing psutil
################################################################
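
The comment above records why cffi is rebuilt from source: the prebuilt wheel can be linked against a different libffi than the one present at runtime, which breaks pyvips. A hedged post-build sanity check, assuming only that pyvips and cffi should import cleanly once the reinstall has done its job:

```python
# Minimal post-build sanity check for the libffi/cffi mismatch described above:
# importing pyvips is exactly what fails when the runtime libffi differs from the
# one cffi was built against, so a clean import plus a trivial operation suffices.
import cffi
import pyvips

print("cffi", cffi.__version__)
img = pyvips.Image.black(4, 4)      # tiny in-memory image, no files needed
print("pyvips ok:", img.width, "x", img.height)
```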

View File

@@ -40,13 +40,14 @@ echo "Installing Scrypted dependencies..."
RUN_IGNORE xcode-select --install
RUN brew update
RUN_IGNORE brew install node@18
# needed by scrypted-ffmpeg
RUN_IGNORE brew install sdl2
# snapshot plugin and others
RUN brew install libvips
# gstreamer plugins
RUN_IGNORE brew install gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-plugins-ugly
# gst python bindings
RUN_IGNORE brew install gst-python
# python image library
# todo: consider removing this
RUN_IGNORE brew install pillow
### HACK WORKAROUND
@@ -102,7 +103,11 @@ then
fi
RUN python$PYTHON_VERSION -m pip install --upgrade pip
RUN python$PYTHON_VERSION -m pip install aiofiles debugpy typing_extensions typing opencv-python psutil
if [ "$PYTHON_VERSION" != "3.10" ]
then
RUN python$PYTHON_VERSION -m pip install typing
fi
RUN python$PYTHON_VERSION -m pip install aiofiles debugpy typing_extensions opencv-python psutil
echo "Installing Scrypted Launch Agent..."

View File

@@ -30,7 +30,8 @@ RUN apt-get -y install \
gcc \
libgirepository1.0-dev \
libglib2.0-dev \
pkg-config
pkg-config \
libvips
# ffmpeg
RUN apt-get -y install \
@@ -58,6 +59,9 @@ RUN apt-get -y install \
# python pip
RUN python3 -m pip install --upgrade pip
# pyvips is broken on x86 due to a libffi version mismatch
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
RUN pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install aiofiles debugpy typing_extensions typing psutil
################################################################

File diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/alexa",
"version": "0.2.0",
"version": "0.2.3",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",
@@ -39,6 +39,6 @@
},
"devDependencies": {
"@types/node": "^18.4.2",
"@scrypted/sdk": "^0.2.70"
"@scrypted/sdk": "../../sdk"
}
}

View File

@@ -17,6 +17,13 @@ export class AlexaSignalingSession implements RTCSignalingSession {
sdp: this.directive.payload.offer.value,
},
disableTrickle: true,
disableTurn: true,
// this could be a low resolution screen, no way of knowing, so never send a 1080p stream
screen: {
devicePixelRatio: 1,
width: 1280,
height: 720
}
}
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/core",
"version": "0.1.102",
"version": "0.1.103",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/core",
"version": "0.1.102",
"version": "0.1.103",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/core",
"version": "0.1.102",
"version": "0.1.103",
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -1,20 +0,0 @@
#!/bin/sh
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
rm -rf all_models
mkdir -p all_models
cd all_models
wget https://github.com/koush/coreml-survival-guide/raw/master/MobileNetV2%2BSSDLite/ObjectDetection/ObjectDetection/MobileNetV2_SSDLite.mlmodel
wget https://raw.githubusercontent.com/koush/coreml-survival-guide/master/MobileNetV2%2BSSDLite/coco_labels.txt

View File

@@ -1 +0,0 @@
../all_models/MobileNetV2_SSDLite.mlmodel

View File

@@ -1 +0,0 @@
../all_models/coco_labels.txt

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/coreml",
"version": "0.0.24",
"version": "0.1.2",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/coreml",
"version": "0.0.24",
"version": "0.1.2",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -41,5 +41,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.24"
"version": "0.1.2"
}

View File

@@ -6,6 +6,10 @@ from predict import PredictPlugin, Prediction, Rectangle
import coremltools as ct
import os
from PIL import Image
import asyncio
import concurrent.futures
predictExecutor = concurrent.futures.ThreadPoolExecutor(2, "CoreML-Predict")
def parse_label_contents(contents: str):
lines = contents.splitlines()
@@ -25,17 +29,19 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
def __init__(self, nativeId: str | None = None):
super().__init__(MIME_TYPE, nativeId=nativeId)
modelPath = os.path.join(os.environ['SCRYPTED_PLUGIN_VOLUME'], 'zip', 'unzipped', 'fs', 'MobileNetV2_SSDLite.mlmodel')
self.model = ct.models.MLModel(modelPath)
labelsFile = self.downloadFile('https://raw.githubusercontent.com/koush/coreml-survival-guide/master/MobileNetV2%2BSSDLite/coco_labels.txt', 'coco_labels.txt')
modelFile = self.downloadFile('https://github.com/koush/coreml-survival-guide/raw/master/MobileNetV2%2BSSDLite/ObjectDetection/ObjectDetection/MobileNetV2_SSDLite.mlmodel', 'MobileNetV2_SSDLite.mlmodel')
self.model = ct.models.MLModel(modelFile)
self.modelspec = self.model.get_spec()
self.inputdesc = self.modelspec.description.input[0]
self.inputheight = self.inputdesc.type.imageType.height
self.inputwidth = self.inputdesc.type.imageType.width
labels_contents = scrypted_sdk.zip.open(
'fs/coco_labels.txt').read().decode('utf8')
labels_contents = open(labelsFile, 'r').read()
self.labels = parse_label_contents(labels_contents)
self.loop = asyncio.get_event_loop()
# width, height, channels
def get_input_details(self) -> Tuple[int, int, int]:
@@ -44,8 +50,12 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
def get_input_size(self) -> Tuple[float, float]:
return (self.inputwidth, self.inputheight)
def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
out_dict = self.model.predict({'image': input, 'confidenceThreshold': .2 })
async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
# run in executor if this is the plugin loop
if asyncio.get_event_loop() is self.loop:
out_dict = await asyncio.get_event_loop().run_in_executor(predictExecutor, lambda: self.model.predict({'image': input, 'confidenceThreshold': .2 }))
else:
out_dict = self.model.predict({'image': input, 'confidenceThreshold': .2 })
coordinatesList = out_dict['coordinates']
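
The detect_once change above keeps the blocking CoreML model.predict call off the plugin's event loop: when invoked on that loop, the work hops to a small ThreadPoolExecutor; otherwise it runs inline. A stripped-down sketch of the same pattern, with a stand-in blocking function in place of the CoreML model (names here are illustrative, not from the plugin):

```python
# Sketch of the "run blocking work in an executor only when on the event loop"
# pattern used by detect_once above. slow_predict stands in for model.predict.
import asyncio
import concurrent.futures
import time

predict_executor = concurrent.futures.ThreadPoolExecutor(2, "Predict")

def slow_predict(payload):
    time.sleep(0.2)                     # placeholder for the real inference call
    return {"coordinates": [], "input": payload}

class Detector:
    def __init__(self):
        self.loop = asyncio.get_event_loop()

    async def detect_once(self, payload):
        if asyncio.get_event_loop() is self.loop:
            # on the plugin loop: never block it, hop to the thread pool
            return await asyncio.get_event_loop().run_in_executor(
                predict_executor, lambda: slow_predict(payload))
        # already on a worker loop/thread: calling directly is fine
        return slow_predict(payload)

async def main():
    d = Detector()
    print(await d.detect_once("frame"))

asyncio.run(main())
```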

View File

@@ -3,3 +3,8 @@ Pillow>=5.4.1
PyGObject>=3.30.4
coremltools~=6.1
av>=10.0.0; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'
# sort_oh
scipy
filterpy
numpy

View File

@@ -33,6 +33,7 @@
}
},
"../../packages/h264-repacketizer": {
"name": "@scrypted/h264-repacketizer",
"version": "0.0.6",
"license": "ISC",
"devDependencies": {
@@ -43,7 +44,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.84",
"version": "0.2.85",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",

View File

@@ -2,7 +2,7 @@ import { listenSingleRtspClient } from '@scrypted/common/src/rtsp-server';
import { addTrackControls, parseSdp } from '@scrypted/common/src/sdp-utils';
import sdk, { Battery, Camera, Device, DeviceProvider, FFmpegInput, MediaObject, MotionSensor, RequestMediaStreamOptions, RequestPictureOptions, ResponseMediaStreamOptions, ResponsePictureOptions, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, Setting, Settings, SettingValue, VideoCamera } from '@scrypted/sdk';
import { StorageSettings } from '@scrypted/sdk/storage-settings';
import eufy, { CaptchaOptions, EufySecurity } from 'eufy-security-client';
import eufy, { CaptchaOptions, EufySecurity, P2PClientProtocol, P2PConnectionType } from 'eufy-security-client';
import { startRtpForwarderProcess } from '../../webrtc/src/rtp-forwarders';
import { Deferred } from '@scrypted/common/src/deferred';
@@ -11,17 +11,14 @@ import { LocalLivestreamManager } from './stream';
const { deviceManager, mediaManager, systemManager } = sdk;
class EufyCamera extends ScryptedDeviceBase implements Camera, VideoCamera, Battery, MotionSensor {
class EufyCamera extends ScryptedDeviceBase implements VideoCamera, MotionSensor {
client: EufySecurity;
device: eufy.Camera;
livestreamManager: LocalLivestreamManager
constructor(nativeId: string, client: EufySecurity, device: eufy.Camera) {
super(nativeId);
this.client = client;
this.device = device;
this.livestreamManager = new LocalLivestreamManager(this.client, this.device, this.console);
this.batteryLevel = this.device.getBatteryValue() as number;
this.setupMotionDetection();
}
@@ -37,31 +34,6 @@ class EufyCamera extends ScryptedDeviceBase implements Camera, VideoCamera, Batt
this.device.on('radar motion detected', handle);
}
async takePicture(options?: RequestPictureOptions): Promise<MediaObject> {
// if this stream is prebuffered, its safe to use the prebuffer to generate an image
const realDevice = systemManager.getDeviceById<VideoCamera>(this.id);
try {
const msos = await realDevice.getVideoStreamOptions();
const prebuffered: RequestMediaStreamOptions = msos.find(mso => mso.prebuffer);
if (prebuffered) {
prebuffered.refresh = false;
return realDevice.getVideoStream(prebuffered);
}
} catch (e) {}
// try to fetch the cloud image if one exists
const url = this.device.getLastCameraImageURL();
if (url) {
return mediaManager.createMediaObjectFromUrl(url.toString());
}
throw new Error("snapshot unavailable");
}
getPictureOptions(): Promise<ResponsePictureOptions[]> {
return;
}
getVideoStream(options?: ResponseMediaStreamOptions): Promise<MediaObject> {
return this.createVideoStream(options);
}
@@ -80,15 +52,32 @@ class EufyCamera extends ScryptedDeviceBase implements Camera, VideoCamera, Batt
},
tool: 'scrypted',
userConfigurable: false,
}
},
{
container: 'rtsp',
id: 'p2p-low',
name: 'P2P (Low Resolution)',
video: {
codec: 'h264',
width: 1280,
height: 720,
},
audio: {
codec: 'aac',
},
tool: 'scrypted',
userConfigurable: false,
},
];
}
async createVideoStream(options?: ResponseMediaStreamOptions): Promise<MediaObject> {
const livestreamManager = new LocalLivestreamManager(options.id, this.client, this.device, this.console);
const kill = new Deferred<void>();
kill.promise.finally(() => {
this.console.log('video stream exited');
this.livestreamManager.stopLocalLiveStream();
livestreamManager.stopLocalLiveStream();
});
const rtspServer = await listenSingleRtspClient();
@@ -138,7 +127,7 @@ class EufyCamera extends ScryptedDeviceBase implements Camera, VideoCamera, Batt
await rtsp.handlePlayback();
});
const proxyStream = await this.livestreamManager.getLocalLivestream();
const proxyStream = await livestreamManager.getLocalLivestream();
proxyStream.videostream.pipe(process.cp.stdio[4] as Writable);
proxyStream.audiostream.pipe((process.cp.stdio as any)[5] as Writable);
}
@@ -240,14 +229,13 @@ class EufyPlugin extends ScryptedDeviceBase implements DeviceProvider, Settings
password: this.storageSettings.values.password,
country: this.storageSettings.values.country,
language: 'en',
p2pConnectionSetup: 2,
p2pConnectionSetup: P2PConnectionType.QUICKEST,
pollingIntervalMinutes: 10,
eventDurationSeconds: 10
}
this.client = await EufySecurity.initialize(config);
this.client.on('device added', this.deviceAdded.bind(this));
this.client.on('station added', this.stationAdded.bind(this));
this.client.on('tfa request', () => {
this.log.a('Login failed: 2FA is enabled, check your email or texts for your code, then enter it into the Two Factor Code setting to complete login.');
});
@@ -277,7 +265,6 @@ class EufyPlugin extends ScryptedDeviceBase implements DeviceProvider, Settings
const nativeId = eufyDevice.getSerial();
const interfaces = [
ScryptedInterface.Camera,
ScryptedInterface.VideoCamera
];
if (eufyDevice.hasBattery())

View File

@@ -1,7 +1,7 @@
// Based off of https://github.com/homebridge-eufy-security/plugin/blob/master/src/plugin/controller/LocalLivestreamManager.ts
import { Camera, CommandData, CommandName, CommandType, Device, DeviceType, EufySecurity, isGreaterEqualMinVersion, P2PClientProtocol, ParamType, Station, StreamMetadata, VideoCodec } from 'eufy-security-client';
import { EventEmitter, Readable } from 'stream';
import { Station, Device, StreamMetadata, Camera, EufySecurity } from 'eufy-security-client';
type StationStream = {
station: Station;
@@ -19,28 +19,39 @@ export class LocalLivestreamManager extends EventEmitter {
private livestreamStartedAt: number | null;
private livestreamIsStarting = false;
private readonly id: string;
private readonly client: EufySecurity;
private readonly device: Camera;
constructor(client: EufySecurity, device: Camera, console: Console) {
private station: Station;
private p2pSession: P2PClientProtocol;
constructor(id: string, client: EufySecurity, device: Camera, console: Console) {
super();
this.id = id;
this.console = console;
this.client = client;
this.device = device;
this.client.getStation(this.device.getStationSerial()).then( (station) => {
this.station = station;
this.p2pSession = new P2PClientProtocol(station.getRawStation(), this.client.getApi(), station.getIPAddress());
this.p2pSession.on("livestream started", (channel: number, metadata: StreamMetadata, videostream: Readable, audiostream: Readable) => {
this.onStationLivestreamStart(station, device, metadata, videostream, audiostream);
});
this.p2pSession.on("livestream stopped", (channel: number) => {
this.onStationLivestreamStop(station, device);
});
this.p2pSession.on("livestream error", (channel: number, error: Error) => {
this.stopLivestream();
});
});
this.stationStream = null;
this.livestreamStartedAt = null;
this.initialize();
this.client.on('station livestream stop', (station: Station, device: Device) => {
this.onStationLivestreamStop(station, device);
});
this.client.on('station livestream start',
(station: Station, device: Device, metadata: StreamMetadata, videostream: Readable, audiostream: Readable) => {
this.onStationLivestreamStart(station, device, metadata, videostream, audiostream);
});
}
private initialize() {
@@ -55,10 +66,10 @@ export class LocalLivestreamManager extends EventEmitter {
}
public async getLocalLivestream(): Promise<StationStream> {
this.console.debug(this.device.getName(), 'New instance requests livestream.');
this.console.debug(this.device.getName(), this.id, 'New instance requests livestream.');
if (this.stationStream) {
const runtime = (Date.now() - this.livestreamStartedAt!) / 1000;
this.console.debug(this.device.getName(), 'Using livestream that was started ' + runtime + ' seconds ago.');
this.console.debug(this.device.getName(), this.id, 'Using livestream that was started ' + runtime + ' seconds ago.');
return this.stationStream;
} else {
return await this.startAndGetLocalLiveStream();
@@ -67,17 +78,17 @@ export class LocalLivestreamManager extends EventEmitter {
private async startAndGetLocalLiveStream(): Promise<StationStream> {
return new Promise((resolve, reject) => {
this.console.debug(this.device.getName(), 'Start new station livestream (P2P Session)...');
this.console.debug(this.device.getName(), this.id, 'Start new station livestream...');
if (!this.livestreamIsStarting) { // prevent multiple stream starts from eufy station
this.livestreamIsStarting = true;
this.client.startStationLivestream(this.device.getSerial());
this.startStationLivestream();
} else {
this.console.debug(this.device.getName(), 'stream is already starting. waiting...');
this.console.debug(this.device.getName(), this.id, 'stream is already starting. waiting...');
}
this.once('livestream start', async () => {
if (this.stationStream !== null) {
this.console.debug(this.device.getName(), 'New livestream started.');
this.console.debug(this.device.getName(), this.id, 'New livestream started.');
this.livestreamIsStarting = false;
resolve(this.stationStream);
} else {
@@ -87,15 +98,102 @@ export class LocalLivestreamManager extends EventEmitter {
});
}
private async startStationLivestream(videoCodec: VideoCodec = VideoCodec.H264): Promise<void> {
const commandData: CommandData = {
name: CommandName.DeviceStartLivestream,
value: videoCodec
};
this.console.debug(this.device.getName(), this.id, `Sending start livestream command to station ${this.station.getSerial()}`);
const rsa_key = this.p2pSession.getRSAPrivateKey();
if (this.device.isSoloCameras() || this.device.getDeviceType() === DeviceType.FLOODLIGHT_CAMERA_8423 || this.device.isWiredDoorbellT8200X()) {
this.console.debug(this.device.getName(), this.id, `Using CMD_DOORBELL_SET_PAYLOAD (1) for station ${this.station.getSerial()} (main_sw_version: ${this.station.getSoftwareVersion()})`);
await this.p2pSession.sendCommandWithStringPayload({
commandType: CommandType.CMD_DOORBELL_SET_PAYLOAD,
value: JSON.stringify({
"commandType": ParamType.COMMAND_START_LIVESTREAM,
"data": {
"accountId": this.station.getRawStation().member.admin_user_id,
"encryptkey": rsa_key?.exportKey("components-public").n.slice(1).toString("hex"),
"streamtype": videoCodec
}
}),
channel: this.device.getChannel()
}, {
command: commandData
});
} else if (this.device.isWiredDoorbell() || (this.device.isFloodLight() && this.device.getDeviceType() !== DeviceType.FLOODLIGHT) || this.device.isIndoorCamera() || (this.device.getSerial().startsWith("T8420") && isGreaterEqualMinVersion("2.0.4.8", this.station.getSoftwareVersion()))) {
this.console.debug(this.device.getName(), this.id, `Using CMD_DOORBELL_SET_PAYLOAD (2) for station ${this.station.getSerial()} (main_sw_version: ${this.station.getSoftwareVersion()})`);
await this.p2pSession.sendCommandWithStringPayload({
commandType: CommandType.CMD_DOORBELL_SET_PAYLOAD,
value: JSON.stringify({
"commandType": ParamType.COMMAND_START_LIVESTREAM,
"data": {
"account_id": this.station.getRawStation().member.admin_user_id,
"encryptkey": rsa_key?.exportKey("components-public").n.slice(1).toString("hex"),
"streamtype": videoCodec
}
}),
channel: this.device.getChannel()
}, {
command: commandData
});
} else {
if ((Device.isIntegratedDeviceBySn(this.station.getSerial()) || !isGreaterEqualMinVersion("2.0.9.7", this.station.getSoftwareVersion())) && (!this.station.getSerial().startsWith("T8420") || !isGreaterEqualMinVersion("1.0.0.25", this.station.getSoftwareVersion()))) {
this.console.debug(this.device.getName(), this.id, `Using CMD_START_REALTIME_MEDIA for station ${this.station.getSerial()} (main_sw_version: ${this.station.getSoftwareVersion()})`);
await this.p2pSession.sendCommandWithInt({
commandType: CommandType.CMD_START_REALTIME_MEDIA,
value: this.device.getChannel(),
strValue: rsa_key?.exportKey("components-public").n.slice(1).toString("hex"),
channel: this.device.getChannel()
}, {
command: commandData
});
} else {
this.console.debug(this.device.getName(), this.id, `Using CMD_SET_PAYLOAD for station ${this.station.getSerial()} (main_sw_version: ${this.station.getSoftwareVersion()})`);
await this.p2pSession.sendCommandWithStringPayload({
commandType: CommandType.CMD_SET_PAYLOAD,
value: JSON.stringify({
"account_id": this.station.getRawStation().member.admin_user_id,
"cmd": CommandType.CMD_START_REALTIME_MEDIA,
"mValue3": CommandType.CMD_START_REALTIME_MEDIA,
"payload": {
"ClientOS": "Android",
"key": rsa_key?.exportKey("components-public").n.slice(1).toString("hex"),
"streamtype": videoCodec === VideoCodec.H264 ? 1 : 2,
}
}),
channel: this.device.getChannel()
}, {
command: commandData
});
}
}
}
public stopLocalLiveStream(): void {
this.console.debug(this.device.getName(), 'Stopping station livestream.');
this.client.stopStationLivestream(this.device.getSerial());
this.console.debug(this.device.getName(), this.id, 'Stopping station livestream.');
this.stopLivestream();
this.initialize();
}
private async stopLivestream(): Promise<void> {
const commandData: CommandData = {
name: CommandName.DeviceStopLivestream
};
this.console.debug(this.device.getName(), this.id, `Sending stop livestream command to station ${this.station.getSerial()}`);
await this.p2pSession.sendCommandWithInt({
commandType: CommandType.CMD_STOP_REALTIME_MEDIA,
value: this.device.getChannel(),
channel: this.device.getChannel()
}, {
command: commandData
});
}
private onStationLivestreamStop(station: Station, device: Device) {
if (device.getSerial() === this.device.getSerial()) {
this.console.info(station.getName() + ' station livestream for ' + device.getName() + ' has stopped.');
this.console.info(this.id + ' - ' + station.getName() + ' station livestream for ' + device.getName() + ' has stopped.');
this.initialize();
}
}
@@ -111,17 +209,17 @@ export class LocalLivestreamManager extends EventEmitter {
if (this.stationStream) {
const diff = (Date.now() - this.stationStream.createdAt) / 1000;
if (diff < 5) {
this.console.warn(this.device.getName(), 'Second livestream was started from station. Ignore.');
this.console.warn(this.device.getName(), this.id, 'Second livestream was started from station. Ignore.');
return;
}
}
this.initialize(); // important to prevent unwanted behaviour when the eufy station emits the 'livestream start' event multiple times
this.console.info(station.getName() + ' station livestream (P2P session) for ' + device.getName() + ' has started.');
this.console.info(this.id + ' - ' + station.getName() + ' station livestream (P2P session) for ' + device.getName() + ' has started.');
this.livestreamStartedAt = Date.now();
const createdAt = Date.now();
this.stationStream = {station, device, metadata, videostream, audiostream, createdAt};
this.console.debug(this.device.getName(), 'Stream metadata: ' + JSON.stringify(this.stationStream.metadata));
this.console.debug(this.device.getName(), this.id, 'Stream metadata: ' + JSON.stringify(this.stationStream.metadata));
this.emit('livestream start');
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/objectdetector",
"version": "0.0.102",
"version": "0.0.108",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/objectdetector",
"version": "0.0.102",
"version": "0.0.108",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/objectdetector",
"version": "0.0.102",
"version": "0.0.108",
"description": "Scrypted Video Analysis Plugin. Installed alongside a detection service like OpenCV or TensorFlow.",
"author": "Scrypted",
"license": "Apache-2.0",
@@ -38,7 +38,10 @@
"Settings",
"MixinProvider"
],
"realfs": true
"realfs": true,
"pluginDependencies": [
"@scrypted/python-codecs"
]
},
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,4 +1,4 @@
import sdk, { VideoFrameGenerator, Camera, DeviceState, EventListenerRegister, MediaObject, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedNativeId, Setting, Settings, SettingValue, VideoCamera } from '@scrypted/sdk';
import sdk, { ScryptedMimeTypes, Image, VideoFrame, VideoFrameGenerator, Camera, DeviceState, EventListenerRegister, MediaObject, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedNativeId, Setting, Settings, SettingValue, VideoCamera, MediaStreamDestination } from '@scrypted/sdk';
import { StorageSettings } from '@scrypted/sdk/storage-settings';
import crypto from 'crypto';
import cloneDeep from 'lodash/cloneDeep';
@@ -50,6 +50,22 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
detections = new Map<string, MediaObject>();
cameraDevice: ScryptedDevice & Camera & VideoCamera & MotionSensor & ObjectDetector;
storageSettings = new StorageSettings(this, {
newPipeline: {
title: 'Video Pipeline',
description: 'Configure how frames are provided to the video analysis pipeline.',
onGet: async () => {
const choices = [
'Default',
...getAllDevices().filter(d => d.interfaces.includes(ScryptedInterface.VideoFrameGenerator)).map(d => d.name),
];
if (!this.hasMotionType)
choices.push('Snapshot');
return {
choices,
}
},
defaultValue: 'Default',
},
motionSensorSupplementation: {
title: 'Built-In Motion Sensor',
description: `This camera has a built in motion sensor. Using ${this.objectDetection.name} may be unnecessary and will use additional CPU. Replace will ignore the built in motion sensor. Filter will verify the motion sent by built in motion sensor. The Default is ${BUILTIN_MOTION_SENSOR_REPLACE}.`,
@@ -59,6 +75,10 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
BUILTIN_MOTION_SENSOR_REPLACE,
],
defaultValue: "Default",
onPut: () => {
this.endObjectDetection();
this.maybeStartMotionDetection();
}
},
captureMode: {
title: 'Capture Mode',
@@ -128,7 +148,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
analyzeStop = 0;
lastDetectionInput = 0;
constructor(public plugin: ObjectDetectionPlugin, mixinDevice: VideoCamera & Camera & MotionSensor & ObjectDetector & Settings, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState: { [key: string]: any }, providerNativeId: string, public objectDetection: ObjectDetection & ScryptedDevice, modelName: string, group: string, public hasMotionType: boolean, public settings: Setting[]) {
constructor(public plugin: ObjectDetectionPlugin, mixinDevice: VideoCamera & Camera & MotionSensor & ObjectDetector & Settings, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState: { [key: string]: any }, providerNativeId: string, public objectDetection: ObjectDetection & ScryptedDevice, public model: ObjectDetectionModel, group: string, public hasMotionType: boolean, public settings: Setting[]) {
super({
mixinDevice, mixinDeviceState,
mixinProviderNativeId: providerNativeId,
@@ -139,7 +159,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
});
this.cameraDevice = systemManager.getDeviceById<Camera & VideoCamera & MotionSensor & ObjectDetector>(this.id);
this.detectionId = modelName + '-' + this.cameraDevice.id;
this.detectionId = model.name + '-' + this.cameraDevice.id;
this.bindObjectDetection();
this.register();
@@ -157,7 +177,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (this.hasMotionType) {
// force a motion detection restart if it quit
if (this.motionSensorSupplementation === BUILTIN_MOTION_SENSOR_REPLACE)
await this.startVideoDetection();
await this.startStreamAnalysis();
return;
}
}, this.storageSettings.values.detectionInterval * 1000);
@@ -210,7 +230,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
return;
if (this.motionSensorSupplementation !== BUILTIN_MOTION_SENSOR_REPLACE)
return;
await this.startVideoDetection();
await this.startStreamAnalysis();
}
endObjectDetection() {
@@ -296,7 +316,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
return;
if (!this.detectorRunning)
this.console.log('built in motion sensor started motion, starting video detection.');
await this.startVideoDetection();
await this.startStreamAnalysis();
return;
}
@@ -473,22 +493,70 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (this.detectorRunning)
return;
const stream = await this.cameraDevice.getVideoStream({
destination: 'local-recorder',
// ask rebroadcast to mute audio, not needed.
audio: null,
});
this.detectorRunning = true;
this.analyzeStop = Date.now() + this.getDetectionDuration();
const videoFrameGenerator = this.newPipeline as VideoFrameGenerator;
const newPipeline = this.newPipeline;
let generator: () => Promise<AsyncGenerator<VideoFrame & MediaObject>>;
if (newPipeline === 'Snapshot' && !this.hasMotionType) {
const self = this;
generator = async () => (async function* gen() {
try {
while (self.detectorRunning) {
const now = Date.now();
const sleeper = async () => {
const diff = now + 1100 - Date.now();
if (diff > 0)
await sleep(diff);
};
let image: MediaObject & VideoFrame;
try {
const mo = await self.cameraDevice.takePicture({
reason: 'event',
});
image = await sdk.mediaManager.convertMediaObject(mo, ScryptedMimeTypes.Image);
}
catch (e) {
self.console.error('Video analysis snapshot failed. Will retry in a moment.');
await sleeper();
continue;
}
// self.console.log('yield')
yield image;
// self.console.log('done yield')
await sleeper();
}
}
finally {
self.console.log('Snapshot generation finished.');
}
})();
}
else {
const destination: MediaStreamDestination = this.hasMotionType ? 'low-resolution' : 'local-recorder';
const videoFrameGenerator = systemManager.getDeviceById<VideoFrameGenerator>(newPipeline);
if (!videoFrameGenerator)
throw new Error('invalid VideoFrameGenerator');
const stream = await this.cameraDevice.getVideoStream({
destination,
// ask rebroadcast to mute audio, not needed.
audio: null,
});
generator = async () => videoFrameGenerator.generateVideoFrames(stream, {
resize: this.model?.inputSize ? {
width: this.model.inputSize[0],
height: this.model.inputSize[1],
} : undefined,
format: this.model?.inputFormat,
});
}
try {
const start = Date.now();
let detections = 0;
for await (const detected
of await this.objectDetection.generateObjectDetections(await videoFrameGenerator.generateVideoFrames(stream), {
of await this.objectDetection.generateObjectDetections(await generator(), {
settings: this.getCurrentSettings(),
})) {
if (!this.detectorRunning) {
@@ -529,9 +597,15 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
// this.console.log('image saved', detected.detected.detections);
}
this.reportObjectDetections(detected.detected);
if (this.hasMotionType) {
await sleep(250);
}
// this.handleDetectionEvent(detected.detected);
}
}
catch (e) {
this.console.error('video pipeline ended with error', e);
}
finally {
this.endObjectDetection();
}
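
The 'Snapshot' pipeline added above is an async generator that polls takePicture roughly every 1.1 seconds, converts the result to an Image, and retries after the same delay when a snapshot fails. The surrounding plugin is TypeScript; the sketch below restates the loop shape in Python purely for illustration, with take_picture, convert_to_image, and is_running as hypothetical stand-ins for the SDK calls and the detectorRunning flag:

```python
# Illustrative restatement of the Snapshot pipeline generator above:
# poll a snapshot roughly every 1.1s, retry on failure, stop when detection ends.
import asyncio
import time

async def snapshot_frames(take_picture, convert_to_image, is_running):
    while is_running():
        start = time.monotonic()

        async def sleeper():
            # pad the iteration out to ~1.1 seconds, matching the TS code's 1100ms
            remaining = start + 1.1 - time.monotonic()
            if remaining > 0:
                await asyncio.sleep(remaining)

        try:
            raw = await take_picture()
            image = await convert_to_image(raw)
        except Exception:
            print("snapshot failed, retrying in a moment")
            await sleeper()
            continue

        yield image
        await sleeper()

async def demo():
    async def fake_picture():
        return b"jpeg-bytes"
    async def to_image(raw):
        return raw
    count = 0
    async for img in snapshot_frames(fake_picture, to_image, lambda: count < 3):
        count += 1
        print("got frame", count, "-", len(img), "bytes")

asyncio.run(demo())
```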
@@ -610,6 +684,19 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
}
normalizeBox(boundingBox: [number, number, number, number], inputDimensions: [number, number]) {
let [x, y, width, height] = boundingBox;
let x2 = x + width;
let y2 = y + height;
// the zones are point paths in percentage format
x = x * 100 / inputDimensions[0];
y = y * 100 / inputDimensions[1];
x2 = x2 * 100 / inputDimensions[0];
y2 = y2 * 100 / inputDimensions[1];
const box = [[x, y], [x2, y], [x2, y2], [x, y2]];
return box;
}
getDetectionDuration() {
// when motion type, the detection interval is a keepalive reset.
// the duration needs to simply be an arbitrarily longer time.
@@ -626,15 +713,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
continue;
o.zones = []
let [x, y, width, height] = o.boundingBox;
let x2 = x + width;
let y2 = y + height;
// the zones are point paths in percentage format
x = x * 100 / detection.inputDimensions[0];
y = y * 100 / detection.inputDimensions[1];
x2 = x2 * 100 / detection.inputDimensions[0];
y2 = y2 * 100 / detection.inputDimensions[1];
const box = [[x, y], [x2, y], [x2, y2], [x, y2]];
const box = this.normalizeBox(o.boundingBox, detection.inputDimensions);
let included: boolean;
for (const [zone, zoneValue] of Object.entries(this.zones)) {
@@ -674,6 +753,14 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
}
// if this is a motion sensor and there are no inclusion zones set up,
// use a default inclusion zone that crops the top and bottom to
// prevent errant motion from the on-screen time changing every second.
if (this.hasMotionType && included === undefined) {
const defaultInclusionZone = [[0, 10], [100, 10], [100, 90], [0, 90]];
included = polygonOverlap(box, defaultInclusionZone);
}
// if there are inclusion zones and this object
// was not in any of them, filter it out.
if (included === false)
@@ -849,8 +936,18 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
get newPipeline() {
return this.plugin.storageSettings.values.newPipeline;
}
if (!this.plugin.storageSettings.values.newPipeline)
return;
const newPipeline = this.storageSettings.values.newPipeline;
if (!newPipeline)
return newPipeline;
if (newPipeline === 'Snapshot')
return newPipeline;
const pipelines = getAllDevices().filter(d => d.interfaces.includes(ScryptedInterface.VideoFrameGenerator));
const found = pipelines.find(p => p.name === newPipeline);
return found?.id || pipelines[0]?.id;
}
async getMixinSettings(): Promise<Setting[]> {
const settings: Setting[] = [];
@@ -872,7 +969,8 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
this.storageSettings.settings.motionSensorSupplementation.hide = !this.hasMotionType || !this.mixinDeviceInterfaces.includes(ScryptedInterface.MotionSensor);
this.storageSettings.settings.captureMode.hide = this.hasMotionType || !!this.newPipeline;
this.storageSettings.settings.captureMode.hide = this.hasMotionType || !!this.plugin.storageSettings.values.newPipeline;
this.storageSettings.settings.newPipeline.hide = this.hasMotionType || !this.plugin.storageSettings.values.newPipeline;
this.storageSettings.settings.detectionDuration.hide = this.hasMotionType;
this.storageSettings.settings.detectionTimeout.hide = this.hasMotionType;
this.storageSettings.settings.motionDuration.hide = !this.hasMotionType;
@@ -1123,7 +1221,7 @@ class ObjectDetectorMixin extends MixinDeviceBase<ObjectDetection> implements Mi
const settings = this.model.settings;
const ret = new ObjectDetectionMixin(this.plugin, mixinDevice, mixinDeviceInterfaces, mixinDeviceState, this.mixinProviderNativeId, objectDetection, this.model.name, group, hasMotionType, settings);
const ret = new ObjectDetectionMixin(this.plugin, mixinDevice, mixinDeviceInterfaces, mixinDeviceState, this.mixinProviderNativeId, objectDetection, this.model, group, hasMotionType, settings);
this.currentMixins.add(ret);
return ret;
}
@@ -1141,8 +1239,7 @@ class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings
newPipeline: {
title: 'New Video Pipeline',
description: 'WARNING! DO NOT ENABLE: Use the new video pipeline. Leave blank to use the legacy pipeline.',
type: 'device',
deviceFilter: `interfaces.includes('${ScryptedInterface.VideoFrameGenerator}')`,
type: 'boolean',
},
activeMotionDetections: {
title: 'Active Motion Detection Sessions',
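
Two small geometric pieces in this diff are worth noting: normalizeBox turns an (x, y, width, height) box into a four-point polygon in percentage coordinates, and motion cameras with no user-defined zones now intersect detections with a default inclusion zone spanning 10% to 90% of the frame height, so on-screen clock overlays at the top or bottom edge cannot trigger motion. A numeric sketch of both follows (Python for illustration; polygon_overlap is a simplified stand-in for the plugin's polygonOverlap helper, exact here because both shapes are axis-aligned rectangles):

```python
# Sketch of the box-to-percentage conversion and the default inclusion zone check.
def normalize_box(bounding_box, input_dimensions):
    x, y, w, h = bounding_box
    iw, ih = input_dimensions
    x1, y1 = x * 100 / iw, y * 100 / ih
    x2, y2 = (x + w) * 100 / iw, (y + h) * 100 / ih
    # point path in percentage coordinates, same ordering as normalizeBox
    return [[x1, y1], [x2, y1], [x2, y2], [x1, y2]]

def polygon_overlap(a, b):
    ax = [p[0] for p in a]; ay = [p[1] for p in a]
    bx = [p[0] for p in b]; by = [p[1] for p in b]
    return not (max(ax) < min(bx) or max(bx) < min(ax) or
                max(ay) < min(by) or max(by) < min(ay))

# default zone crops the top and bottom 10% of the frame
DEFAULT_INCLUSION_ZONE = [[0, 10], [100, 10], [100, 90], [0, 90]]

# on a 1920x1080 frame, an OSD clock box hugging the top edge is excluded,
# while a box in the middle of the frame is kept.
clock = normalize_box((20, 10, 300, 60), (1920, 1080))      # spans ~1%..6.5% of height
person = normalize_box((800, 400, 200, 400), (1920, 1080))
print(polygon_overlap(clock, DEFAULT_INCLUSION_ZONE))       # False
print(polygon_overlap(person, DEFAULT_INCLUSION_ZONE))      # True
```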

View File

@@ -9,3 +9,4 @@ dist/*.js
dist/*.txt
__pycache__
all_models
.venv

View File

@@ -16,6 +16,6 @@
"scrypted.pythonRemoteRoot": "${config:scrypted.serverRoot}/volume/plugin.zip",
"python.analysis.extraPaths": [
"./node_modules/@scrypted/sdk/scrypted_python"
"./node_modules/@scrypted/sdk/types/scrypted_python"
]
}

View File

@@ -1,50 +1,52 @@
{
"name": "@scrypted/opencv",
"version": "0.0.64",
"version": "0.0.66",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/opencv",
"version": "0.0.64",
"version": "0.0.66",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.1.17",
"version": "0.2.85",
"dev": true,
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.16.7",
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"babel-loader": "^8.2.3",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
"ncp": "^2.0.0",
"raw-loader": "^4.0.2",
"rimraf": "^3.0.2",
"tmp": "^0.2.1",
"webpack": "^5.74.0",
"ts-loader": "^9.4.2",
"typescript": "^4.9.4",
"webpack": "^5.75.0",
"webpack-bundle-analyzer": "^4.5.0"
},
"bin": {
"scrypted-changelog": "bin/scrypted-changelog.js",
"scrypted-debug": "bin/scrypted-debug.js",
"scrypted-deploy": "bin/scrypted-deploy.js",
"scrypted-deploy-debug": "bin/scrypted-deploy-debug.js",
"scrypted-package-json": "bin/scrypted-package-json.js",
"scrypted-readme": "bin/scrypted-readme.js",
"scrypted-setup-project": "bin/scrypted-setup-project.js",
"scrypted-webpack": "bin/scrypted-webpack.js"
},
"devDependencies": {
"@types/node": "^16.11.1",
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"stringify-object": "^3.3.0",
"ts-node": "^10.4.0",
"typedoc": "^0.23.15"
"typedoc": "^0.23.21"
}
},
"../sdk": {
@@ -59,12 +61,12 @@
"@scrypted/sdk": {
"version": "file:../../sdk",
"requires": {
"@babel/preset-typescript": "^7.16.7",
"@types/node": "^16.11.1",
"@babel/preset-typescript": "^7.18.6",
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"adm-zip": "^0.4.13",
"axios": "^0.21.4",
"babel-loader": "^8.2.3",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
"ncp": "^2.0.0",
@@ -72,9 +74,11 @@
"rimraf": "^3.0.2",
"stringify-object": "^3.3.0",
"tmp": "^0.2.1",
"ts-loader": "^9.4.2",
"ts-node": "^10.4.0",
"typedoc": "^0.23.15",
"webpack": "^5.74.0",
"typedoc": "^0.23.21",
"typescript": "^4.9.4",
"webpack": "^5.75.0",
"webpack-bundle-analyzer": "^4.5.0"
}
}

View File

@@ -36,5 +36,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.64"
"version": "0.0.66"
}

View File

@@ -3,6 +3,7 @@ from time import sleep
from detect import DetectionSession, DetectPlugin
from typing import Any, List, Tuple
import numpy as np
import asyncio
import cv2
import imutils
Gst = None
@@ -10,7 +11,7 @@ try:
from gi.repository import Gst
except:
pass
from scrypted_sdk.types import ObjectDetectionModel, ObjectDetectionResult, ObjectsDetected, Setting
from scrypted_sdk.types import ObjectDetectionModel, ObjectDetectionResult, ObjectsDetected, Setting, VideoFrame
from PIL import Image
class OpenCVDetectionSession(DetectionSession):
@@ -93,6 +94,9 @@ class OpenCVPlugin(DetectPlugin):
def get_pixel_format(self):
return self.pixelFormat
def get_input_format(self) -> str:
return 'gray'
def parse_settings(self, settings: Any):
area = defaultArea
@@ -106,7 +110,8 @@ class OpenCVPlugin(DetectPlugin):
blur = int(settings.get('blur', blur))
return area, threshold, interval, blur
def detect(self, detection_session: OpenCVDetectionSession, frame, settings: Any, src_size, convert_to_src_size) -> ObjectsDetected:
def detect(self, detection_session: OpenCVDetectionSession, frame, src_size, convert_to_src_size) -> ObjectsDetected:
settings = detection_session.settings
area, threshold, interval, blur = self.parse_settings(settings)
# see get_detection_input_size on undocumented size requirements for GRAY8
@@ -119,10 +124,15 @@ class OpenCVPlugin(DetectPlugin):
detection_session.curFrame = cv2.GaussianBlur(
gray, (blur, blur), 0, dst=detection_session.curFrame)
detections: List[ObjectDetectionResult] = []
detection_result: ObjectsDetected = {}
detection_result['detections'] = detections
detection_result['inputDimensions'] = src_size
if detection_session.previous_frame is None:
detection_session.previous_frame = detection_session.curFrame
detection_session.curFrame = None
return
return detection_result
detection_session.frameDelta = cv2.absdiff(
detection_session.previous_frame, detection_session.curFrame, dst=detection_session.frameDelta)
@@ -138,10 +148,6 @@ class OpenCVPlugin(DetectPlugin):
detection_session.dilated, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
contours = imutils.grab_contours(fcontours)
detections: List[ObjectDetectionResult] = []
detection_result: ObjectsDetected = {}
detection_result['detections'] = detections
detection_result['inputDimensions'] = src_size
for c in contours:
x, y, w, h = cv2.boundingRect(c)
@@ -163,6 +169,9 @@ class OpenCVPlugin(DetectPlugin):
detections.append(detection)
return detection_result
def get_input_details(self) -> Tuple[int, int, int]:
return (300, 300, 1)
def get_detection_input_size(self, src_size):
# The initial implementation of this plugin used BGRA
@@ -197,11 +206,45 @@ class OpenCVPlugin(DetectPlugin):
detection_session.cap = None
return super().end_session(detection_session)
def run_detection_image(self, detection_session: DetectionSession, image: Image.Image, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
async def run_detection_image(self, detection_session: DetectionSession, image: Image.Image, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
# todo
raise Exception('can not run motion detection on image')
async def run_detection_videoframe(self, videoFrame: VideoFrame, detection_session: OpenCVDetectionSession) -> ObjectsDetected:
width = videoFrame.width
height = videoFrame.height
def run_detection_avframe(self, detection_session: DetectionSession, avframe, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
aspectRatio = width / height
# dont bother resizing if its already fairly small
if width <= 640 and height < 640:
scale = 1
resize = None
elif aspectRatio > 1:
scale = height / 300
resize = {
'height': 300,
'width': int(300 * aspectRatio)
}
else:
scale = width / 300
resize = {
'width': 300,
'height': int(300 / aspectRatio)
}
buffer = await videoFrame.toBuffer({
'resize': resize,
})
def convert_to_src_size(point, normalize = False):
return point[0] * scale, point[1] * scale, True
mat = np.ndarray((videoFrame.height, videoFrame.width, self.pixelFormatChannelCount), buffer=buffer, dtype=np.uint8)
detections = self.detect(
detection_session, mat, (width, height), convert_to_src_size)
return detections
async def run_detection_avframe(self, detection_session: DetectionSession, avframe, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
if avframe.format.name != 'yuv420p' and avframe.format.name != 'yuvj420p':
mat = avframe.to_ndarray(format='gray8')
else:
@@ -209,11 +252,11 @@ class OpenCVPlugin(DetectPlugin):
detections = self.detect(
detection_session, mat, settings, src_size, convert_to_src_size)
if not detections or not len(detections['detections']):
self.detection_sleep(settings)
await self.detection_sleep(settings)
return None, None
return detections, None
def run_detection_gstsample(self, detection_session: OpenCVDetectionSession, gst_sample, settings: Any, src_size, convert_to_src_size) -> ObjectsDetected:
async def run_detection_gstsample(self, detection_session: OpenCVDetectionSession, gst_sample, settings: Any, src_size, convert_to_src_size) -> ObjectsDetected:
buf = gst_sample.get_buffer()
caps = gst_sample.get_caps()
# can't trust the width value, compute the stride
@@ -230,24 +273,24 @@ class OpenCVPlugin(DetectPlugin):
buffer=info.data,
dtype=np.uint8)
detections = self.detect(
detection_session, mat, settings, src_size, convert_to_src_size)
detection_session, mat, src_size, convert_to_src_size)
# no point in triggering empty events.
finally:
buf.unmap(info)
if not detections or not len(detections['detections']):
self.detection_sleep(settings)
await self.detection_sleep(settings)
return None, None
return detections, None
def create_detection_session(self):
return OpenCVDetectionSession()
def detection_sleep(self, settings: Any):
async def detection_sleep(self, settings: Any):
area, threshold, interval, blur = self.parse_settings(settings)
# it is safe to block here because gstreamer creates a queue thread
sleep(interval / 1000)
await asyncio.sleep(interval / 1000)
def detection_event_notified(self, settings: Any):
self.detection_sleep(settings)
return super().detection_event_notified(settings)
async def detection_event_notified(self, settings: Any):
await self.detection_sleep(settings)
return await super().detection_event_notified(settings)
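
run_detection_videoframe above pushes the downscale to the frame source: frames at or under roughly 640px pass through untouched, otherwise the short side is brought to 300px with the aspect ratio preserved, and the resulting scale factor is what maps contour coordinates back to the source resolution. The same diff also swaps the blocking sleep throttle for await asyncio.sleep, since detection now runs on the shared event loop rather than a dedicated GStreamer callback thread. A standalone sketch of the sizing arithmetic (function names here are illustrative):

```python
# Sketch of the resize/scale arithmetic used by run_detection_videoframe above:
# leave small frames alone, otherwise bring the short side to 300px and remember
# the factor needed to map detection coordinates back to the source resolution.
def plan_resize(width, height):
    aspect = width / height
    if width <= 640 and height < 640:
        return None, 1                      # small enough, analyze as-is
    if aspect > 1:                          # landscape: height is the short side
        return {"width": int(300 * aspect), "height": 300}, height / 300
    return {"width": 300, "height": int(300 / aspect)}, width / 300

def to_src(point, scale):
    # motion contours are found in the resized frame; scale them back up
    return point[0] * scale, point[1] * scale

resize, scale = plan_resize(1920, 1080)
print(resize, scale)                        # {'width': 533, 'height': 300} 3.6
print(to_src((100, 150), scale))            # (360.0, 540.0)
```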

View File

@@ -1,7 +1,7 @@
{
// docker installation
// "scrypted.debugHost": "koushik-thin",
// "scrypted.debugHost": "koushik-ubuntu",
// "scrypted.serverRoot": "/server",
// pi local installation

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/python-codecs",
"version": "0.0.23",
"version": "0.1.11",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/python-codecs",
"version": "0.0.23",
"version": "0.1.11",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -1,6 +1,7 @@
{
"name": "@scrypted/python-codecs",
"description": "Scrypted Python Codecs",
"version": "0.1.11",
"description": "Python Codecs for Scrypted",
"keywords": [
"scrypted",
"plugin",
@@ -24,11 +25,10 @@
"runtime": "python",
"type": "API",
"interfaces": [
"VideoFrameGenerator"
"DeviceProvider"
]
},
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.23"
}
}

View File

@@ -0,0 +1,136 @@
import concurrent.futures
import threading
import asyncio
from queue import Queue
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstBase', '1.0')
from gi.repository import GLib, GObject, Gst
GObject.threads_init()
Gst.init(None)
except:
pass
class Callback:
def __init__(self, callback) -> None:
if callback:
self.loop = asyncio.get_running_loop()
self.callback = callback
else:
self.loop = None
self.callback = None
def createPipelineIterator(pipeline: str):
pipeline = '{pipeline} ! queue leaky=downstream max-size-buffers=0 ! appsink name=appsink emit-signals=true sync=false max-buffers=-1 drop=true'.format(pipeline=pipeline)
print(pipeline)
gst = Gst.parse_launch(pipeline)
bus = gst.get_bus()
def on_bus_message(bus, message):
t = str(message.type)
# print(t)
if t == str(Gst.MessageType.EOS):
finish()
elif t == str(Gst.MessageType.WARNING):
err, debug = message.parse_warning()
print('Warning: %s: %s\n' % (err, debug))
elif t == str(Gst.MessageType.ERROR):
err, debug = message.parse_error()
print('Error: %s: %s\n' % (err, debug))
finish()
def stopGst():
bus.remove_signal_watch()
bus.disconnect(watchId)
gst.set_state(Gst.State.NULL)
def finish():
nonlocal hasFinished
hasFinished = True
callback = Callback(None)
callbackQueue.put(callback)
if not asyncFuture.done():
asyncFuture.set_result(None)
if not finished.done():
finished.set_result(None)
watchId = bus.connect('message', on_bus_message)
bus.add_signal_watch()
finished = concurrent.futures.Future()
finished.add_done_callback(lambda _: threading.Thread(target=stopGst, name="StopGst").start())
hasFinished = False
appsink = gst.get_by_name('appsink')
callbackQueue = Queue()
asyncFuture = asyncio.Future()
async def gen():
try:
while True:
nonlocal asyncFuture
asyncFuture = asyncio.Future()
yieldFuture = asyncio.Future()
async def asyncCallback(sample):
asyncFuture.set_result(sample)
await yieldFuture
callbackQueue.put(Callback(asyncCallback))
sample = await asyncFuture
if not sample:
yieldFuture.set_result(None)
break
try:
yield sample
finally:
yieldFuture.set_result(None)
finally:
finish()
print('gstreamer finished')
def on_new_sample(sink, preroll):
nonlocal hasFinished
sample = sink.emit('pull-preroll' if preroll else 'pull-sample')
callback: Callback = callbackQueue.get()
if not callback.callback or hasFinished:
hasFinished = True
if callback.callback:
asyncio.run_coroutine_threadsafe(callback.callback(None), loop = callback.loop)
return Gst.FlowReturn.OK
future = asyncio.run_coroutine_threadsafe(callback.callback(sample), loop = callback.loop)
try:
future.result()
except:
pass
return Gst.FlowReturn.OK
appsink.connect('new-preroll', on_new_sample, True)
appsink.connect('new-sample', on_new_sample, False)
gst.set_state(Gst.State.PLAYING)
return gst, gen
def mainThread():
async def asyncMain():
gst, gen = createPipelineIterator('rtspsrc location=rtsp://localhost:59668/18cc179a814fd5b3 ! rtph264depay ! h264parse ! vtdec_hw ! videoconvert ! video/x-raw')
i = 0
async for sample in gen():
print('sample')
i = i + 1
if i == 10:
break
loop = asyncio.new_event_loop()
asyncio.ensure_future(asyncMain(), loop = loop)
loop.run_forever()
if __name__ == "__main__":
threading.Thread(target = mainThread).start()
mainLoop = GLib.MainLoop()
mainLoop.run()

View File

@@ -1,132 +1,87 @@
import concurrent.futures
import threading
import asyncio
from queue import Queue
from gst_generator import createPipelineIterator
from util import optional_chain
import scrypted_sdk
from typing import Any
from urllib.parse import urlparse
import pyvips
from vips import createVipsMediaObject, VipsImage
import platform
Gst = None
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstBase', '1.0')
from gi.repository import GLib, GObject, Gst
GObject.threads_init()
Gst.init(None)
from gi.repository import Gst
except:
pass
class Callback:
def __init__(self, callback) -> None:
self.loop = asyncio.get_running_loop()
self.callback = callback
async def generateVideoFramesGstreamer(mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None, h264Decoder: str = None) -> scrypted_sdk.VideoFrame:
ffmpegInput: scrypted_sdk.FFmpegInput = await scrypted_sdk.mediaManager.convertMediaObjectToJSON(mediaObject, scrypted_sdk.ScryptedMimeTypes.FFmpegInput.value)
container = ffmpegInput.get('container', None)
videosrc = ffmpegInput.get('url')
videoCodec = optional_chain(ffmpegInput, 'mediaStreamOptions', 'video', 'codec')
def createPipelineIterator(pipeline: str):
pipeline = '{pipeline} ! queue leaky=downstream max-size-buffers=0 ! appsink name=appsink emit-signals=true sync=false max-buffers=-1 drop=true'.format(pipeline=pipeline)
print(pipeline)
gst = Gst.parse_launch(pipeline)
bus = gst.get_bus()
if videosrc.startswith('tcp://'):
parsed_url = urlparse(videosrc)
videosrc = 'tcpclientsrc port=%s host=%s' % (
parsed_url.port, parsed_url.hostname)
if container == 'mpegts':
videosrc += ' ! tsdemux'
elif container == 'sdp':
videosrc += ' ! sdpdemux'
else:
raise Exception('unknown container %s' % container)
elif videosrc.startswith('rtsp'):
videosrc = 'rtspsrc buffer-mode=0 location=%s protocols=tcp latency=0 is-live=false' % videosrc
if videoCodec == 'h264':
videosrc += ' ! rtph264depay ! h264parse'
def on_bus_message(bus, message):
t = str(message.type)
# print(t)
if t == str(Gst.MessageType.EOS):
finish()
elif t == str(Gst.MessageType.WARNING):
err, debug = message.parse_warning()
print('Warning: %s: %s\n' % (err, debug))
elif t == str(Gst.MessageType.ERROR):
err, debug = message.parse_error()
print('Error: %s: %s\n' % (err, debug))
finish()
videocaps = 'video/x-raw'
# if options and options.get('resize'):
# videocaps = 'videoscale ! video/x-raw,width={width},height={height}'.format(width=options['resize']['width'], height=options['resize']['height'])
def stopGst():
bus.remove_signal_watch()
bus.disconnect(watchId)
gst.set_state(Gst.State.NULL)
format = options and options.get('format')
# I420 is a cheap way to get gray out of an h264 stream without color conversion.
if format == 'gray':
format = 'I420'
bands = 1
else:
format = 'RGB'
bands = 3
videocaps += ',format={format}'.format(format=format)
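# With I420, the mapped buffer is width*height*3/2 bytes: the full-resolution
# luma (Y) plane followed by the subsampled U and V planes. pyvips only wraps
# the first width*height bytes for the single-band image below, which is
# effectively the gray plane with no colorspace conversion.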
def finish():
nonlocal hasFinished
hasFinished = True
callback = Callback(None)
callbackQueue.put(callback)
if not asyncFuture.done():
asyncFuture.set_result(None)
if not finished.done():
finished.set_result(None)
decoder = 'decodebin'
if videoCodec == 'h264':
decoder = h264Decoder or 'Default'
if decoder == 'Default':
if platform.system() == 'Darwin':
decoder = 'vtdec_hw'
else:
decoder = 'decodebin'
watchId = bus.connect('message', on_bus_message)
bus.add_signal_watch()
videosrc += ' ! {decoder} ! queue leaky=downstream max-size-buffers=0 ! videoconvert ! {videocaps}'.format(decoder=decoder, videocaps=videocaps)
finished = concurrent.futures.Future()
finished.add_done_callback(lambda _: threading.Thread(target=stopGst, name="StopGst").start())
hasFinished = False
gst, gen = createPipelineIterator(videosrc)
async for gstsample in gen():
caps = gstsample.get_caps()
height = caps.get_structure(0).get_value('height')
width = caps.get_structure(0).get_value('width')
gst_buffer = gstsample.get_buffer()
result, info = gst_buffer.map(Gst.MapFlags.READ)
if not result:
continue
appsink = gst.get_by_name('appsink')
callbackQueue = Queue()
asyncFuture = asyncio.Future()
async def gen():
try:
while True:
nonlocal asyncFuture
asyncFuture = asyncio.Future()
yieldFuture = asyncio.Future()
async def asyncCallback(sample):
asyncFuture.set_result(sample)
await yieldFuture
callbackQueue.put(Callback(asyncCallback))
sample = await asyncFuture
if not sample:
yieldFuture.set_result(None)
break
try:
yield sample
finally:
yieldFuture.set_result(None)
finally:
finish()
print('gstreamer finished')
def on_new_sample(sink, preroll):
nonlocal hasFinished
sample = sink.emit('pull-preroll' if preroll else 'pull-sample')
callback: Callback = callbackQueue.get()
if not callback.callback or hasFinished:
hasFinished = True
if callback.callback:
asyncio.run_coroutine_threadsafe(callback.callback(None), loop = callback.loop)
return Gst.FlowReturn.OK
future = asyncio.run_coroutine_threadsafe(callback.callback(sample), loop = callback.loop)
try:
future.result()
except:
pass
return Gst.FlowReturn.OK
appsink.connect('new-preroll', on_new_sample, True)
appsink.connect('new-sample', on_new_sample, False)
gst.set_state(Gst.State.PLAYING)
return gst, gen
def mainThread():
async def asyncMain():
gst, gen = createPipelineIterator('rtspsrc location=rtsp://localhost:59668/18cc179a814fd5b3 ! rtph264depay ! h264parse ! vtdec_hw ! videoconvert ! video/x-raw')
i = 0
async for sample in gen():
print('sample')
i = i + 1
if i == 10:
break
loop = asyncio.new_event_loop()
asyncio.ensure_future(asyncMain(), loop = loop)
loop.run_forever()
if __name__ == "__main__":
threading.Thread(target = mainThread).start()
mainLoop = GLib.MainLoop()
mainLoop.run()
vips = pyvips.Image.new_from_memory(info.data, width, height, bands, pyvips.BandFormat.UCHAR)
vipsImage = VipsImage(vips)
try:
mo = await createVipsMediaObject(VipsImage(vips))
yield mo
finally:
vipsImage.vipsImage.invalidate()
vipsImage.vipsImage = None
finally:
gst_buffer.unmap(info)
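For reference, the videosrc string handed to createPipelineIterator (which then appends its own leaky queue and appsink) for a typical H.264 RTSP source with the 'Default' decoder on macOS would look roughly like the following; the location URL is a placeholder:

rtspsrc buffer-mode=0 location=rtsp://... protocols=tcp latency=0 is-live=false ! rtph264depay ! h264parse ! vtdec_hw ! queue leaky=downstream max-size-buffers=0 ! videoconvert ! video/x-raw,format=RGB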


@@ -0,0 +1,51 @@
import time
from gst_generator import createPipelineIterator
import scrypted_sdk
from typing import Any
import pyvips
from vips import createVipsMediaObject, VipsImage
av = None
try:
import av
av.logging.set_level(av.logging.PANIC)
except:
pass
async def generateVideoFramesLibav(mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None) -> scrypted_sdk.VideoFrame:
ffmpegInput: scrypted_sdk.FFmpegInput = await scrypted_sdk.mediaManager.convertMediaObjectToJSON(mediaObject, scrypted_sdk.ScryptedMimeTypes.FFmpegInput.value)
videosrc = ffmpegInput.get('url')
container = av.open(videosrc)
# none of this stuff seems to work. might be libav being slow with rtsp.
# container.no_buffer = True
# container.gen_pts = False
# container.options['-analyzeduration'] = '0'
# container.options['-probesize'] = '500000'
stream = container.streams.video[0]
# stream.codec_context.thread_count = 1
# stream.codec_context.low_delay = True
# stream.codec_context.options['-analyzeduration'] = '0'
# stream.codec_context.options['-probesize'] = '500000'
start = 0
try:
for idx, frame in enumerate(container.decode(stream)):
now = time.time()
if not start:
start = now
elapsed = now - start
if (frame.time or 0) < elapsed - 0.500:
# print('too slow, skipping frame')
continue
# print(frame)
vips = pyvips.Image.new_from_array(frame.to_ndarray(format='rgb24'))
vipsImage = VipsImage(vips)
try:
mo = await createVipsMediaObject(VipsImage(vips))
yield mo
finally:
vipsImage.vipsImage.invalidate()
vipsImage.vipsImage = None
finally:
container.close()
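The pacing check above drops any decoded frame whose timestamp has fallen more than half a second behind wall-clock time, so a slow consumer does not accumulate latency on a live source. A minimal standalone sketch of the same idea, assuming only PyAV is installed and using a placeholder RTSP URL:

import time
import av

def decode_realtime(url: str):
    # decode frames, skipping any that are already >0.5s behind wall clock
    container = av.open(url)
    try:
        stream = container.streams.video[0]
        start = None
        for frame in container.decode(stream):
            now = time.time()
            if start is None:
                start = now
            elapsed = now - start
            if (frame.time or 0) < elapsed - 0.5:
                continue
            yield frame
    finally:
        container.close()

# usage (placeholder URL):
# for frame in decode_realtime('rtsp://camera.local/stream'):
#     print(frame.width, frame.height)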


@@ -1,165 +1,136 @@
from gstreamer import createPipelineIterator
import asyncio
from util import optional_chain
import scrypted_sdk
from scrypted_sdk import Setting, SettingValue
from typing import Any
from urllib.parse import urlparse
import pyvips
import threading
import traceback
import concurrent.futures
import gstreamer
import libav
import vips
Gst = None
try:
import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstBase', '1.0')
from gi.repository import Gst
except:
pass
vipsExecutor = concurrent.futures.ThreadPoolExecutor(max_workers=2, thread_name_prefix="vips")
av = None
try:
import av
except:
pass
async def to_thread(f):
loop = asyncio.get_running_loop()
return await loop.run_in_executor(vipsExecutor, f)
class LibavGenerator(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.VideoFrameGenerator):
async def generateVideoFrames(self, mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None) -> scrypted_sdk.VideoFrame:
worker = scrypted_sdk.fork()
forked: CodecFork = await worker.result
return await forked.generateVideoFramesLibav(mediaObject, options, filter)
class VipsImage(scrypted_sdk.VideoFrame):
def __init__(self, vipsImage: pyvips.Image) -> None:
super().__init__()
self.vipsImage = vipsImage
self.width = vipsImage.width
self.height = vipsImage.height
async def toBuffer(self, options: scrypted_sdk.ImageOptions = None) -> bytearray:
vipsImage: VipsImage = await self.toVipsImage(options)
if not options or not options.get('format', None):
def format():
return memoryview(vipsImage.vipsImage.write_to_memory())
return await to_thread(format)
elif options['format'] == 'rgb':
def format():
if vipsImage.vipsImage.hasalpha():
rgb = vipsImage.vipsImage.extract_band(0, vipsImage.vipsImage.bands - 1)
else:
rgb = vipsImage.vipsImage
mem = memoryview(rgb.write_to_memory())
return mem
return await to_thread(format)
return await to_thread(lambda: vipsImage.vipsImage.write_to_buffer('.' + options['format']))
async def toVipsImage(self, options: scrypted_sdk.ImageOptions = None):
return await to_thread(lambda: toVipsImage(self, options))
async def toImage(self, options: scrypted_sdk.ImageOptions = None) -> Any:
if options and options.get('format', None):
raise Exception('format can only be used with toBuffer')
newVipsImage = await self.toVipsImage(options)
return await createVipsMediaObject(newVipsImage)
def toVipsImage(vipsImageWrapper: VipsImage, options: scrypted_sdk.ImageOptions = None) -> VipsImage:
vipsImage = vipsImageWrapper.vipsImage
if not vipsImage:
raise Exception('Video Frame has been invalidated')
options = options or {}
crop = options.get('crop')
if crop:
vipsImage = vipsImage.crop(int(crop['left']), int(crop['top']), int(crop['width']), int(crop['height']))
resize = options.get('resize')
if resize:
xscale = None
if resize.get('width'):
xscale = resize['width'] / vipsImage.width
scale = xscale
yscale = None
if resize.get('height'):
yscale = resize['height'] / vipsImage.height
scale = yscale
if xscale and yscale:
scale = min(yscale, xscale)
xscale = xscale or yscale
yscale = yscale or xscale
vipsImage = vipsImage.resize(xscale, vscale=yscale, kernel='linear')
return VipsImage(vipsImage)
async def createVipsMediaObject(image: VipsImage):
ret = await scrypted_sdk.mediaManager.createMediaObject(image, scrypted_sdk.ScryptedMimeTypes.Image.value, {
'width': image.width,
'height': image.height,
'toBuffer': lambda options = None: image.toBuffer(options),
'toImage': lambda options = None: image.toImage(options),
})
return ret
class PythonCodecs(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.VideoFrameGenerator):
class GstreamerGenerator(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.VideoFrameGenerator, scrypted_sdk.Settings):
async def generateVideoFrames(self, mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None) -> scrypted_sdk.VideoFrame:
worker = scrypted_sdk.fork()
forked: CodecFork = await worker.result
return await forked.generateVideoFramesGstreamer(mediaObject, options, filter, self.storage.getItem('h264Decoder'))
async def getSettings(self) -> list[Setting]:
return [
{
'key': 'h264Decoder',
'title': 'H264 Decoder',
'description': 'The Gstreamer pipeline to use to decode H264 video.',
'value': self.storage.getItem('h264Decoder') or 'Default',
'choices': [
'Default',
'decodebin',
'vtdec_hw',
'nvh264dec',
'vaapih264dec',
],
'combobox': True,
}
]
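# 'vtdec_hw' is the VideoToolbox hardware decoder on macOS, 'nvh264dec' uses
# NVIDIA NVDEC, and 'vaapih264dec' uses VA-API on Intel/AMD; 'decodebin' lets
# GStreamer autoplug a decoder, which is what 'Default' falls back to on
# non-macOS platforms.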
async def putSetting(self, key: str, value: SettingValue) -> None:
self.storage.setItem(key, value)
await scrypted_sdk.deviceManager.onDeviceEvent(self.nativeId, scrypted_sdk.ScryptedInterface.Settings.value, None)
class PythonCodecs(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.DeviceProvider):
def __init__(self, nativeId = None):
super().__init__(nativeId)
async def generateVideoFrames(self, mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None) -> scrypted_sdk.VideoFrame:
worker = scrypted_sdk.fork()
forked = await worker.result
return await forked.generateVideoFrames(mediaObject, options, filter)
asyncio.ensure_future(self.initialize())
async def initialize(self):
manifest: scrypted_sdk.DeviceManifest = {
'devices': [],
}
if Gst:
gstDevice: scrypted_sdk.Device = {
'name': 'Gstreamer',
'nativeId': 'gstreamer',
'interfaces': [
scrypted_sdk.ScryptedInterface.VideoFrameGenerator.value,
scrypted_sdk.ScryptedInterface.Settings.value,
],
'type': scrypted_sdk.ScryptedDeviceType.API.value,
}
manifest['devices'].append(gstDevice)
if av:
avDevice: scrypted_sdk.Device = {
'name': 'Libav',
'nativeId': 'libav',
'interfaces': [
scrypted_sdk.ScryptedInterface.VideoFrameGenerator.value,
],
'type': scrypted_sdk.ScryptedDeviceType.API.value,
}
manifest['devices'].append(avDevice)
manifest['devices'].append({
'name': 'Image Reader',
'type': scrypted_sdk.ScryptedDeviceType.Builtin.value,
'nativeId': 'reader',
'interfaces': [
scrypted_sdk.ScryptedInterface.BufferConverter.value,
]
})
manifest['devices'].append({
'name': 'Image Writer',
'type': scrypted_sdk.ScryptedDeviceType.Builtin.value,
'nativeId': 'writer',
'interfaces': [
scrypted_sdk.ScryptedInterface.BufferConverter.value,
]
})
await scrypted_sdk.deviceManager.onDevicesChanged(manifest)
def getDevice(self, nativeId: str) -> Any:
if nativeId == 'gstreamer':
return GstreamerGenerator('gstreamer')
if nativeId == 'libav':
return LibavGenerator('libav')
if nativeId == 'reader':
return vips.ImageReader('reader')
if nativeId == 'writer':
return vips.ImageWriter('writer')
def create_scrypted_plugin():
return PythonCodecs()
async def generateVideoFrames(mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None) -> scrypted_sdk.VideoFrame:
ffmpegInput: scrypted_sdk.FFmpegInput = await scrypted_sdk.mediaManager.convertMediaObjectToJSON(mediaObject, scrypted_sdk.ScryptedMimeTypes.FFmpegInput.value)
container = ffmpegInput.get('container', None)
videosrc = ffmpegInput.get('url')
videoCodec = optional_chain(ffmpegInput, 'mediaStreamOptions', 'video', 'codec')
if videosrc.startswith('tcp://'):
parsed_url = urlparse(videosrc)
videosrc = 'tcpclientsrc port=%s host=%s' % (
parsed_url.port, parsed_url.hostname)
if container == 'mpegts':
videosrc += ' ! tsdemux'
elif container == 'sdp':
videosrc += ' ! sdpdemux'
else:
raise Exception('unknown container %s' % container)
elif videosrc.startswith('rtsp'):
videosrc = 'rtspsrc buffer-mode=0 location=%s protocols=tcp latency=0 is-live=false' % videosrc
if videoCodec == 'h264':
videosrc += ' ! rtph264depay ! h264parse'
videosrc += ' ! decodebin ! queue leaky=downstream max-size-buffers=0 ! videoconvert ! video/x-raw,format=RGB'
gst, gen = createPipelineIterator(videosrc)
async for gstsample in gen():
caps = gstsample.get_caps()
height = caps.get_structure(0).get_value('height')
width = caps.get_structure(0).get_value('width')
gst_buffer = gstsample.get_buffer()
result, info = gst_buffer.map(Gst.MapFlags.READ)
if not result:
continue
try:
# pyvips.Image.new_from_memory(info.data, width, height, 3, pyvips.BandFormat.UCHAR)
vips = pyvips.Image.new_from_memory(info.data, width, height, 3, pyvips.BandFormat.UCHAR)
vipsImage = VipsImage(vips)
try:
mo = await createVipsMediaObject(VipsImage(vips))
yield mo
finally:
vipsImage.vipsImage.invalidate()
vipsImage.vipsImage = None
finally:
gst_buffer.unmap(info)
class CodecFork:
async def generateVideoFrames(self, mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None) -> scrypted_sdk.VideoFrame:
async def generateVideoFramesGstreamer(self, mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None, h264Decoder: str = None) -> scrypted_sdk.VideoFrame:
try:
async for data in generateVideoFrames(mediaObject, options, filter):
async for data in gstreamer.generateVideoFramesGstreamer(mediaObject, options, filter, h264Decoder):
yield data
finally:
import os
os._exit(os.EX_OK)
pass
async def generateVideoFramesLibav(self, mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None) -> scrypted_sdk.VideoFrame:
try:
async for data in libav.generateVideoFramesLibav(mediaObject, options, filter):
yield data
finally:
import os


@@ -0,0 +1,112 @@
import time
from gst_generator import createPipelineIterator
import asyncio
from util import optional_chain
import scrypted_sdk
from typing import Any
from urllib.parse import urlparse
import pyvips
import concurrent.futures
# vips is already multithreaded, but needs to be kicked off the python asyncio thread.
vipsExecutor = concurrent.futures.ThreadPoolExecutor(max_workers=2, thread_name_prefix="vips")
async def to_thread(f):
loop = asyncio.get_running_loop()
return await loop.run_in_executor(vipsExecutor, f)
class VipsImage(scrypted_sdk.VideoFrame):
def __init__(self, vipsImage: pyvips.Image) -> None:
super().__init__()
self.vipsImage = vipsImage
self.width = vipsImage.width
self.height = vipsImage.height
async def toBuffer(self, options: scrypted_sdk.ImageOptions = None) -> bytearray:
vipsImage: VipsImage = await self.toVipsImage(options)
if not options or not options.get('format', None):
def format():
return memoryview(vipsImage.vipsImage.write_to_memory())
return await to_thread(format)
elif options['format'] == 'rgb':
def format():
if vipsImage.vipsImage.hasalpha():
rgb = vipsImage.vipsImage.extract_band(0, vipsImage.vipsImage.bands - 1)
else:
rgb = vipsImage.vipsImage
mem = memoryview(rgb.write_to_memory())
return mem
return await to_thread(format)
return await to_thread(lambda: vipsImage.vipsImage.write_to_buffer('.' + options['format']))
async def toVipsImage(self, options: scrypted_sdk.ImageOptions = None):
return await to_thread(lambda: toVipsImage(self, options))
async def toImage(self, options: scrypted_sdk.ImageOptions = None) -> Any:
if options and options.get('format', None):
raise Exception('format can only be used with toBuffer')
newVipsImage = await self.toVipsImage(options)
return await createVipsMediaObject(newVipsImage)
def toVipsImage(vipsImageWrapper: VipsImage, options: scrypted_sdk.ImageOptions = None) -> VipsImage:
vipsImage = vipsImageWrapper.vipsImage
if not vipsImage:
raise Exception('Video Frame has been invalidated')
options = options or {}
crop = options.get('crop')
if crop:
vipsImage = vipsImage.crop(int(crop['left']), int(crop['top']), int(crop['width']), int(crop['height']))
resize = options.get('resize')
if resize:
xscale = None
if resize.get('width'):
xscale = resize['width'] / vipsImage.width
scale = xscale
yscale = None
if resize.get('height'):
yscale = resize['height'] / vipsImage.height
scale = yscale
if xscale and yscale:
scale = min(yscale, xscale)
xscale = xscale or yscale
yscale = yscale or xscale
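# If only one of width/height is requested, reuse that scale for the other
# axis so the aspect ratio is preserved; when both are given, each axis is
# scaled independently to the exact requested dimensions.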
vipsImage = vipsImage.resize(xscale, vscale=yscale, kernel='linear')
return VipsImage(vipsImage)
async def createVipsMediaObject(image: VipsImage):
ret = await scrypted_sdk.mediaManager.createMediaObject(image, scrypted_sdk.ScryptedMimeTypes.Image.value, {
'width': image.width,
'height': image.height,
'toBuffer': lambda options = None: image.toBuffer(options),
'toImage': lambda options = None: image.toImage(options),
})
return ret
class ImageReader(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.BufferConverter):
def __init__(self, nativeId: str):
super().__init__(nativeId)
self.fromMimeType = 'image/*'
self.toMimeType = scrypted_sdk.ScryptedMimeTypes.Image.value
async def convert(self, data: Any, fromMimeType: str, toMimeType: str, options: scrypted_sdk.MediaObjectOptions = None) -> Any:
vips = pyvips.Image.new_from_buffer(data, '')
return await createVipsMediaObject(VipsImage(vips))
class ImageWriter(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.BufferConverter):
def __init__(self, nativeId: str):
super().__init__(nativeId)
self.fromMimeType = scrypted_sdk.ScryptedMimeTypes.Image.value
self.toMimeType = 'image/*'
async def convert(self, data: scrypted_sdk.VideoFrame, fromMimeType: str, toMimeType: str, options: scrypted_sdk.MediaObjectOptions = None) -> Any:
return await data.toBuffer({
'format': 'jpg',
})


@@ -9,7 +9,9 @@ Do not enable prebuffer on Ring cameras and doorbells.
* The persistent live stream will drain the battery faster than it can charge.
* The persistent live stream will also count against ISP bandwidth limits.
## Supported Cameras
## Supported Devices
### Cameras
- Ring Video Doorbell Wired, Pro, Pro 2, 4, 3, 2nd Gen
- Ring Floodlight Cam Wired Plus
- Ring Floodlight Cam Wired Pro
@@ -17,6 +19,15 @@ Do not enable prebuffer on Ring cameras and doorbells.
- Ring Indoor Cam
- Ring Stick-Up Cam (Wired and Battery)
### Other Devices
- Security Panel
- Location Modes
- Contact Sensor / Retrofit Alarm Zones / Tilt Sensor
- Motion Sensor
- Flood / Freeze Sensor
- Water Sensor
- Smart Locks
## Problems and Solutions
I can see artifacts in HKSV recordings


@@ -1,17 +1,17 @@
{
"name": "@scrypted/ring",
"version": "0.0.98",
"version": "0.0.100",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/ring",
"version": "0.0.98",
"version": "0.0.100",
"dependencies": {
"@koush/ring-client-api": "file:../../external/ring-client-api",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^18.14.5",
"@types/node": "^18.15.3",
"axios": "^1.3.4",
"rxjs": "^7.8.0"
},
@@ -49,7 +49,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.82",
"version": "0.2.85",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -148,9 +148,9 @@
}
},
"node_modules/@types/node": {
"version": "18.14.5",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.5.tgz",
"integrity": "sha512-CRT4tMK/DHYhw1fcCEBwME9CSaZNclxfzVMe7GsO6ULSwsttbj70wSiX6rZdIjGblu93sTJxLdhNIT85KKI7Qw=="
"version": "18.15.3",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.3.tgz",
"integrity": "sha512-p6ua9zBxz5otCmbpb5D3U4B5Nanw6Pk3PPyX05xnxbB/fRv71N7CPmORg7uAD5P70T0xmx1pzAx/FUfa5X+3cw=="
},
"node_modules/@types/responselike": {
"version": "1.0.0",


@@ -36,7 +36,7 @@
"@koush/ring-client-api": "file:../../external/ring-client-api",
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/node": "^18.14.5",
"@types/node": "^18.15.3",
"axios": "^1.3.4",
"rxjs": "^7.8.0"
},
@@ -44,5 +44,5 @@
"got": "11.8.6",
"socket.io-client": "^2.5.0"
},
"version": "0.0.98"
"version": "0.0.100"
}


@@ -3,15 +3,15 @@ import { RefreshPromise } from "@scrypted/common/src/promise-utils";
import { connectRTCSignalingClients } from '@scrypted/common/src/rtc-signaling';
import { RtspServer } from '@scrypted/common/src/rtsp-server';
import { addTrackControls, parseSdp, replacePorts } from '@scrypted/common/src/sdp-utils';
import sdk, { Battery, BinarySensor, Camera, Device, DeviceProvider, EntrySensor, FFmpegInput, FloodSensor, Lock, LockState, MediaObject, MediaStreamUrl, MotionSensor, OnOff, PictureOptions, RequestMediaStreamOptions, RequestPictureOptions, ResponseMediaStreamOptions, RTCAVSignalingSetup, RTCSessionControl, RTCSignalingChannel, RTCSignalingSendIceCandidate, RTCSignalingSession, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, SecuritySystem, SecuritySystemMode, Setting, Settings, SettingValue, TamperSensor, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
import { StorageSettings } from '@scrypted/sdk/storage-settings';
import sdk, { Battery, BinarySensor, Camera, Device, DeviceProvider, EntrySensor, FFmpegInput, FloodSensor, MediaObject, MediaStreamUrl, MotionSensor, OnOff, PictureOptions, RequestMediaStreamOptions, RequestPictureOptions, ResponseMediaStreamOptions, RTCAVSignalingSetup, RTCSessionControl, RTCSignalingChannel, RTCSignalingSendIceCandidate, RTCSignalingSession, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, SecuritySystem, SecuritySystemMode, Setting, Settings, SettingValue, TamperSensor, VideoCamera } from '@scrypted/sdk';
import child_process, { ChildProcess } from 'child_process';
import dgram from 'dgram';
import { RtcpReceiverInfo, RtcpRrPacket } from '../../../external/werift/packages/rtp/src/rtcp/rr';
import { RtpPacket } from '../../../external/werift/packages/rtp/src/rtp/rtp';
import { ProtectionProfileAes128CmHmacSha1_80 } from '../../../external/werift/packages/rtp/src/srtp/const';
import { SrtcpSession } from '../../../external/werift/packages/rtp/src/srtp/srtcp';
import { Location, LocationMode, RingDevice, isStunMessage, RtpDescription, SipSession, BasicPeerConnection, CameraData, clientApi, generateUuid, RingBaseApi, RingRestClient, rxjs, SimpleWebRtcSession, StreamingSession, RingDeviceType, RingDeviceData } from './ring-client-api';
import { BasicPeerConnection, CameraData, clientApi, generateUuid, isStunMessage, Location, LocationMode, RingBaseApi, RingDevice, RingDeviceData, RingDeviceType, RingRestClient, RtpDescription, rxjs, SimpleWebRtcSession, SipSession, StreamingSession } from './ring-client-api';
import { encodeSrtpOptions, getPayloadType, getSequenceNumber, isRtpMessagePayloadType } from './srtp-utils';
const STREAM_TIMEOUT = 120000;
@@ -79,7 +79,7 @@ class RingCameraSiren extends ScryptedDeviceBase implements OnOff {
}
}
class RingCameraDevice extends ScryptedDeviceBase implements DeviceProvider, Camera, MotionSensor, BinarySensor, RTCSignalingChannel {
class RingCameraDevice extends ScryptedDeviceBase implements DeviceProvider, Camera, MotionSensor, BinarySensor, RTCSignalingChannel, VideoClips {
buttonTimeout: NodeJS.Timeout;
session: SipSession;
rtpDescription: RtpDescription;
@@ -89,6 +89,7 @@ class RingCameraDevice extends ScryptedDeviceBase implements DeviceProvider, Cam
currentMediaMimeType: string;
refreshTimeout: NodeJS.Timeout;
picturePromise: RefreshPromise<Buffer>;
videoClips = new Map<string, VideoClip>();
constructor(public plugin: RingPlugin, public location: RingLocationDevice, nativeId: string) {
super(nativeId);
@@ -98,7 +99,6 @@ class RingCameraDevice extends ScryptedDeviceBase implements DeviceProvider, Cam
this.batteryLevel = this.findCamera()?.batteryLevel;
}
async startIntercom(media: MediaObject): Promise<void> {
if (!this.session)
throw new Error("not in call");
@@ -659,9 +659,100 @@ class RingCameraDevice extends ScryptedDeviceBase implements DeviceProvider, Cam
siren.on = data.siren_status.seconds_remaining > 0 ? true : false;
}
}
async getVideoClips(options?: VideoClipOptions): Promise<VideoClip[]> {
this.videoClips = new Map<string, VideoClip>();
const response = await this.findCamera().videoSearch({
dateFrom: options.startTime,
dateTo: options.endTime,
});
return response.video_search.map((result) => {
const videoClip = {
id: result.ding_id,
startTime: result.created_at,
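// convert the clip duration from seconds to milliseconds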
duration: Math.round(result.duration * 1000),
event: result.kind.toString(),
description: result.kind.toString(),
thumbnailId: result.ding_id,
resources: {
thumbnail: {
href: result.thumbnail_url
},
video: {
href: result.hq_url
}
}
}
this.videoClips.set(result.ding_id, videoClip)
return videoClip;
});
}
async getVideoClip(videoId: string): Promise<MediaObject> {
if (this.videoClips.has(videoId)) {
return mediaManager.createMediaObjectFromUrl(this.videoClips.get(videoId).resources.video.href);
}
throw new Error('Failed to get video clip.')
}
async getVideoClipThumbnail(thumbnailId: string): Promise<MediaObject> {
if (this.videoClips.has(thumbnailId)) {
return mediaManager.createMediaObjectFromUrl(this.videoClips.get(thumbnailId).resources.thumbnail.href);
}
throw new Error('Failed to get video clip thumbnail.')
}
async removeVideoClips(...videoClipIds: string[]): Promise<void> {
throw new Error('Removing video clips not supported.');
}
}
class RingLock extends ScryptedDeviceBase implements Battery, Lock {
device: RingDevice
constructor(nativeId: string, device: RingDevice) {
super(nativeId);
this.device = device;
device.onData.subscribe(async (data: RingDeviceData) => {
this.updateState(data);
});
}
async lock(): Promise<void> {
return this.device.sendCommand('lock.lock');
}
async unlock(): Promise<void> {
return this.device.sendCommand('lock.unlock');
}
updateState(data: RingDeviceData) {
this.batteryLevel = data.batteryLevel;
switch (data.locked) {
case 'locked':
this.lockState = LockState.Locked;
break;
case 'unlocked':
this.lockState = LockState.Unlocked;
break;
case 'jammed':
this.lockState = LockState.Jammed;
break;
default:
this.lockState = undefined;
}
}
}
class RingSensor extends ScryptedDeviceBase implements TamperSensor, Battery, EntrySensor, MotionSensor, FloodSensor {
constructor(nativeId: string, device: RingDevice) {
super(nativeId);
device.onData.subscribe(async (data: RingDeviceData) => {
this.updateState(data);
});
}
updateState(data: RingDeviceData) {
this.tampered = data.tamperStatus === 'tamper';
this.batteryLevel = data.batteryLevel;
@@ -673,6 +764,7 @@ class RingSensor extends ScryptedDeviceBase implements TamperSensor, Battery, En
export class RingLocationDevice extends ScryptedDeviceBase implements DeviceProvider, SecuritySystem {
devices = new Map<string, any>();
locationDevices = new Map<string, RingDevice>();
constructor(public plugin: RingPlugin, nativeId: string) {
super(nativeId);
@@ -762,11 +854,21 @@ export class RingLocationDevice extends ScryptedDeviceBase implements DeviceProv
return this.plugin.locations.find(l => l.id === this.nativeId);
}
async findRingDeviceAtLocation(id: string): Promise<RingDevice> {
const location = this.findLocation();
return (await location.getDevices()).find((x) => x.id === id);
}
async getDevice(nativeId: string) {
if (!this.devices.has(nativeId)) {
if (nativeId.endsWith('-sensor')) {
const sensor = new RingSensor(nativeId);
this.devices.set(nativeId, sensor);
const ringDevice = await this.findRingDeviceAtLocation(nativeId.replace('-sensor', ''));
const device = new RingSensor(nativeId, ringDevice);
this.devices.set(nativeId, device);
} else if (nativeId.endsWith('-lock')) {
const ringDevice = await this.findRingDeviceAtLocation(nativeId.replace('-lock', ''));
const device = new RingLock(nativeId, ringDevice);
this.devices.set(nativeId, device);
} else {
const camera = new RingCameraDevice(this.plugin, this, nativeId);
this.devices.set(nativeId, camera);
@@ -967,6 +1069,7 @@ class RingPlugin extends ScryptedDeviceBase implements DeviceProvider, Settings
interfaces.push(
ScryptedInterface.VideoCamera,
ScryptedInterface.Intercom,
ScryptedInterface.VideoClips,
);
}
if (camera.operatingOnBattery)
@@ -1028,41 +1131,52 @@ class RingPlugin extends ScryptedDeviceBase implements DeviceProvider, Settings
});
}
const sensors = (await location.getDevices()).filter(x => {
const supportedSensors = [
RingDeviceType.ContactSensor,
RingDeviceType.RetrofitZone,
RingDeviceType.TiltSensor,
RingDeviceType.MotionSensor,
RingDeviceType.FloodFreezeSensor,
RingDeviceType.WaterSensor,
]
return x.data.status !== 'disabled' && (supportedSensors.includes(x.data.deviceType))
});
for (const sensor of sensors) {
const nativeId = sensor.id.toString() + '-sensor';
const data: RingDeviceData = sensor.data;
// add location devices
const locationDevices = await location.getDevices();
for (const locationDevice of locationDevices) {
const data: RingDeviceData = locationDevice.data;
let nativeId: string;
let type: ScryptedDeviceType;
let interfaces: ScryptedInterface[] = [];
const interfaces = [ScryptedInterface.TamperSensor];
switch (data.deviceType){
if (data.status === 'disabled') {
continue;
}
switch (data.deviceType) {
case RingDeviceType.ContactSensor:
case RingDeviceType.RetrofitZone:
case RingDeviceType.TiltSensor:
interfaces.push(ScryptedInterface.EntrySensor);
nativeId = locationDevice.id.toString() + '-sensor';
type = ScryptedDeviceType.Sensor
interfaces.push(ScryptedInterface.TamperSensor, ScryptedInterface.EntrySensor);
break;
case RingDeviceType.MotionSensor:
interfaces.push(ScryptedInterface.MotionSensor);
nativeId = locationDevice.id.toString() + '-sensor';
type = ScryptedDeviceType.Sensor
interfaces.push(ScryptedInterface.TamperSensor, ScryptedInterface.MotionSensor);
break;
case RingDeviceType.FloodFreezeSensor:
case RingDeviceType.WaterSensor:
interfaces.push(ScryptedInterface.FloodSensor);
nativeId = locationDevice.id.toString() + '-sensor';
type = ScryptedDeviceType.Sensor
interfaces.push(ScryptedInterface.TamperSensor, ScryptedInterface.FloodSensor);
break;
default: break;
default:
if (/^lock($|\.)/.test(data.deviceType)) {
nativeId = locationDevice.id.toString() + '-lock';
type = ScryptedDeviceType.Lock
interfaces.push(ScryptedInterface.Lock);
break;
} else {
this.console.debug(`discovered and ignoring unsupported '${locationDevice.deviceType}' device: '${locationDevice.name}'`)
continue;
}
}
if (data.batteryStatus !== 'none')
interfaces.push(ScryptedInterface.Battery);
const device: Device = {
info: {
model: data.deviceType,
@@ -1071,22 +1185,11 @@ class RingPlugin extends ScryptedDeviceBase implements DeviceProvider, Settings
},
providerNativeId: location.id,
nativeId: nativeId,
name: sensor.name,
type: ScryptedDeviceType.Sensor,
name: locationDevice.name,
type: type,
interfaces,
};
devices.push(device);
const getScryptedDevice = async () => {
const locationDevice = await this.getDevice(location.id);
const scryptedDevice = await locationDevice?.getDevice(nativeId);
return scryptedDevice as RingSensor;
}
sensor.onData.subscribe(async (data: RingDeviceData) => {
const scryptedDevice = await getScryptedDevice();
scryptedDevice?.updateState(data)
});
}
await deviceManager.onDevicesChanged({


@@ -1,17 +1,16 @@
{
"name": "@scrypted/snapshot",
"version": "0.0.48",
"version": "0.0.52",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/snapshot",
"version": "0.0.48",
"version": "0.0.52",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",
"@types/node": "^16.6.1",
"axios": "^0.24.0",
"sharp": "^0.31.3",
"whatwg-mimetype": "^3.0.0"
},
"devDependencies": {
@@ -39,7 +38,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.84",
"version": "0.2.85",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -133,146 +132,6 @@
"follow-redirects": "^1.14.4"
}
},
"node_modules/base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/bl": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
"integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
"dependencies": {
"buffer": "^5.5.0",
"inherits": "^2.0.4",
"readable-stream": "^3.4.0"
}
},
"node_modules/buffer": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
"integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"base64-js": "^1.3.1",
"ieee754": "^1.1.13"
}
},
"node_modules/chownr": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
},
"node_modules/color": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz",
"integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==",
"dependencies": {
"color-convert": "^2.0.1",
"color-string": "^1.9.0"
},
"engines": {
"node": ">=12.5.0"
}
},
"node_modules/color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"dependencies": {
"color-name": "~1.1.4"
},
"engines": {
"node": ">=7.0.0"
}
},
"node_modules/color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"node_modules/color-string": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
"dependencies": {
"color-name": "^1.0.0",
"simple-swizzle": "^0.2.2"
}
},
"node_modules/decompress-response": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz",
"integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==",
"dependencies": {
"mimic-response": "^3.1.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/deep-extend": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
"integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/detect-libc": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz",
"integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==",
"engines": {
"node": ">=8"
}
},
"node_modules/end-of-stream": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
"integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
"dependencies": {
"once": "^1.4.0"
}
},
"node_modules/expand-template": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz",
"integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==",
"engines": {
"node": ">=6"
}
},
"node_modules/follow-redirects": {
"version": "1.14.9",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz",
@@ -292,339 +151,6 @@
}
}
},
"node_modules/fs-constants": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="
},
"node_modules/github-from-package": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz",
"integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="
},
"node_modules/ieee754": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"node_modules/ini": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
},
"node_modules/is-arrayish": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="
},
"node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/mimic-response": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz",
"integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==",
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/minimist": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
"integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/mkdirp-classic": {
"version": "0.5.3",
"resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz",
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="
},
"node_modules/napi-build-utils": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz",
"integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg=="
},
"node_modules/node-abi": {
"version": "3.33.0",
"resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.33.0.tgz",
"integrity": "sha512-7GGVawqyHF4pfd0YFybhv/eM9JwTtPqx0mAanQ146O3FlSh3pA24zf9IRQTOsfTSqXTNzPSP5iagAJ94jjuVog==",
"dependencies": {
"semver": "^7.3.5"
},
"engines": {
"node": ">=10"
}
},
"node_modules/node-addon-api": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
"integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA=="
},
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"dependencies": {
"wrappy": "1"
}
},
"node_modules/prebuild-install": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz",
"integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==",
"dependencies": {
"detect-libc": "^2.0.0",
"expand-template": "^2.0.3",
"github-from-package": "0.0.0",
"minimist": "^1.2.3",
"mkdirp-classic": "^0.5.3",
"napi-build-utils": "^1.0.1",
"node-abi": "^3.3.0",
"pump": "^3.0.0",
"rc": "^1.2.7",
"simple-get": "^4.0.0",
"tar-fs": "^2.0.0",
"tunnel-agent": "^0.6.0"
},
"bin": {
"prebuild-install": "bin.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
"integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
"dependencies": {
"end-of-stream": "^1.1.0",
"once": "^1.3.1"
}
},
"node_modules/rc": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
"integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
"dependencies": {
"deep-extend": "^0.6.0",
"ini": "~1.3.0",
"minimist": "^1.2.0",
"strip-json-comments": "~2.0.1"
},
"bin": {
"rc": "cli.js"
}
},
"node_modules/readable-stream": {
"version": "3.6.1",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.1.tgz",
"integrity": "sha512-+rQmrWMYGA90yenhTYsLWAsLsqVC8osOw6PKE1HDYiO0gdPeKe/xDHNzIAIn4C91YQ6oenEhfYqqc1883qHbjQ==",
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/semver": {
"version": "7.3.8",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/sharp": {
"version": "0.31.3",
"resolved": "https://registry.npmjs.org/sharp/-/sharp-0.31.3.tgz",
"integrity": "sha512-XcR4+FCLBFKw1bdB+GEhnUNXNXvnt0tDo4WsBsraKymuo/IAuPuCBVAL2wIkUw2r/dwFW5Q5+g66Kwl2dgDFVg==",
"hasInstallScript": true,
"dependencies": {
"color": "^4.2.3",
"detect-libc": "^2.0.1",
"node-addon-api": "^5.0.0",
"prebuild-install": "^7.1.1",
"semver": "^7.3.8",
"simple-get": "^4.0.1",
"tar-fs": "^2.1.1",
"tunnel-agent": "^0.6.0"
},
"engines": {
"node": ">=14.15.0"
},
"funding": {
"url": "https://opencollective.com/libvips"
}
},
"node_modules/simple-concat": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz",
"integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/simple-get": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz",
"integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"decompress-response": "^6.0.0",
"once": "^1.3.1",
"simple-concat": "^1.0.0"
}
},
"node_modules/simple-swizzle": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
"integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
"dependencies": {
"is-arrayish": "^0.3.1"
}
},
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"dependencies": {
"safe-buffer": "~5.2.0"
}
},
"node_modules/strip-json-comments": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
"integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/tar-fs": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz",
"integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==",
"dependencies": {
"chownr": "^1.1.1",
"mkdirp-classic": "^0.5.2",
"pump": "^3.0.0",
"tar-stream": "^2.1.4"
}
},
"node_modules/tar-stream": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
"integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
"dependencies": {
"bl": "^4.0.3",
"end-of-stream": "^1.4.1",
"fs-constants": "^1.0.0",
"inherits": "^2.0.3",
"readable-stream": "^3.1.1"
},
"engines": {
"node": ">=6"
}
},
"node_modules/tunnel-agent": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
"dependencies": {
"safe-buffer": "^5.0.1"
},
"engines": {
"node": "*"
}
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"node_modules/whatwg-mimetype": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz",
@@ -632,16 +158,6 @@
"engines": {
"node": ">=12"
}
},
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
},
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
},
"dependencies": {
@@ -732,344 +248,15 @@
"follow-redirects": "^1.14.4"
}
},
"base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="
},
"bl": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
"integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
"requires": {
"buffer": "^5.5.0",
"inherits": "^2.0.4",
"readable-stream": "^3.4.0"
}
},
"buffer": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
"integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
"requires": {
"base64-js": "^1.3.1",
"ieee754": "^1.1.13"
}
},
"chownr": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
},
"color": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz",
"integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==",
"requires": {
"color-convert": "^2.0.1",
"color-string": "^1.9.0"
}
},
"color-convert": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"requires": {
"color-name": "~1.1.4"
}
},
"color-name": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
},
"color-string": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz",
"integrity": "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg==",
"requires": {
"color-name": "^1.0.0",
"simple-swizzle": "^0.2.2"
}
},
"decompress-response": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz",
"integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==",
"requires": {
"mimic-response": "^3.1.0"
}
},
"deep-extend": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz",
"integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA=="
},
"detect-libc": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz",
"integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w=="
},
"end-of-stream": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
"integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
"requires": {
"once": "^1.4.0"
}
},
"expand-template": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz",
"integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg=="
},
"follow-redirects": {
"version": "1.14.9",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.9.tgz",
"integrity": "sha512-MQDfihBQYMcyy5dhRDJUHcw7lb2Pv/TuE6xP1vyraLukNDHKbDxDNaOE3NbCAdKQApno+GPRyo1YAp89yCjK4w=="
},
"fs-constants": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="
},
"github-from-package": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz",
"integrity": "sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw=="
},
"ieee754": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA=="
},
"inherits": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
"integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="
},
"ini": {
"version": "1.3.8",
"resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz",
"integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew=="
},
"is-arrayish": {
"version": "0.3.2",
"resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz",
"integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="
},
"lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"requires": {
"yallist": "^4.0.0"
}
},
"mimic-response": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz",
"integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ=="
},
"minimist": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
"integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="
},
"mkdirp-classic": {
"version": "0.5.3",
"resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz",
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="
},
"napi-build-utils": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz",
"integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg=="
},
"node-abi": {
"version": "3.33.0",
"resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.33.0.tgz",
"integrity": "sha512-7GGVawqyHF4pfd0YFybhv/eM9JwTtPqx0mAanQ146O3FlSh3pA24zf9IRQTOsfTSqXTNzPSP5iagAJ94jjuVog==",
"requires": {
"semver": "^7.3.5"
}
},
"node-addon-api": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz",
"integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA=="
},
"once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"requires": {
"wrappy": "1"
}
},
"prebuild-install": {
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz",
"integrity": "sha512-jAXscXWMcCK8GgCoHOfIr0ODh5ai8mj63L2nWrjuAgXE6tDyYGnx4/8o/rCgU+B4JSyZBKbeZqzhtwtC3ovxjw==",
"requires": {
"detect-libc": "^2.0.0",
"expand-template": "^2.0.3",
"github-from-package": "0.0.0",
"minimist": "^1.2.3",
"mkdirp-classic": "^0.5.3",
"napi-build-utils": "^1.0.1",
"node-abi": "^3.3.0",
"pump": "^3.0.0",
"rc": "^1.2.7",
"simple-get": "^4.0.0",
"tar-fs": "^2.0.0",
"tunnel-agent": "^0.6.0"
}
},
"pump": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz",
"integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==",
"requires": {
"end-of-stream": "^1.1.0",
"once": "^1.3.1"
}
},
"rc": {
"version": "1.2.8",
"resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz",
"integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==",
"requires": {
"deep-extend": "^0.6.0",
"ini": "~1.3.0",
"minimist": "^1.2.0",
"strip-json-comments": "~2.0.1"
}
},
"readable-stream": {
"version": "3.6.1",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.1.tgz",
"integrity": "sha512-+rQmrWMYGA90yenhTYsLWAsLsqVC8osOw6PKE1HDYiO0gdPeKe/xDHNzIAIn4C91YQ6oenEhfYqqc1883qHbjQ==",
"requires": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
}
},
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
},
"semver": {
"version": "7.3.8",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz",
"integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==",
"requires": {
"lru-cache": "^6.0.0"
}
},
"sharp": {
"version": "0.31.3",
"resolved": "https://registry.npmjs.org/sharp/-/sharp-0.31.3.tgz",
"integrity": "sha512-XcR4+FCLBFKw1bdB+GEhnUNXNXvnt0tDo4WsBsraKymuo/IAuPuCBVAL2wIkUw2r/dwFW5Q5+g66Kwl2dgDFVg==",
"requires": {
"color": "^4.2.3",
"detect-libc": "^2.0.1",
"node-addon-api": "^5.0.0",
"prebuild-install": "^7.1.1",
"semver": "^7.3.8",
"simple-get": "^4.0.1",
"tar-fs": "^2.1.1",
"tunnel-agent": "^0.6.0"
}
},
"simple-concat": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz",
"integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q=="
},
"simple-get": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz",
"integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==",
"requires": {
"decompress-response": "^6.0.0",
"once": "^1.3.1",
"simple-concat": "^1.0.0"
}
},
"simple-swizzle": {
"version": "0.2.2",
"resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz",
"integrity": "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg==",
"requires": {
"is-arrayish": "^0.3.1"
}
},
"string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"requires": {
"safe-buffer": "~5.2.0"
}
},
"strip-json-comments": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz",
"integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ=="
},
"tar-fs": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz",
"integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==",
"requires": {
"chownr": "^1.1.1",
"mkdirp-classic": "^0.5.2",
"pump": "^3.0.0",
"tar-stream": "^2.1.4"
}
},
"tar-stream": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
"integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
"requires": {
"bl": "^4.0.3",
"end-of-stream": "^1.4.1",
"fs-constants": "^1.0.0",
"inherits": "^2.0.3",
"readable-stream": "^3.1.1"
}
},
"tunnel-agent": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
"requires": {
"safe-buffer": "^5.0.1"
}
},
"util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"whatwg-mimetype": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz",
"integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q=="
},
"wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
},
"yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/snapshot",
"version": "0.0.48",
"version": "0.0.52",
"description": "Snapshot Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
@@ -26,15 +26,11 @@
"name": "Snapshot Plugin",
"type": "API",
"interfaces": [
"DeviceProvider",
"Settings",
"MixinProvider",
"BufferConverter"
]
},
"optionalDependencies": {
"sharp": "^0.31.3"
},
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",
"@types/node": "^16.6.1",

View File

@@ -1,113 +0,0 @@
import sdk, { BufferConverter, Image, ImageOptions, MediaObject, MediaObjectOptions, ScryptedDeviceBase, ScryptedMimeTypes } from "@scrypted/sdk";
import sharp from 'sharp';
async function createVipsMediaObject(image: VipsImage): Promise<Image & MediaObject> {
const ret = await sdk.mediaManager.createMediaObject(image, ScryptedMimeTypes.Image, {
width: image.width,
height: image.height,
toBuffer: (options: ImageOptions) => image.toBuffer(options),
toImage: async (options: ImageOptions) => {
const newImage = await image.toVipsImage(options);
return createVipsMediaObject(newImage);
}
});
return ret;
}
class VipsImage implements Image {
constructor(public image: sharp.Sharp, public metadata: sharp.Metadata) {
}
get width() {
return this.metadata.width;
}
get height() {
return this.metadata.height;
}
toImageInternal(options: ImageOptions) {
const transformed = this.image.clone();
if (options?.crop) {
transformed.extract({
left: Math.floor(options.crop.left),
top: Math.floor(options.crop.top),
width: Math.floor(options.crop.width),
height: Math.floor(options.crop.height),
});
}
if (options?.resize) {
transformed.resize(typeof options.resize.width === 'number' ? Math.floor(options.resize.width) : undefined, typeof options.resize.height === 'number' ? Math.floor(options.resize.height) : undefined, {
fit: "fill",
});
}
return transformed;
}
async toBuffer(options: ImageOptions) {
const transformed = this.toImageInternal(options);
if (options?.format === 'rgb') {
transformed.removeAlpha().toFormat('raw');
}
else if (options?.format === 'jpg') {
transformed.toFormat('jpg');
}
return transformed.toBuffer();
}
async toVipsImage(options: ImageOptions) {
const transformed = this.toImageInternal(options);
const { info, data } = await transformed.raw().toBuffer({
resolveWithObject: true,
});
const newImage = sharp(data, {
raw: info,
});
const newMetadata = await newImage.metadata();
const newVipsImage = new VipsImage(newImage, newMetadata);
return newVipsImage;
}
async toImage(options: ImageOptions) {
if (options.format)
throw new Error('format can only be used with toBuffer');
const newVipsImage = await this.toVipsImage(options);
return createVipsMediaObject(newVipsImage);
}
}
export class ImageWriter extends ScryptedDeviceBase implements BufferConverter {
constructor(nativeId: string) {
super(nativeId);
this.fromMimeType = ScryptedMimeTypes.Image;
this.toMimeType = 'image/*';
}
async convert(data: Image, fromMimeType: string, toMimeType: string, options?: MediaObjectOptions): Promise<Buffer> {
return data.toBuffer({
format: 'jpg',
});
}
}
export class ImageReader extends ScryptedDeviceBase implements BufferConverter {
constructor(nativeId: string) {
super(nativeId);
this.fromMimeType = 'image/*';
this.toMimeType = ScryptedMimeTypes.Image;
}
async convert(data: Buffer, fromMimeType: string, toMimeType: string, options?: MediaObjectOptions): Promise<Image> {
const image = sharp(data, {
failOnError: false,
});
const metadata = await image.metadata();
const vipsImage = new VipsImage(image, metadata);
return createVipsMediaObject(vipsImage);
}
}

View File

@@ -8,9 +8,7 @@ import axios, { AxiosInstance } from "axios";
import https from 'https';
import path from 'path';
import MimeType from 'whatwg-mimetype';
import { ffmpegFilterImage } from './ffmpeg-image-filter';
import { ImageReader, ImageWriter } from './image-reader';
import { sharpFilterImage } from './sharp-image-filter';
import { ffmpegFilterImage, ffmpegFilterImageBuffer } from './ffmpeg-image-filter';
const { mediaManager, systemManager } = sdk;
@@ -301,7 +299,8 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
} : undefined);
picture = await this.cropAndScale(picture);
if (needSoftwareResize) {
picture = await sharpFilterImage(picture, {
picture = await ffmpegFilterImageBuffer(picture, {
ffmpegPath: await mediaManager.getFFmpegPath(),
console: this.debugConsole,
resize: options?.picture,
});
@@ -353,7 +352,8 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
const xmax = Math.max(...this.storageSettings.values.snapshotCropScale.map(([x, y]) => x)) / 100;
const ymax = Math.max(...this.storageSettings.values.snapshotCropScale.map(([x, y]) => y)) / 100;
return sharpFilterImage(buffer, {
return ffmpegFilterImageBuffer(buffer, {
ffmpegPath: await mediaManager.getFFmpegPath(),
console: this.debugConsole,
crop: {
fractional: true,
@@ -445,7 +445,8 @@ class SnapshotMixin extends SettingsMixinDeviceBase<Camera> implements Camera {
})
}
else {
return sharpFilterImage(errorBackground, {
return ffmpegFilterImageBuffer(errorBackground, {
ffmpegPath: await mediaManager.getFFmpegPath(),
console: this.debugConsole,
blur: true,
brightness: -.2,
@@ -497,7 +498,7 @@ export function parseDims<T extends string>(dict: DimDict<T>) {
return ret;
}
class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, BufferConverter, Settings, DeviceProvider {
class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, BufferConverter, Settings {
storageSettings = new StorageSettings(this, {
debugLogging: {
title: 'Debug Logging',
@@ -515,37 +516,11 @@ class SnapshotPlugin extends AutoenableMixinProvider implements MixinProvider, B
process.nextTick(() => {
sdk.deviceManager.onDevicesChanged({
devices: [
{
name: 'Image Reader',
type: ScryptedDeviceType.Builtin,
nativeId: 'reader',
interfaces: [
ScryptedInterface.BufferConverter,
]
},
{
name: 'Image Writer',
type: ScryptedDeviceType.Builtin,
nativeId: 'writer',
interfaces: [
ScryptedInterface.BufferConverter,
]
}
]
})
})
}
async getDevice(nativeId: string): Promise<any> {
if (nativeId === 'reader')
return new ImageReader('reader')
if (nativeId === 'writer')
return new ImageWriter('writer')
}
async releaseDevice(id: string, nativeId: string): Promise<void> {
}
getSettings(): Promise<Setting[]> {
return this.storageSettings.getSettings();
}

View File

@@ -1,92 +0,0 @@
import sharp, { FormatEnum, AvailableFormatInfo } from 'sharp';
export interface SharpImageFilterOptions {
console?: Console,
blur?: boolean;
brightness?: number;
text?: {
text: string;
fontFile: string;
};
resize?: {
fractional?: boolean;
width?: number;
height?: number;
};
crop?: {
fractional?: boolean;
left: number;
top: number;
width: number;
height: number;
};
format?: keyof FormatEnum | AvailableFormatInfo;
}
export async function sharpFilterImage(inputJpeg: Buffer | string, options: SharpImageFilterOptions) {
let image = sharp(inputJpeg, {
failOnError: false,
});
const metadata = await image.metadata();
if (options?.crop) {
let { left, top, width, height, fractional } = options.crop;
if (fractional) {
left = Math.floor(left * metadata.width);
width = Math.floor(width * metadata.width);
top = Math.floor(top * metadata.height);
height = Math.floor(height * metadata.height);
}
image = image.extract({
left,
top,
width,
height,
});
}
if (options?.resize) {
let { width, height, fractional } = options.resize;
if (fractional) {
if (width)
width = Math.floor(width * metadata.width);
if (height)
height = Math.floor(height * metadata.height);
}
image = image.resize(width, height);
}
if (options?.brightness) {
image = image.modulate({
lightness: options.brightness * 100,
});
}
if (options?.blur) {
image = image.blur(25);
}
if (options?.text) {
image = image.composite([
{
input: {
text: {
rgba: true,
text: `<span foreground="white">${options.text.text}</span>`,
// this is not working?
// font: 'Lato',
// fontfile: options?.text.fontFile,
dpi: metadata.height,
},
},
}
])
}
image = image.toFormat(options?.format || 'jpg');
return image.toBuffer();
}

View File

@@ -1,19 +0,0 @@
#!/bin/sh
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
mkdir -p all_models
wget https://dl.google.com/coral/canned_models/all_models.tar.gz
tar -C all_models -xvzf all_models.tar.gz
rm -f all_models.tar.gz

View File

@@ -1 +0,0 @@
../all_models/coco_labels.txt

View File

@@ -1 +0,0 @@
../all_models/mobilenet_ssd_v2_coco_quant_postprocess.tflite

View File

@@ -1 +0,0 @@
../all_models/mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/tensorflow-lite",
"version": "0.0.110",
"version": "0.1.2",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tensorflow-lite",
"version": "0.0.110",
"version": "0.1.2",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -44,5 +44,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.110"
"version": "0.1.2"
}

View File

@@ -122,6 +122,9 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
def get_input_details(self) -> Tuple[int, int, int]:
pass
def get_input_format(self) -> str:
pass
def getModelSettings(self, settings: Any = None) -> list[Setting]:
return []
@@ -131,6 +134,7 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
'classes': self.getClasses(),
'triggerClasses': self.getTriggerClasses(),
'inputSize': self.get_input_details(),
'inputFormat': self.get_input_format(),
'settings': [],
}
@@ -206,14 +210,14 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
def run_detection_gstsample(self, detection_session: DetectionSession, gst_sample, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
pass
async def run_detection_videoframe(self, videoFrame: scrypted_sdk.VideoFrame) -> ObjectsDetected:
async def run_detection_videoframe(self, videoFrame: scrypted_sdk.VideoFrame, detection_session: DetectionSession) -> ObjectsDetected:
pass
def run_detection_avframe(self, detection_session: DetectionSession, avframe, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
async def run_detection_avframe(self, detection_session: DetectionSession, avframe, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
pil: Image.Image = avframe.to_image()
return self.run_detection_image(detection_session, pil, settings, src_size, convert_to_src_size)
return await self.run_detection_image(detection_session, pil, settings, src_size, convert_to_src_size)
def run_detection_image(self, detection_session: DetectionSession, image: Image.Image, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
async def run_detection_image(self, detection_session: DetectionSession, image: Image.Image, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Any]:
pass
def run_detection_crop(self, detection_session: DetectionSession, sample: Any, settings: Any, src_size, convert_to_src_size, bounding_box: Tuple[float, float, float, float]) -> ObjectsDetected:
@@ -288,17 +292,21 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
async def generateObjectDetections(self, videoFrames: Any, session: ObjectDetectionGeneratorSession = None) -> Any:
try:
videoFrames = await scrypted_sdk.sdk.connectRPCObject(videoFrames)
detection_session = self.create_detection_session()
detection_session.plugin = self
detection_session.settings = session and session.get('settings')
async for videoFrame in videoFrames:
detected = await self.run_detection_videoframe(videoFrame, session and session.get('settings'))
detected = await self.run_detection_videoframe(videoFrame, detection_session)
yield {
'__json_copy_serialize_children': True,
'detected': detected,
'videoFrame': videoFrame,
}
except:
raise
finally:
await videoFrames.aclose()
try:
await videoFrames.aclose()
except:
pass
async def detectObjects(self, mediaObject: MediaObject, session: ObjectDetectionSession = None, callbacks: ObjectDetectionCallbacks = None) -> ObjectsDetected:
is_image = mediaObject and (mediaObject.mimeType.startswith('image/') or mediaObject.mimeType.endswith('/x-raw-image'))
@@ -335,7 +343,7 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
finally:
detection_session.running = False
else:
return self.run_detection_jpeg(detection_session, bytes(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(mediaObject, 'image/jpeg')), settings)
return await self.run_detection_jpeg(detection_session, bytes(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(mediaObject, 'image/jpeg')), settings)
if not create:
# a detection session may have been created, but not started
@@ -456,7 +464,7 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
return ret
def detection_event_notified(self, settings: Any):
async def detection_event_notified(self, settings: Any):
pass
async def createMedia(self, data: Any) -> MediaObject:
@@ -479,7 +487,7 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
if not current_data:
raise Exception('no sample')
detection_result = self.run_detection_crop(
detection_result = await self.run_detection_crop(
detection_session, current_data, detection_session.settings, current_src_size, current_convert_to_src_size, boundingBox)
return detection_result['detections']
@@ -493,7 +501,7 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
first_frame = False
print("first frame received", detection_session.id)
detection_result, data = run_detection(
detection_result, data = await run_detection(
detection_session, sample, detection_session.settings, src_size, convert_to_src_size)
if detection_result:
detection_result['running'] = True
@@ -527,7 +535,7 @@ class DetectPlugin(scrypted_sdk.ScryptedDeviceBase, ObjectDetection):
self.invalidateMedia(detection_session, data)
# asyncio.run_coroutine_threadsafe(, loop = self.loop).result()
self.detection_event_notified(detection_session.settings)
await self.detection_event_notified(detection_session.settings)
if not detection_session or duration == None:
safe_set_result(detection_session.loop,
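
A minimal sketch of the frame-consumption pattern introduced in the DetectPlugin.generateObjectDetections change above: detection runs per frame from an async generator, and the source generator is always closed in a guarded finally so a peer that has already gone away cannot raise out of cleanup. `frames` and `run_detection` are placeholders, not Scrypted APIs.

async def consume_frames(frames, run_detection):
    # frames: async generator of video frames (placeholder)
    # run_detection: async callable producing detections for one frame (placeholder)
    try:
        async for frame in frames:
            detected = await run_detection(frame)
            yield {
                '__json_copy_serialize_children': True,
                'detected': detected,
                'videoFrame': frame,
            }
    finally:
        try:
            # the remote generator may already be closed; ignore cleanup errors
            await frames.aclose()
        except Exception:
            pass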

View File

@@ -8,6 +8,8 @@ from typing import Any, List, Tuple, Mapping
import asyncio
import time
from .rectangle import Rectangle, intersect_area, intersect_rect, to_bounding_box, from_bounding_box, combine_rect
import urllib.request
import os
from detect import DetectionSession, DetectPlugin
@@ -126,6 +128,17 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
loop = asyncio.get_event_loop()
loop.call_later(4 * 60 * 60, lambda: self.requestRestart())
def downloadFile(self, url: str, filename: str):
filesPath = os.path.join(os.environ['SCRYPTED_PLUGIN_VOLUME'], 'files')
fullpath = os.path.join(filesPath, filename)
if os.path.isfile(fullpath):
return fullpath
os.makedirs(filesPath, exist_ok=True)
tmp = fullpath + '.tmp'
urllib.request.urlretrieve(url, tmp)
os.rename(tmp, fullpath)
return fullpath
def getClasses(self) -> list[str]:
return list(self.labels.values())
@@ -250,13 +263,13 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
# print(detection_result)
return detection_result
def run_detection_jpeg(self, detection_session: PredictSession, image_bytes: bytes, settings: Any) -> ObjectsDetected:
async def run_detection_jpeg(self, detection_session: PredictSession, image_bytes: bytes, settings: Any) -> ObjectsDetected:
stream = io.BytesIO(image_bytes)
image = Image.open(stream)
if image.mode == 'RGBA':
image = image.convert('RGB')
detections, _ = self.run_detection_image(detection_session, image, settings, image.size)
detections, _ = await self.run_detection_image(detection_session, image, settings, image.size)
return detections
def get_detection_input_size(self, src_size):
@@ -266,13 +279,14 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
# this is useful for high quality thumbnails.
return (None, None)
def get_input_size(self) -> Tuple[float, float]:
def get_input_size(self) -> Tuple[int, int]:
pass
def detect_once(self, input: Image.Image, settings: Any, src_size, cvss) -> ObjectsDetected:
async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss) -> ObjectsDetected:
pass
async def run_detection_videoframe(self, videoFrame: scrypted_sdk.VideoFrame, settings: Any) -> ObjectsDetected:
async def run_detection_videoframe(self, videoFrame: scrypted_sdk.VideoFrame, detection_session: PredictSession) -> ObjectsDetected:
settings = detection_session and detection_session.settings
src_size = videoFrame.width, videoFrame.height
w, h = self.get_input_size()
iw, ih = src_size
@@ -288,7 +302,7 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
})
image = Image.frombuffer('RGB', (w, h), data)
try:
ret = self.detect_once(image, settings, src_size, cvss)
ret = await self.detect_once(image, settings, src_size, cvss)
return ret
finally:
image.close()
@@ -339,9 +353,9 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
def cvss2(point, normalize=False):
return point[0] / s + ow, point[1] / s + oh, True
ret1 = self.detect_once(first, settings, src_size, cvss1)
ret1 = await self.detect_once(first, settings, src_size, cvss1)
first.close()
ret2 = self.detect_once(second, settings, src_size, cvss2)
ret2 = await self.detect_once(second, settings, src_size, cvss2)
second.close()
two_intersect = intersect_rect(Rectangle(*first_crop), Rectangle(*second_crop))
@@ -374,7 +388,7 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
ret['detections'] = dedupe_detections(ret1['detections'] + ret2['detections'], is_same_detection=is_same_detection_middle)
return ret
def run_detection_image(self, detection_session: PredictSession, image: Image.Image, settings: Any, src_size, convert_to_src_size: Any = None, multipass_crop: Tuple[float, float, float, float] = None):
async def run_detection_image(self, detection_session: PredictSession, image: Image.Image, settings: Any, src_size, convert_to_src_size: Any = None, multipass_crop: Tuple[float, float, float, float] = None):
(w, h) = self.get_input_size() or image.size
(iw, ih) = image.size
@@ -448,7 +462,7 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
converted = convert_to_src_size(unscaled, normalize) if convert_to_src_size else (unscaled[0], unscaled[1], True)
return converted
ret = self.detect_once(input, settings, src_size, cvss)
ret = await self.detect_once(input, settings, src_size, cvss)
input.close()
detection_session.processed = detection_session.processed + 1
return ret, RawImage(image)
@@ -461,7 +475,7 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
converted = convert_to_src_size(point, normalize) if convert_to_src_size else (point[0], point[1], True)
return converted
ret = self.detect_once(image, settings, src_size, cvss)
ret = await self.detect_once(image, settings, src_size, cvss)
if detection_session:
detection_session.processed = detection_session.processed + 1
else:
@@ -483,11 +497,11 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
converted = convert_to_src_size(unscaled, normalize) if convert_to_src_size else (unscaled[0], unscaled[1], True)
return converted
ret1 = self.detect_once(first, settings, src_size, cvss1)
ret1 = await self.detect_once(first, settings, src_size, cvss1)
first.close()
if detection_session:
detection_session.processed = detection_session.processed + 1
ret2 = self.detect_once(second, settings, src_size, cvss2)
ret2 = await self.detect_once(second, settings, src_size, cvss2)
if detection_session:
detection_session.processed = detection_session.processed + 1
second.close()
@@ -576,11 +590,11 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
# print('untracked %s: %s' % (d['className'], d['score']))
def run_detection_crop(self, detection_session: DetectionSession, sample: RawImage, settings: Any, src_size, convert_to_src_size, bounding_box: Tuple[float, float, float, float]) -> ObjectsDetected:
(ret, _) = self.run_detection_image(detection_session, sample.image, settings, src_size, convert_to_src_size, bounding_box)
async def run_detection_crop(self, detection_session: DetectionSession, sample: RawImage, settings: Any, src_size, convert_to_src_size, bounding_box: Tuple[float, float, float, float]) -> ObjectsDetected:
(ret, _) = await self.run_detection_image(detection_session, sample.image, settings, src_size, convert_to_src_size, bounding_box)
return ret
def run_detection_gstsample(self, detection_session: PredictSession, gstsample, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Image.Image]:
async def run_detection_gstsample(self, detection_session: PredictSession, gstsample, settings: Any, src_size, convert_to_src_size) -> Tuple[ObjectsDetected, Image.Image]:
caps = gstsample.get_caps()
# can't trust the width value, compute the stride
height = caps.get_structure(0).get_value('height')
@@ -604,7 +618,7 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
gst_buffer.unmap(info)
try:
return self.run_detection_image(detection_session, image, settings, src_size, convert_to_src_size)
return await self.run_detection_image(detection_session, image, settings, src_size, convert_to_src_size)
except:
image.close()
traceback.print_exc()
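
The downloadFile helper added to PredictPlugin above fetches a file into the plugin volume's files directory once and returns the cached path on later calls. A hedged usage sketch: the URLs are the Coral model and label URLs that appear in the TensorFlowLitePlugin diff below, and `plugin` is assumed to be a PredictPlugin subclass instance.

MODEL_URL = 'https://raw.githubusercontent.com/google-coral/test_data/master/ssd_mobilenet_v2_coco_quant_postprocess.tflite'
LABELS_URL = 'https://raw.githubusercontent.com/google-coral/test_data/master/coco_labels.txt'

def load_artifacts(plugin):
    # downloadFile short-circuits and returns the existing path if the file
    # was already fetched into the plugin volume's files directory
    model_path = plugin.downloadFile(MODEL_URL, 'ssd_mobilenet_v2_coco_quant_postprocess.tflite')
    labels_path = plugin.downloadFile(LABELS_URL, 'coco_labels.txt')
    with open(labels_path, 'r') as f:
        labels = f.read()
    return model_path, labels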

View File

@@ -3,7 +3,6 @@ import threading
from .common import *
from PIL import Image
from pycoral.adapters import detect
from pycoral.adapters.common import input_size
loaded_py_coral = False
try:
from pycoral.utils.edgetpu import list_edge_tpus
@@ -19,6 +18,9 @@ import scrypted_sdk
from scrypted_sdk.types import Setting
from typing import Any, Tuple
from predict import PredictPlugin
import concurrent.futures
import queue
import asyncio
def parse_label_contents(contents: str):
lines = contents.splitlines()
@@ -38,9 +40,15 @@ class TensorFlowLitePlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted
def __init__(self, nativeId: str | None = None):
super().__init__(MIME_TYPE, nativeId=nativeId)
labels_contents = scrypted_sdk.zip.open(
'fs/coco_labels.txt').read().decode('utf8')
tfliteFile = self.downloadFile('https://raw.githubusercontent.com/google-coral/test_data/master/ssd_mobilenet_v2_coco_quant_postprocess.tflite', 'ssd_mobilenet_v2_coco_quant_postprocess.tflite')
edgetpuFile = self.downloadFile('https://raw.githubusercontent.com/google-coral/test_data/master/ssd_mobilenet_v2_coco_quant_postprocess_edgetpu.tflite', 'ssd_mobilenet_v2_coco_quant_postprocess_edgetpu.tflite')
labelsFile = self.downloadFile('https://raw.githubusercontent.com/google-coral/test_data/master/coco_labels.txt', 'coco_labels.txt')
labels_contents = open(labelsFile, 'r').read()
self.labels = parse_label_contents(labels_contents)
self.interpreters = queue.Queue()
self.interpreter_count = 0
try:
edge_tpus = list_edge_tpus()
print('edge tpus', edge_tpus)
@@ -49,23 +57,39 @@ class TensorFlowLitePlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted
self.edge_tpu_found = str(edge_tpus)
# todo co-compile
# https://coral.ai/docs/edgetpu/compiler/#co-compiling-multiple-models
model = scrypted_sdk.zip.open(
'fs/mobilenet_ssd_v2_coco_quant_postprocess_edgetpu.tflite').read()
# face_model = scrypted_sdk.zip.open(
# 'fs/mobilenet_ssd_v2_face_quant_postprocess.tflite').read()
self.interpreter = make_interpreter(model)
for idx, edge_tpu in enumerate(edge_tpus):
try:
interpreter = make_interpreter(edgetpuFile, ":%s" % idx)
interpreter.allocate_tensors()
_, height, width, channels = interpreter.get_input_details()[
0]['shape']
self.input_details = int(width), int(height), int(channels)
self.interpreters.put(interpreter)
self.interpreter_count = self.interpreter_count + 1
print('added tpu %s' % (edge_tpu))
except Exception as e:
print('unable to use Coral Edge TPU', e)
if not self.interpreter_count:
raise Exception('all tpus failed to load')
# self.face_interpreter = make_interpreter(face_model)
except Exception as e:
print('unable to use Coral Edge TPU', e)
self.edge_tpu_found = 'Edge TPU not found'
model = scrypted_sdk.zip.open(
'fs/mobilenet_ssd_v2_coco_quant_postprocess.tflite').read()
# face_model = scrypted_sdk.zip.open(
# 'fs/mobilenet_ssd_v2_face_quant_postprocess.tflite').read()
self.interpreter = tflite.Interpreter(model_content=model)
interpreter = tflite.Interpreter(model_path=tfliteFile)
interpreter.allocate_tensors()
_, height, width, channels = interpreter.get_input_details()[
0]['shape']
self.input_details = int(width), int(height), int(channels)
self.interpreters.put(interpreter)
self.interpreter_count = self.interpreter_count + 1
# self.face_interpreter = make_interpreter(face_model)
self.interpreter.allocate_tensors()
self.mutex = threading.Lock()
self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=self.interpreter_count, thread_name_prefix="tflite", )
async def getSettings(self) -> list[Setting]:
ret = await super().getSettings()
@@ -83,29 +107,32 @@ class TensorFlowLitePlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted
# width, height, channels
def get_input_details(self) -> Tuple[int, int, int]:
with self.mutex:
_, height, width, channels = self.interpreter.get_input_details()[
0]['shape']
return int(width), int(height), int(channels)
return self.input_details
def get_input_size(self) -> Tuple[float, float]:
return input_size(self.interpreter)
def get_input_size(self) -> Tuple[int, int]:
return self.input_details[0:2]
def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
try:
with self.mutex:
async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
def predict():
interpreter = self.interpreters.get()
try:
common.set_input(
self.interpreter, input)
interpreter, input)
scale = (1, 1)
# _, scale = common.set_resized_input(
# self.interpreter, cropped.size, lambda size: cropped.resize(size, Image.ANTIALIAS))
self.interpreter.invoke()
interpreter.invoke()
objs = detect.get_objects(
self.interpreter, score_threshold=.2, image_scale=scale)
except:
print('tensorflow-lite encountered an error while detecting. requesting plugin restart.')
self.requestRestart()
raise e
interpreter, score_threshold=.2, image_scale=scale)
return objs
except:
print('tensorflow-lite encountered an error while detecting. requesting plugin restart.')
self.requestRestart()
raise
finally:
self.interpreters.put(interpreter)
objs = await asyncio.get_event_loop().run_in_executor(self.executor, predict)
allowList = settings.get('allowList', None) if settings else None
ret = self.create_detection_result(objs, src_size, allowList, cvss)
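
A sketch of the pooling pattern the TensorFlowLitePlugin now uses above: one interpreter per detected Edge TPU (or a single CPU interpreter) sits in a queue, blocking inference runs on a thread pool sized to match, and the interpreter is returned to the queue even when inference fails. `infer` stands in for the set_input/invoke/get_objects sequence and is a placeholder.

import asyncio
import concurrent.futures
import queue

class InterpreterPool:
    def __init__(self, interpreters):
        self.interpreters = queue.Queue()
        for interpreter in interpreters:
            self.interpreters.put(interpreter)
        self.executor = concurrent.futures.ThreadPoolExecutor(
            max_workers=len(interpreters), thread_name_prefix='tflite')

    async def run(self, infer):
        def predict():
            # borrow an interpreter; blocks if all TPUs are busy
            interpreter = self.interpreters.get()
            try:
                return infer(interpreter)
            finally:
                # always hand the interpreter back, even on error
                self.interpreters.put(interpreter)

        return await asyncio.get_event_loop().run_in_executor(self.executor, predict)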

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/tensorflow-lite",
"version": "0.1.3",
"version": "0.1.4",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tensorflow-lite",
"version": "0.1.3",
"version": "0.1.4",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -41,5 +41,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.3"
"version": "0.1.4"
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/webrtc",
"version": "0.1.36",
"version": "0.1.37",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/webrtc",
"version": "0.1.36",
"version": "0.1.37",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/webrtc",
"version": "0.1.36",
"version": "0.1.37",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -115,7 +115,8 @@ class WebRTCMixin extends SettingsMixinDeviceBase<RTCSignalingClient & VideoCame
const device = systemManager.getDeviceById<VideoCamera & Intercom>(this.id);
const hasIntercom = this.mixinDeviceInterfaces.includes(ScryptedInterface.Intercom);
const mo = await sdk.mediaManager.createMediaObject(device, ScryptedMimeTypes.ScryptedDevice, {
const requestMediaStream: RequestMediaStream = async options => device.getVideoStream(options);
const mo = await mediaManager.createMediaObject(requestMediaStream, ScryptedMimeTypes.RequestMediaStream, {
sourceId: device.id,
});
@@ -126,7 +127,7 @@ class WebRTCMixin extends SettingsMixinDeviceBase<RTCSignalingClient & VideoCame
mo,
this.plugin.storageSettings.values.maximumCompatibilityMode,
this.plugin.getRTCConfiguration(),
await this.plugin.getWeriftConfiguration(),
await this.plugin.getWeriftConfiguration(options?.disableTurn),
);
}
@@ -409,7 +410,7 @@ export class WebRTCPlugin extends AutoenableMixinProvider implements DeviceCreat
};
}
async getWeriftConfiguration(): Promise<Partial<PeerConfig>> {
async getWeriftConfiguration(disableTurn?: boolean): Promise<Partial<PeerConfig>> {
let ret: Partial<PeerConfig>;
if (this.storageSettings.values.weriftConfiguration) {
try {
@@ -420,7 +421,7 @@ export class WebRTCPlugin extends AutoenableMixinProvider implements DeviceCreat
}
}
const iceServers = this.storageSettings.values.useTurnServer
const iceServers = this.storageSettings.values.useTurnServer && !disableTurn
? [weriftStunServer, weriftTurnServer]
: [weriftStunServer];

View File

@@ -233,7 +233,7 @@ class HttpResponseOptions(TypedDict):
class ImageOptions(TypedDict):
crop: Any
format: Any | Any | Any
format: Any | Any | Any | Any
resize: Any
pass
@@ -486,6 +486,7 @@ class ObjectDetectionGeneratorSession(TypedDict):
class ObjectDetectionModel(TypedDict):
classes: list[str]
inputFormat: Any | Any | Any
inputSize: list[float]
name: str
settings: list[Setting]
@@ -693,7 +694,7 @@ class VideoClipOptions(TypedDict):
class VideoFrameGeneratorOptions(TypedDict):
crop: Any
format: Any | Any | Any
format: Any | Any | Any | Any
resize: Any
pass

View File

@@ -1296,6 +1296,7 @@ export interface ObjectDetectionSession extends ObjectDetectionGeneratorSession
export interface ObjectDetectionModel extends ObjectDetectionTypes {
name: string;
inputSize?: number[];
inputFormat?: 'gray' | 'rgb' | 'rgba';
settings: Setting[];
triggerClasses?: string[];
}
@@ -1328,7 +1329,7 @@ export interface ImageOptions {
width?: number,
height?: number,
};
format?: 'rgba' | 'rgb' | 'jpg';
format?: 'gray' | 'rgba' | 'rgb' | 'jpg';
}
export interface Image {
width: number;
@@ -1942,7 +1943,14 @@ export interface RTCSignalingOptions {
*/
offer?: RTCSessionDescriptionInit;
requiresOffer?: boolean;
/**
* Disables trickle ICE. All candidates must be sent in the initial offer/answer sdp.
*/
disableTrickle?: boolean;
/**
* Disables usage of TURN servers if this client exposes public addresses or provides its own TURN servers.
*/
disableTurn?: boolean;
/**
* Hint to proxy the feed, as the target client may be inflexible.
*/

View File

@@ -28,7 +28,7 @@
"${workspaceFolder}/**/*.js"
],
"env": {
"SCRYPTED_PYTHON_PATH": "python3.9",
"SCRYPTED_PYTHON_PATH": "python3.10",
// "SCRYPTED_SHARED_WORKER": "true",
// "SCRYPTED_DISABLE_AUTHENTICATION": "true",
// "DEBUG": "*",

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/server",
"version": "0.7.5",
"version": "0.7.14",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/server",
"version": "0.7.5",
"version": "0.7.14",
"license": "ISC",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/server",
"version": "0.7.7",
"version": "0.7.16",
"description": "",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10",
@@ -67,8 +67,11 @@
"prebuild": "rimraf dist",
"build": "tsc --outDir dist",
"postbuild": "node test/check-build-output.js",
"prepublishOnly": "npm version patch && git add package.json && npm run build && git commit -m prepublish",
"postpublish": "git tag v$npm_package_version && git push origin v$npm_package_version",
"prebeta": "npm version patch && git add package.json && npm run build && git commit -m prebeta",
"beta": "npm publish --tag beta",
"release": "npm publish",
"prerelease": "npm version patch && git add package.json && npm run build && git commit -m prerelease",
"postrelease": "git tag v$npm_package_version && git push origin v$npm_package_version",
"docker": "scripts/github-workflow-publish-docker.sh"
},
"author": "",

View File

@@ -8,6 +8,7 @@ import platform
import shutil
import subprocess
import threading
import concurrent.futures
import time
import traceback
import zipfile
@@ -34,7 +35,6 @@ class SystemDeviceState(TypedDict):
stateTime: int
value: any
class SystemManager(scrypted_python.scrypted_sdk.types.SystemManager):
def __init__(self, api: Any, systemState: Mapping[str, Mapping[str, SystemDeviceState]]) -> None:
super().__init__()
@@ -269,8 +269,9 @@ class PluginRemote:
clusterSecret = options['clusterSecret']
async def handleClusterClient(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
rpcTransport = rpc_reader.RpcStreamTransport(reader, writer)
peer: rpc.RpcPeer
peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, reader = reader, writer = writer)
peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
async def connectRPCObject(id: str, secret: str):
m = hashlib.sha256()
m.update(bytes('%s%s' % (clusterPort, clusterSecret), 'utf8'))
@@ -305,7 +306,8 @@ class PluginRemote:
async def connectClusterPeer():
reader, writer = await asyncio.open_connection(
'127.0.0.1', port)
peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, reader = reader, writer = writer)
rpcTransport = rpc_reader.RpcStreamTransport(reader, writer)
peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
async def run_loop():
try:
await peerReadLoop()
@@ -466,15 +468,24 @@ class PluginRemote:
schedule_exit_check()
async def getFork():
fd = os.dup(parent_conn.fileno())
forkPeer, readLoop = await rpc_reader.prepare_peer_readloop(self.loop, fd, fd)
rpcTransport = rpc_reader.RpcConnectionTransport(parent_conn)
forkPeer, readLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
forkPeer.peerName = 'thread'
async def updateStats(stats):
allMemoryStats[forkPeer] = stats
forkPeer.params['updateStats'] = updateStats
async def forkReadLoop():
try:
await readLoop()
except:
# traceback.print_exc()
print('fork read loop exited')
pass
finally:
allMemoryStats.pop(forkPeer)
parent_conn.close()
rpcTransport.executor.shutdown()
asyncio.run_coroutine_threadsafe(forkReadLoop(), loop=self.loop)
getRemote = await forkPeer.getParam('getRemote')
remote: PluginRemote = await getRemote(self.api, self.pluginId, self.hostInfo)
@@ -563,13 +574,19 @@ class PluginRemote:
async def getServicePort(self, name):
pass
async def plugin_async_main(loop: AbstractEventLoop, readFd: int, writeFd: int):
peer, readLoop = await rpc_reader.prepare_peer_readloop(loop, readFd, writeFd)
allMemoryStats = {}
async def plugin_async_main(loop: AbstractEventLoop, rpcTransport: rpc_reader.RpcTransport):
peer, readLoop = await rpc_reader.prepare_peer_readloop(loop, rpcTransport)
peer.params['print'] = print
peer.params['getRemote'] = lambda api, pluginId, hostInfo: PluginRemote(peer, api, pluginId, hostInfo, loop)
async def get_update_stats():
update_stats = await peer.getParam('updateStats')
if not update_stats:
print('host did not provide update_stats')
return
def stats_runner():
ptime = round(time.process_time() * 1000000)
@@ -584,8 +601,12 @@ async def plugin_async_main(loop: AbstractEventLoop, readFd: int, writeFd: int):
resource.RUSAGE_SELF).ru_maxrss
except:
heapTotal = 0
for _, stats in allMemoryStats.items():
ptime += stats['cpu']['user']
heapTotal += stats['memoryUsage']['heapTotal']
stats = {
'type': 'stats',
'cpu': {
'user': ptime,
'system': 0,
@@ -601,10 +622,14 @@ async def plugin_async_main(loop: AbstractEventLoop, readFd: int, writeFd: int):
asyncio.run_coroutine_threadsafe(get_update_stats(), loop)
await readLoop()
try:
await readLoop()
finally:
if type(rpcTransport) == rpc_reader.RpcConnectionTransport:
r: rpc_reader.RpcConnectionTransport = rpcTransport
r.executor.shutdown()
def main(readFd: int, writeFd: int):
def main(rpcTransport: rpc_reader.RpcTransport):
loop = asyncio.new_event_loop()
def gc_runner():
@@ -612,10 +637,10 @@ def main(readFd: int, writeFd: int):
loop.call_later(10, gc_runner)
gc_runner()
loop.run_until_complete(plugin_async_main(loop, readFd, writeFd))
loop.run_until_complete(plugin_async_main(loop, rpcTransport))
loop.close()
def plugin_main(readFd: int, writeFd: int):
def plugin_main(rpcTransport: rpc_reader.RpcTransport):
try:
import gi
gi.require_version('Gst', '1.0')
@@ -624,17 +649,16 @@ def plugin_main(readFd: int, writeFd: int):
loop = GLib.MainLoop()
worker = threading.Thread(target=main, args=(readFd, writeFd), name="asyncio-main")
worker = threading.Thread(target=main, args=(rpcTransport,), name="asyncio-main")
worker.start()
loop.run()
except:
main(readFd, writeFd)
main(rpcTransport)
def plugin_fork(conn: multiprocessing.connection.Connection):
fd = os.dup(conn.fileno())
plugin_main(fd, fd)
plugin_main(rpc_reader.RpcConnectionTransport(conn))
if __name__ == "__main__":
plugin_main(3, 4)
plugin_main(rpc_reader.RpcFileTransport(3, 4))
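
A condensed sketch of how the reworked getFork above attaches to a forked child: the multiprocessing pipe is wrapped in rpc_reader.RpcConnectionTransport, a peer and read loop are prepared from that transport, per-fork stats arrive via an updateStats callback, and the transport's worker executor is shut down when the read loop ends. Names mirror the diff; error handling is simplified and `attach_fork` itself is illustrative.

import asyncio

import rpc_reader

allMemoryStats = {}

async def attach_fork(loop, parent_conn):
    rpcTransport = rpc_reader.RpcConnectionTransport(parent_conn)
    forkPeer, readLoop = await rpc_reader.prepare_peer_readloop(loop, rpcTransport)
    forkPeer.peerName = 'thread'

    async def updateStats(stats):
        allMemoryStats[forkPeer] = stats
    forkPeer.params['updateStats'] = updateStats

    async def forkReadLoop():
        try:
            await readLoop()
        except Exception:
            print('fork read loop exited')
        finally:
            allMemoryStats.pop(forkPeer, None)
            parent_conn.close()
            rpcTransport.executor.shutdown()

    asyncio.run_coroutine_threadsafe(forkReadLoop(), loop=loop)
    return forkPeer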

View File

@@ -29,6 +29,8 @@ class RPCResultError(Exception):
def __init__(self, caught, message):
self.caught = caught
self.message = message
self.name = None
self.stack = None
class RpcSerializer:
@@ -121,6 +123,16 @@ class RpcPeer:
self.killed = False
def __apply__(self, proxyId: str, oneWayMethods: List[str], method: str, args: list):
oneway = oneWayMethods and method in oneWayMethods
if self.killed:
future = Future()
if oneway:
future.set_result(None)
return future
future.set_exception(RPCResultError(None, 'RpcPeer has been killed (apply) ' + str(method)))
return future
serializationContext: Dict = {}
serializedArgs = []
for arg in args:
@@ -134,7 +146,7 @@ class RpcPeer:
'method': method,
}
if oneWayMethods and method in oneWayMethods:
if oneway:
rpcApply['oneway'] = True
self.send(rpcApply, None, serializationContext)
future = Future()
@@ -472,12 +484,13 @@ class RpcPeer:
pass
async def createPendingResult(self, cb: Callable[[str, Callable[[Exception], None]], None]):
# if (Object.isFrozen(this.pendingResults))
# return Promise.reject(new RPCResultError('RpcPeer has been killed'));
future = Future()
if self.killed:
future.set_exception(RPCResultError(None, 'RpcPeer has been killed (createPendingResult)'))
return future
id = str(self.idCounter)
self.idCounter = self.idCounter + 1
future = Future()
self.pendingResults[id] = future
await cb(id, lambda e: future.set_exception(RPCResultError(e, None)))
return await future
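
A small sketch of the killed-peer guard added to RpcPeer.__apply__ and createPendingResult above: once the peer is killed, oneway calls resolve immediately with None and everything else fails immediately, so nothing is ever registered in pendingResults. A plain Exception stands in for RPCResultError, `send_apply` is a placeholder for serializing and sending the rpcApply message, and an event loop is assumed to be current, as in rpc.py.

from asyncio import Future

def guarded_apply(killed: bool, method: str, oneway: bool, send_apply) -> Future:
    # assumes an event loop is current, as in rpc.py
    future = Future()
    if killed:
        if oneway:
            # oneway calls have no result to wait for
            future.set_result(None)
        else:
            future.set_exception(Exception('RpcPeer has been killed (apply) ' + str(method)))
        return future
    # normal path: serialize and send the rpcApply message (placeholder)
    send_apply(method, future)
    return future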

View File

@@ -4,14 +4,14 @@ import asyncio
import base64
import json
import os
import sys
import threading
from asyncio.events import AbstractEventLoop
from os import sys
from typing import List
from typing import List, Any
import multiprocessing.connection
import aiofiles
import rpc
import concurrent.futures
import json
class BufferSerializer(rpc.RpcSerializer):
@@ -36,31 +36,131 @@ class SidebandBufferSerializer(rpc.RpcSerializer):
buffer = buffers.pop()
return buffer
async def readLoop(loop, peer: rpc.RpcPeer, reader: asyncio.StreamReader):
class RpcTransport:
async def prepare(self):
pass
async def read(self):
pass
def writeBuffer(self, buffer, reject):
pass
def writeJSON(self, json, reject):
pass
class RpcFileTransport(RpcTransport):
reader: asyncio.StreamReader
def __init__(self, readFd: int, writeFd: int) -> None:
super().__init__()
self.readFd = readFd
self.writeFd = writeFd
self.reader = None
async def prepare(self):
await super().prepare()
self.reader = await aiofiles.open(self.readFd, mode='rb')
async def read(self):
lengthBytes = await self.reader.read(4)
typeBytes = await self.reader.read(1)
type = typeBytes[0]
length = int.from_bytes(lengthBytes, 'big')
data = await self.reader.read(length - 1)
if type == 1:
return data
message = json.loads(data)
return message
def writeMessage(self, type: int, buffer, reject):
length = len(buffer) + 1
lb = length.to_bytes(4, 'big')
try:
for b in [lb, bytes([type]), buffer]:
os.write(self.writeFd, b)
except Exception as e:
if reject:
reject(e)
def writeJSON(self, j, reject):
return self.writeMessage(0, bytes(json.dumps(j), 'utf8'), reject)
def writeBuffer(self, buffer, reject):
return self.writeMessage(1, buffer, reject)
class RpcStreamTransport(RpcTransport):
def __init__(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
super().__init__()
self.reader = reader
self.writer = writer
async def read(self):
lengthBytes = await self.reader.readexactly(4)
typeBytes = await self.reader.readexactly(1)
type = typeBytes[0]
length = int.from_bytes(lengthBytes, 'big')
data = await self.reader.readexactly(length - 1)
if type == 1:
return data
message = json.loads(data)
return message
def writeMessage(self, type: int, buffer, reject):
length = len(buffer) + 1
lb = length.to_bytes(4, 'big')
try:
for b in [lb, bytes([type]), buffer]:
self.writer.write(b)
except Exception as e:
if reject:
reject(e)
def writeJSON(self, j, reject):
return self.writeMessage(0, bytes(json.dumps(j), 'utf8'), reject)
def writeBuffer(self, buffer, reject):
return self.writeMessage(1, buffer, reject)
class RpcConnectionTransport(RpcTransport):
def __init__(self, connection: multiprocessing.connection.Connection) -> None:
super().__init__()
self.connection = connection
self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1)
async def read(self):
return await asyncio.get_event_loop().run_in_executor(self.executor, lambda: self.connection.recv())
def writeMessage(self, json, reject):
try:
self.connection.send(json)
except Exception as e:
if reject:
reject(e)
def writeJSON(self, json, reject):
return self.writeMessage(json, reject)
def writeBuffer(self, buffer, reject):
return self.writeMessage(bytes(buffer), reject)
async def readLoop(loop, peer: rpc.RpcPeer, rpcTransport: RpcTransport):
deserializationContext = {
'buffers': []
}
if isinstance(reader, asyncio.StreamReader):
async def read(n):
return await reader.readexactly(n)
else:
async def read(n):
return await reader.read(n)
while True:
lengthBytes = await read(4)
typeBytes = await read(1)
type = typeBytes[0]
length = int.from_bytes(lengthBytes, 'big')
data = await read(length - 1)
message = await rpcTransport.read()
if type == 1:
deserializationContext['buffers'].append(data)
if type(message) != dict:
deserializationContext['buffers'].append(message)
continue
message = json.loads(data)
asyncio.run_coroutine_threadsafe(
peer.handleMessage(message, deserializationContext), loop)
@@ -68,46 +168,21 @@ async def readLoop(loop, peer: rpc.RpcPeer, reader: asyncio.StreamReader):
'buffers': []
}
async def prepare_peer_readloop(loop: AbstractEventLoop, readFd: int = None, writeFd: int = None, reader: asyncio.StreamReader = None, writer: asyncio.StreamWriter = None):
reader = reader or await aiofiles.open(readFd, mode='rb')
async def prepare_peer_readloop(loop: AbstractEventLoop, rpcTransport: RpcTransport):
await rpcTransport.prepare()
mutex = threading.Lock()
if writer:
def write(buffers, reject):
try:
for b in buffers:
writer.write(b)
except Exception as e:
if reject:
reject(e)
return None
else:
def write(buffers, reject):
try:
for b in buffers:
os.write(writeFd, b)
except Exception as e:
if reject:
reject(e)
def send(message, reject=None, serializationContext=None):
with mutex:
if serializationContext:
buffers = serializationContext.get('buffers', None)
if buffers:
for buffer in buffers:
length = len(buffer) + 1
lb = length.to_bytes(4, 'big')
type = 1
write([lb, bytes([type]), buffer], reject)
rpcTransport.writeBuffer(buffer, reject)
jsonString = json.dumps(message)
b = bytes(jsonString, 'utf8')
length = len(b) + 1
lb = length.to_bytes(4, 'big')
type = 0
write([lb, bytes([type]), b], reject)
rpcTransport.writeJSON(message, reject)
peer = rpc.RpcPeer(send)
peer.nameDeserializerMap['Buffer'] = SidebandBufferSerializer()
@@ -117,7 +192,7 @@ async def prepare_peer_readloop(loop: AbstractEventLoop, readFd: int = None, wri
async def peerReadLoop():
try:
await readLoop(loop, peer, reader)
await readLoop(loop, peer, rpcTransport)
except:
peer.kill()
raise
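
For reference, a sketch of the wire framing shared by RpcFileTransport and RpcStreamTransport above: a 4-byte big-endian length that counts the type byte, a 1-byte type (0 for a JSON message, 1 for a sideband buffer), then the payload. These helpers only build outgoing frames; reads follow the same layout, as in RpcStreamTransport.read.

import json

def frame(message_type: int, payload: bytes) -> bytes:
    # length covers the type byte plus the payload, matching writeMessage above
    length = len(payload) + 1
    return length.to_bytes(4, 'big') + bytes([message_type]) + payload

def frame_json(message) -> bytes:
    return frame(0, bytes(json.dumps(message), 'utf8'))

def frame_buffer(buffer: bytes) -> bytes:
    return frame(1, buffer)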

View File

@@ -385,7 +385,7 @@ export abstract class MediaManagerBase implements MediaManager {
node[candidateId] = inputWeight + outputWeight;
}
catch (e) {
console.warn(converter.name, 'skipping converter due to error', e)
console.warn(candidate.name, 'skipping converter due to error', e)
}
}

View File

@@ -141,7 +141,8 @@ export class PluginHostAPI extends PluginAPIManagedListeners implements PluginAP
for (const upsert of deviceManifest.devices) {
upsert.providerNativeId = deviceManifest.providerNativeId;
await this.pluginHost.upsertDevice(upsert);
const id = await this.pluginHost.upsertDevice(upsert);
this.scrypted.getDevice(id)?.probe().catch(() => { });
}
}