Compare commits


1 Commit

Author         SHA1        Message      Date
Koushik Dutta  fe165295fb  postrelease  2024-03-23 12:34:03 -07:00
206 changed files with 2084 additions and 4674 deletions

View File

@@ -1,11 +1,11 @@
name: Build changed plugins
on:
# push:
# branches: ["main"]
# paths: ["plugins/**"]
# pull_request:
# paths: ["plugins/**"]
push:
branches: ["main"]
paths: ["plugins/**"]
pull_request:
paths: ["plugins/**"]
workflow_dispatch:
jobs:

View File

@@ -7,10 +7,13 @@ jobs:
build:
name: Push Docker image to Docker Hub
runs-on: self-hosted
env:
NODE_VERSION: '20'
# runs-on: ubuntu-latest
strategy:
matrix:
NODE_VERSION: [
# "18",
"20"
]
BASE: ["jammy"]
FLAVOR: ["full", "lite"]
steps:
@@ -20,26 +23,12 @@ jobs:
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_AMD64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_ARM64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_AMD64 }}
platforms: linux/amd64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
@@ -65,84 +54,14 @@ jobs:
uses: docker/build-push-action@v4
with:
build-args: |
NODE_VERSION=${{ env.NODE_VERSION }}
NODE_VERSION=${{ matrix.NODE_VERSION }}
BASE=${{ matrix.BASE }}
context: install/docker/
file: install/docker/Dockerfile.${{ matrix.FLAVOR }}
platforms: linux/amd64,linux/arm64
push: true
tags: |
koush/scrypted-common:${{ matrix.BASE }}-${{ matrix.FLAVOR }}
ghcr.io/koush/scrypted-common:${{ matrix.BASE }}-${{ matrix.FLAVOR }}
cache-from: type=gha
cache-to: type=gha,mode=max
build-nvidia:
name: Push NVIDIA Docker image to Docker Hub
needs: build
runs-on: self-hosted
strategy:
matrix:
BASE: ["jammy"]
steps:
- name: Check out the repo
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_AMD64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_ARM64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_AMD64 }}
platforms: linux/amd64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/arm64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_ARM64 }}
platforms: linux/arm64
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to Github Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image (scrypted-common)
uses: docker/build-push-action@v4
with:
build-args: |
BASE=ghcr.io/koush/scrypted-common:${{ matrix.BASE }}-full
context: install/docker/
file: install/docker/Dockerfile.nvidia
platforms: linux/amd64,linux/arm64
push: true
tags: |
koush/scrypted-common:${{ matrix.BASE }}-nvidia
ghcr.io/koush/scrypted-common:${{ matrix.BASE }}-nvidia
koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}
ghcr.io/koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -20,10 +20,10 @@ jobs:
strategy:
matrix:
BASE: [
["jammy-nvidia", ".s6"],
["jammy-full", ".s6"],
["jammy-lite", ""],
"20-jammy-full",
"20-jammy-lite",
]
SUPERVISOR: ["", ".s6"]
steps:
- name: Check out the repo
uses: actions/checkout@v3
@@ -42,26 +42,12 @@ jobs:
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_AMD64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_ARM64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_AMD64 }}
platforms: linux/amd64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
@@ -87,23 +73,23 @@ jobs:
uses: docker/build-push-action@v4
with:
build-args: |
BASE=${{ matrix.BASE[0] }}
BASE=${{ matrix.BASE }}
SCRYPTED_INSTALL_VERSION=${{ steps.package-version.outputs.NPM_VERSION }}
context: install/docker/
file: install/docker/Dockerfile${{ matrix.BASE[1] }}
file: install/docker/Dockerfile${{ matrix.SUPERVISOR }}
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ format('koush/scrypted:v{1}-{0}', matrix.BASE[0], github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE[0] == 'jammy-full' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'jammy-nvidia' && 'koush/scrypted:nvidia' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'jammy-full' && 'koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'jammy-lite' && 'koush/scrypted:lite' || '' }}
${{ format('koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '.s6' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '' && 'koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '' && 'koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:lite-s6' || '' }}
${{ format('ghcr.io/koush/scrypted:v{1}-{0}', matrix.BASE[0], github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE[0] == 'jammy-full' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'jammy-nvidia' && 'ghcr.io/koush/scrypted:nvidia' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'jammy-full' && 'ghcr.io/koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'jammy-lite' && 'ghcr.io/koush/scrypted:lite' || '' }}
${{ format('ghcr.io/koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '.s6' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:lite-s6' || '' }}
cache-from: type=gha
cache-to: type=gha,mode=max
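
The tag lists above lean on the GitHub Actions expression idiom `${{ cond && value || '' }}`, which acts like a ternary: when the condition fails, the entry evaluates to an empty string, and empty lines in the tags input are ignored by the action. A rough TypeScript mirror of how the new matrix produces the Docker Hub tags (the helper itself is illustrative; matrix values and tag names are taken from the diff):

// Illustrative mirror of the `${{ cond && value || '' }}` ternary idiom used
// above to compute Docker tags from the BASE/SUPERVISOR matrix.
function tagsFor(BASE: string, SUPERVISOR: string, inputTag: string, version: string): string[] {
  const tags = [
    `koush/scrypted:${BASE}${SUPERVISOR}-v${version}`,
    BASE === '20-jammy-full' && SUPERVISOR === '.s6' ? `koush/scrypted:${inputTag}` : '',
    inputTag === 'latest' && BASE === '20-jammy-full' && SUPERVISOR === '' ? 'koush/scrypted:full' : '',
    inputTag === 'latest' && BASE === '20-jammy-lite' && SUPERVISOR === '' ? 'koush/scrypted:lite' : '',
    inputTag === 'latest' && BASE === '20-jammy-lite' && SUPERVISOR === '.s6' ? 'koush/scrypted:lite-s6' : '',
  ];
  // empty entries, like empty lines in the workflow's tags input, are dropped
  return tags.filter(Boolean);
}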

View File

@@ -1 +0,0 @@
../../../../sdk/dist/src/settings-mixin.d.ts

View File

@@ -1 +0,0 @@
../../../../sdk/dist/src/storage-settings.d.ts

View File

@@ -1,10 +1,9 @@
import sdk, { MixinDeviceBase, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedInterfaceDescriptors, ScryptedMimeTypes } from "@scrypted/sdk";
import { StorageSettings } from "@scrypted/sdk/storage-settings";
import { SettingsMixinDeviceBase } from "@scrypted/sdk/settings-mixin";
import sdk, { MixinDeviceBase, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedInterfaceDescriptors } from "@scrypted/sdk";
import fs from 'fs';
import type { TranspileOptions } from "typescript";
import vm from "vm";
import { ScriptDevice } from "./monaco/script-device";
import path from 'path';
const { systemManager, deviceManager, mediaManager, endpointManager } = sdk;
@@ -29,13 +28,9 @@ export function readFileAsString(f: string) {
}
function getTypeDefs() {
const settingsMixinDefs = readFileAsString('@types/sdk/settings-mixin.d.ts');
const storageSettingsDefs = readFileAsString('@types/sdk/storage-settings.d.ts');
const scryptedTypesDefs = readFileAsString('@types/sdk/types.d.ts');
const scryptedIndexDefs = readFileAsString('@types/sdk/index.d.ts');
return {
settingsMixinDefs,
storageSettingsDefs,
scryptedIndexDefs,
scryptedTypesDefs,
};
@@ -69,7 +64,6 @@ export async function scryptedEval(device: ScryptedDeviceBase, script: string, e
fs: require('realfs'),
ScryptedDeviceBase,
MixinDeviceBase,
StorageSettings,
systemManager,
deviceManager,
endpointManager,
@@ -79,8 +73,6 @@ export async function scryptedEval(device: ScryptedDeviceBase, script: string, e
localStorage: device.storage,
device,
exports: {} as any,
SettingsMixinDeviceBase,
ScryptedMimeTypes,
ScryptedInterface,
ScryptedDeviceType,
// @ts-expect-error
@@ -181,16 +173,6 @@ export function createMonacoEvalDefaults(extraLibs: { [lib: string]: string }) {
"node_modules/@types/scrypted__sdk/types/index.d.ts"
);
monaco.languages.typescript.typescriptDefaults.addExtraLib(
libs['settingsMixin'],
"node_modules/@types/scrypted__sdk/settings-mixin.d.ts"
);
monaco.languages.typescript.typescriptDefaults.addExtraLib(
libs['storageSettings'],
"node_modules/@types/scrypted__sdk/storage-settings.d.ts"
);
monaco.languages.typescript.typescriptDefaults.addExtraLib(
libs['sdk'],
"node_modules/@types/scrypted__sdk/index.d.ts"

View File

@@ -136,17 +136,12 @@ export async function readLine(readable: Readable) {
}
export async function readString(readable: Readable | Promise<Readable>) {
const buffer = await readBuffer(readable);
return buffer.toString();
}
export async function readBuffer(readable: Readable | Promise<Readable>) {
const buffers: Buffer[] = [];
let data = '';
readable = await readable;
readable.on('data', buffer => {
buffers.push(buffer);
data += buffer.toString();
});
readable.resume();
await once(readable, 'end')
return Buffer.concat(buffers);
return data;
}
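
The Buffer-based variant of readBuffer in this hunk collects 'data' chunks into an array and concatenates once at the end, instead of appending `buffer.toString()` per chunk; per-chunk string conversion can corrupt multi-byte UTF-8 characters that straddle chunk boundaries. The pattern in isolation, as a minimal sketch using Node's stream and events modules:

import { once } from 'events';
import { Readable } from 'stream';

// Collect chunks, then concatenate once. Awaiting 'end' via once() also
// rejects the promise if the stream emits 'error'.
async function readAll(readable: Readable | Promise<Readable>): Promise<Buffer> {
  const buffers: Buffer[] = [];
  const stream = await readable;
  stream.on('data', chunk => buffers.push(chunk));
  stream.resume();
  await once(stream, 'end');
  return Buffer.concat(buffers);
}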

View File

@@ -1,7 +1,6 @@
{
"compilerOptions": {
"module": "Node16",
"moduleResolution": "Node16",
"module": "commonjs",
"target": "esnext",
"noImplicitAny": true,
"outDir": "./dist",

View File

@@ -1,12 +1,13 @@
# Home Assistant Addon Configuration
name: Scrypted
version: "20-jammy-full.s6-v0.99.0"
version: "18-jammy-full.s6-v0.93.0"
slug: scrypted
description: Scrypted is a high performance home video integration and automation platform
url: "https://github.com/koush/scrypted"
arch:
- amd64
- aarch64
- armv7
init: false
ingress: true
ingress_port: 11080

View File

@@ -7,8 +7,7 @@
# install script.
################################################################
ARG BASE="jammy"
ARG REPO="ubuntu"
FROM ${REPO}:${BASE} as header
FROM ubuntu:${BASE} as header
ENV DEBIAN_FRONTEND=noninteractive

View File

@@ -1,6 +1,14 @@
ARG BASE="ghcr.io/koush/scrypted-common:20-jammy-full"
FROM $BASE
FROM ghcr.io/koush/scrypted:20-jammy-full.s6
# nvidia cudnn/libcublas etc.
# for some reason this is not provided by the nvidia container toolkit
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-nvidia-graphics.sh | bash
WORKDIR /
# Install miniconda
ENV CONDA_DIR /opt/conda
RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \
/bin/bash ~/miniconda.sh -b -p /opt/conda
# Put conda in path so we can use conda activate
ENV PATH=$CONDA_DIR/bin:$PATH
RUN conda install -c conda-forge cudatoolkit=11.2.2 cudnn=8.1.0
ENV CONDA_PREFIX=/opt/conda
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CONDA_PREFIX/lib/

View File

@@ -1,3 +1,5 @@
version: "3.5"
# The Scrypted docker-compose.yml file typically resides at:
# ~/.scrypted/docker-compose.yml
@@ -38,21 +40,14 @@ services:
# See volumes section below to use the host daemon.
# - SCRYPTED_DOCKER_AVAHI=true
# NVIDIA (Part 1 of 4)
# Uncomment next 3 lines for Nvidia GPU support.
# - NVIDIA_VISIBLE_DEVICES=all
# - NVIDIA_DRIVER_CAPABILITIES=all
# NVIDIA (Part 2 of 4)
# runtime: nvidia
# NVIDIA (Part 3 of 4) - Use NVIDIA image, and remove subsequent default image.
# image: ghcr.io/koush/scrypted:nvidia
image: ghcr.io/koush/scrypted
# Necessary to communicate with host dbus for avahi-daemon.
security_opt:
- apparmor:unconfined
volumes:
# NVIDIA (Part 4 of 4)
# - /etc/OpenCL/vendors/nvidia.icd:/etc/OpenCL/vendors/nvidia.icd
# Scrypted NVR Storage (Part 3 of 3)
# Modify to add the additional volume for Scrypted NVR.
@@ -99,16 +94,15 @@ services:
container_name: scrypted
restart: unless-stopped
network_mode: host
image: ghcr.io/koush/scrypted
# logging is noisy and will unnecessarily wear on flash storage.
# scrypted has per device in memory logging that is preferred.
# enable the log file if enhanced debugging is necessary.
logging:
driver: "none"
# driver: "json-file"
# options:
# max-size: "10m"
# max-file: "10"
driver: "json-file"
options:
max-size: "10m"
max-file: "10"
labels:
- "com.centurylinklabs.watchtower.scope=scrypted"

View File

@@ -1,16 +0,0 @@
if [ "$(uname -m)" = "x86_64" ]
then
echo "Installing NVIDIA graphics packages."
apt update -q \
&& apt install -y wget \
&& wget -qO /cuda-keyring.deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/$(uname -m)/cuda-keyring_1.1-1_all.deb \
&& dpkg -i /cuda-keyring.deb \
&& apt update -q \
&& apt install -y cuda-nvcc-11-8 libcublas-11-8 libcudnn8 cuda-libraries-11-8 \
&& apt install -y cuda-nvcc-12-4 libcublas-12-4 libcudnn8 cuda-libraries-12-4;
exit $?
else
echo "NVIDIA graphics will not be installed on this architecture."
fi
exit 0

View File

@@ -110,12 +110,10 @@ User=$SERVICE_USER
Group=$SERVICE_USER
Type=simple
ExecStart=/usr/bin/npx -y scrypted serve
Restart=always
Restart=on-failure
RestartSec=3
Environment="NODE_OPTIONS=$NODE_OPTIONS"
Environment="SCRYPTED_INSTALL_ENVIRONMENT=$SCRYPTED_INSTALL_ENVIRONMENT"
StandardOutput=null
StandardError=null
[Install]
WantedBy=multi-user.target

View File

@@ -34,8 +34,7 @@ $SCRYPTED_HOME_ESCAPED_PATH = $SCRYPTED_HOME.replace('\', '\\')
npm install --prefix $SCRYPTED_HOME @koush/node-windows --save
$NPX_PATH = (Get-Command npx).Path
# The path needs double quotes to handle spaces in the directory path
$NPX_PATH_ESCAPED = '"' + $NPX_PATH.replace('\', '\\') + '"'
$NPX_PATH_ESCAPED = $NPX_PATH.replace('\', '\\')
$SERVICE_JS = @"
const fs = require('fs');
@@ -45,10 +44,8 @@ try {
catch (e) {
}
const child_process = require('child_process');
child_process.spawn('$NPX_PATH_ESCAPED', ['-y', 'scrypted', 'serve'], {
child_process.spawn('$($NPX_PATH_ESCAPED)', ['-y', 'scrypted', 'serve'], {
stdio: 'inherit',
// allow spawning .cmd https://nodejs.org/en/blog/vulnerability/april-2024-security-releases-2
shell: true,
});
"@

View File

@@ -10,7 +10,7 @@ function readyn() {
}
cd /tmp
SCRYPTED_VERSION=v0.96.0
SCRYPTED_VERSION=v0.93.0
SCRYPTED_TAR_ZST=scrypted-$SCRYPTED_VERSION.tar.zst
if [ -z "$VMID" ]
then

View File

@@ -1,12 +1,12 @@
{
"name": "scrypted",
"version": "1.3.15",
"version": "1.3.13",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "scrypted",
"version": "1.3.15",
"version": "1.3.13",
"license": "ISC",
"dependencies": {
"@scrypted/client": "^1.3.3",

View File

@@ -1,6 +1,6 @@
{
"name": "scrypted",
"version": "1.3.15",
"version": "1.3.13",
"description": "",
"main": "./dist/packages/cli/src/main.js",
"bin": {

View File

@@ -24,8 +24,6 @@ async function runCommand(command: string, ...args: string[]) {
// https://github.com/lovell/sharp/blob/eefaa998725cf345227d94b40615e090495c6d09/lib/libvips.js#L115C19-L115C46
SHARP_IGNORE_GLOBAL_LIBVIPS: 'true',
},
// allow spawning .cmd https://nodejs.org/en/blog/vulnerability/april-2024-security-releases-2
shell: os.platform() === 'win32' ? true : undefined,
});
await once(cp, 'exit');
if (cp.exitCode)
@@ -135,7 +133,11 @@ export async function serveMain(installVersion?: string) {
await startServer(installDir);
if (fs.existsSync(UPDATE_FILE)) {
if (fs.existsSync(EXIT_FILE)) {
console.log('Exiting.');
process.exit(1);
}
else if (fs.existsSync(UPDATE_FILE)) {
console.log('Update requested. Installing.');
await runCommandEatError('npm', '--prefix', installDir, 'install', '--production', '@scrypted/server@latest').catch(e => {
console.error('Update failed', e);
@@ -143,10 +145,6 @@ export async function serveMain(installVersion?: string) {
console.log('Exiting.');
process.exit(1);
}
else if (fs.existsSync(EXIT_FILE)) {
console.log('Exiting.');
process.exit(1);
}
else {
console.log(`Service unexpectedly exited. Restarting momentarily.`);
await sleep(10000);
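
This hunk reorders the sentinel checks so the exit file is honored before the update file when both exist. The serve loop is file-based IPC: the server process drops a marker file and exits, and the cli wrapper decides whether to quit, self-update, or restart. A condensed sketch of that control flow, with hypothetical paths standing in for the cli's EXIT_FILE/UPDATE_FILE constants:

import fs from 'fs';

// Hypothetical stand-ins for the cli's EXIT_FILE and UPDATE_FILE constants.
const EXIT_FILE = '/tmp/scrypted.exit';
const UPDATE_FILE = '/tmp/scrypted.update';
const sleep = (ms: number) => new Promise(r => setTimeout(r, ms));

async function superviseOnce(startServer: () => Promise<void>, update: () => Promise<void>) {
  await startServer();
  // the exit sentinel wins over the update sentinel when both exist
  if (fs.existsSync(EXIT_FILE)) {
    console.log('Exiting.');
    process.exit(1);
  }
  else if (fs.existsSync(UPDATE_FILE)) {
    console.log('Update requested. Installing.');
    await update().catch(e => console.error('Update failed', e));
    process.exit(1);
  }
  else {
    console.log('Service unexpectedly exited. Restarting momentarily.');
    await sleep(10000);
  }
}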

View File

@@ -1,4 +1,4 @@
{
"scrypted.debugHost": "127.0.0.1",
"scrypted.debugHost": "scrypted-server",
}

View File

@@ -1,11 +1,6 @@
<details>
<summary>Changelog</summary>
### 0.3.1
alexa/google-home: fix potential vulnerability. do not allow local network control using cloud tokens belonging to a different user. the plugins are now locked to a specific scrypted cloud account once paired.
### 0.3.0
alexa/google-home: additional auth token checks to harden endpoints for cloud sharing

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/alexa",
"version": "0.3.2",
"version": "0.3.1",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/alexa",
"version": "0.3.2",
"version": "0.3.1",
"dependencies": {
"axios": "^1.3.4",
"uuid": "^9.0.0"

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/alexa",
"version": "0.3.2",
"version": "0.3.1",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -27,7 +27,6 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
json: true
},
syncedDevices: {
defaultValue: [],
multiple: true,
hide: true
},
@@ -67,10 +66,7 @@ class AlexaPlugin extends ScryptedDeviceBase implements HttpRequestHandler, Mixi
alexaHandlers.set('Alexa.Authorization/AcceptGrant', this.onAlexaAuthorization);
alexaHandlers.set('Alexa.Discovery/Discover', this.onDiscoverEndpoints);
this.start()
.catch(e => {
this.console.error('startup failed', e);
})
this.start();
}
async start() {
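
The `.catch` side of this hunk attaches an error handler to the fire-and-forget `start()` call, so a startup failure is logged instead of surfacing as an unhandled promise rejection, which can terminate the process on recent Node versions. The pattern as a generic sketch (class and method names here are illustrative):

class ExamplePlugin {
  constructor(public console: Console) {
    // fire-and-forget: a constructor cannot await, so attach a .catch to keep
    // a startup failure from becoming an unhandledRejection.
    this.start()
      .catch(e => this.console.error('startup failed', e));
  }
  async start() { /* connect, sync devices, etc. */ }
}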

View File

@@ -10,7 +10,7 @@
"port": 10081,
"request": "attach",
"skipFiles": [
"**/plugin-console.*",
"**/plugin-remote-worker.*",
"<node_internals>/**"
],
"preLaunchTask": "scrypted: deploy+debug",

View File

@@ -1,21 +1,19 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.150",
"version": "0.0.135",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/amcrest",
"version": "0.0.150",
"version": "0.0.135",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"content-type": "^1.0.5"
"@scrypted/sdk": "file:../../sdk"
},
"devDependencies": {
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
"@types/node": "^20.10.8"
}
},
"../../common": {
@@ -25,22 +23,23 @@
"dependencies": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^5.0.1",
"http-auth-utils": "^3.0.2",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^5.3.3"
},
"devDependencies": {
"@types/node": "^20.11.0",
"@types/node": "^20.10.8",
"ts-node": "^10.9.2"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.29",
"version": "0.3.4",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^1.6.5",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
@@ -78,29 +77,15 @@
"resolved": "../../sdk",
"link": true
},
"node_modules/@types/content-type": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@types/content-type/-/content-type-1.1.8.tgz",
"integrity": "sha512-1tBhmVUeso3+ahfyaKluXe38p+94lovUZdoVfQ3OnJo9uJC42JT7CBoN3k9HYhAae+GwiBYmHu+N9FZhOG+2Pg==",
"dev": true
},
"node_modules/@types/node": {
"version": "20.11.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
"version": "20.10.8",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.10.8.tgz",
"integrity": "sha512-f8nQs3cLxbAFc00vEU59yf9UyGUftkPaLGfvbVOIDdx2i1b8epBqj2aNGyP19fiyXWvlmZ7qC1XLjAzw/OKIeA==",
"dev": true,
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.150",
"version": "0.0.135",
"description": "Amcrest Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -36,11 +36,9 @@
},
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"content-type": "^1.0.5"
"@scrypted/sdk": "file:../../sdk"
},
"devDependencies": {
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
"@types/node": "^20.10.8"
}
}

View File

@@ -1,140 +1,10 @@
import { AuthFetchCredentialState, HttpFetchOptions, authHttpFetch } from '@scrypted/common/src/http-auth-fetch';
import { readLine } from '@scrypted/common/src/read-stream';
import { parseHeaders, readBody } from '@scrypted/common/src/rtsp-server';
import contentType from 'content-type';
import { IncomingMessage } from 'http';
import { EventEmitter, Readable } from 'stream';
import { Destroyable } from '../../rtsp/src/rtsp';
import { Readable } from 'stream';
import { getDeviceInfo } from './probe';
import { Point } from '@scrypted/sdk';
// Human
// {
// "Action" : "Cross",
// "Class" : "Normal",
// "CountInGroup" : 1,
// "DetectRegion" : [
// [ 455, 260 ],
// [ 3586, 260 ],
// [ 3768, 7580 ],
// [ 382, 7451 ]
// ],
// "Direction" : "Enter",
// "EventID" : 10181,
// "GroupID" : 0,
// "Name" : "Rule1",
// "Object" : {
// "Action" : "Appear",
// "BoundingBox" : [ 2856, 1280, 3880, 4880 ],
// "Center" : [ 3368, 3080 ],
// "Confidence" : 0,
// "LowerBodyColor" : [ 0, 0, 0, 0 ],
// "MainColor" : [ 0, 0, 0, 0 ],
// "ObjectID" : 863,
// "ObjectType" : "Human",
// "RelativeID" : 0,
// "Speed" : 0
// },
// "PTS" : 43380319830.0,
// "RuleID" : 2,
// "Track" : [],
// "UTC" : 1711446999,
// "UTCMS" : 701
// }
// Face
// {
// "CfgRuleId" : 1,
// "Class" : "FaceDetection",
// "CountInGroup" : 2,
// "DetectRegion" : null,
// "EventID" : 10360,
// "EventSeq" : 6,
// "Faces" : [
// {
// "BoundingBox" : [ 1504, 2336, 1728, 2704 ],
// "Center" : [ 1616, 2520 ],
// "ObjectID" : 94,
// "ObjectType" : "HumanFace",
// "RelativeID" : 0
// }
// ],
// "FrameSequence" : 8251212,
// "GroupID" : 6,
// "Mark" : 0,
// "Name" : "FaceDetection",
// "Object" : {
// "Action" : "Appear",
// "BoundingBox" : [ 1504, 2336, 1728, 2704 ],
// "Center" : [ 1616, 2520 ],
// "Confidence" : 19,
// "FrameSequence" : 8251212,
// "ObjectID" : 94,
// "ObjectType" : "HumanFace",
// "RelativeID" : 0,
// "SerialUUID" : "",
// "Source" : 0.0,
// "Speed" : 0,
// "SpeedTypeInternal" : 0
// },
// "Objects" : [
// {
// "Action" : "Appear",
// "BoundingBox" : [ 1504, 2336, 1728, 2704 ],
// "Center" : [ 1616, 2520 ],
// "Confidence" : 19,
// "FrameSequence" : 8251212,
// "ObjectID" : 94,
// "ObjectType" : "HumanFace",
// "RelativeID" : 0,
// "SerialUUID" : "",
// "Source" : 0.0,
// "Speed" : 0,
// "SpeedTypeInternal" : 0
// }
// ],
// "PTS" : 43774941350.0,
// "Priority" : 0,
// "RuleID" : 1,
// "RuleId" : 1,
// "Source" : -1280470024.0,
// "UTC" : 947510337,
// "UTCMS" : 0
// }
export interface AmcrestObjectDetails {
Action: string;
BoundingBox: Point;
Center: Point;
Confidence: number;
LowerBodyColor: [number, number, number, number];
MainColor: [number, number, number, number];
ObjectID: number;
ObjectType: string;
RelativeID: number;
Speed: number;
}
export interface AmcrestEventData {
Action: string;
Class: string;
CountInGroup: number;
DetectRegion: Point[];
Direction: string;
EventID: number;
GroupID: number;
Name: string;
Object: AmcrestObjectDetails;
PTS: number;
RuleID: number;
Track: any[];
UTC: number;
UTCMS: number;
}
export enum AmcrestEvent {
MotionStart = "Code=VideoMotion;action=Start",
MotionStop = "Code=VideoMotion;action=Stop",
MotionInfo = "Code=VideoMotionInfo;action=State",
AudioStart = "Code=AudioMutation;action=Start",
AudioStop = "Code=AudioMutation;action=Stop",
TalkInvite = "Code=_DoTalkAction_;action=Invite",
@@ -148,33 +18,8 @@ export enum AmcrestEvent {
DahuaTalkHangup = "Code=PassiveHungup;action=Start",
DahuaCallDeny = "Code=HungupPhone;action=Pulse",
DahuaTalkPulse = "Code=_CallNoAnswer_;action=Pulse",
FaceDetection = "Code=FaceDetection;action=Start",
SmartMotionHuman = "Code=SmartMotionHuman;action=Start",
SmartMotionVehicle = "Code=Vehicle;action=Start",
CrossLineDetection = "Code=CrossLineDetection;action=Start",
CrossRegionDetection = "Code=CrossRegionDetection;action=Start",
}
async function readAmcrestMessage(client: Readable): Promise<string[]> {
let currentHeaders: string[] = [];
while (true) {
const originalLine = await readLine(client);
const line = originalLine.trim();
if (!line)
return currentHeaders;
// dahua bugs out and sends message without a newline separating the body:
// Content-Length:39
// Code=AudioMutation;action=Start;index=0
if (!line.includes(':')) {
client.unshift(Buffer.from(originalLine + '\n'));
return currentHeaders;
}
currentHeaders.push(line);
}
}
export class AmcrestCameraClient {
credential: AuthFetchCredentialState;
@@ -233,8 +78,7 @@ export class AmcrestCameraClient {
return response.body;
}
async listenEvents(): Promise<Destroyable> {
const events = new EventEmitter();
async listenEvents() {
const url = `http://${this.ip}/cgi-bin/eventManager.cgi?action=attach&codes=[All]`;
console.log('preparing event listener', url);
@@ -242,117 +86,32 @@ export class AmcrestCameraClient {
url,
responseType: 'readable',
});
const stream: IncomingMessage = response.body;
(events as any).destroy = () => {
stream.destroy();
events.removeAllListeners();
};
stream.on('close', () => {
events.emit('close');
});
stream.on('end', () => {
events.emit('end');
});
stream.on('error', e => {
events.emit('error', e);
});
const stream = response.body;
stream.socket.setKeepAlive(true);
const ct = stream.headers['content-type'];
// make content type parsable as content disposition filename
const cd = contentType.parse(ct);
let { boundary } = cd.parameters;
boundary = `--${boundary}`;
const boundaryEnd = `${boundary}--`;
(async () => {
while (true) {
let ignore = await readLine(stream);
ignore = ignore.trim();
if (!ignore)
continue;
if (ignore === boundaryEnd)
continue;
// dahua bugs out and sends this.
if (ignore === 'HTTP/1.1 200 OK') {
const message = await readAmcrestMessage(stream);
this.console.log('ignoring dahua http message', message);
message.unshift('');
const headers = parseHeaders(message);
const body = await readBody(stream, headers);
if (body)
this.console.log('ignoring dahua http body', body);
continue;
}
if (ignore !== boundary) {
this.console.error('expected boundary but found', ignore);
this.console.error(response.headers);
throw new Error('expected boundary');
}
const message = await readAmcrestMessage(stream);
events.emit('data', message);
message.unshift('');
const headers = parseHeaders(message);
const body = await readBody(stream, headers);
const data = body.toString();
events.emit('data', data);
const parts = data.split(';');
let index: string;
try {
for (const part of parts) {
if (part.startsWith('index')) {
index = part.split('=')[1]?.trim();
}
}
}
catch (e) {
this.console.error('error parsing index', data);
}
let jsonData: any;
try {
for (const part of parts) {
if (part.startsWith('data')) {
jsonData = JSON.parse(part.split('=')[1]?.trim());
}
}
}
catch (e) {
this.console.error('error parsing data', data);
}
for (const event of Object.values(AmcrestEvent)) {
if (data.indexOf(event) !== -1) {
events.emit('event', event, index, data);
if (event === AmcrestEvent.SmartMotionHuman) {
events.emit('smart', 'person', jsonData);
}
else if (event === AmcrestEvent.SmartMotionVehicle) {
events.emit('smart', 'car', jsonData);
}
else if (event === AmcrestEvent.FaceDetection) {
events.emit('smart', 'face', jsonData);
}
else if (event === AmcrestEvent.CrossLineDetection || event === AmcrestEvent.CrossRegionDetection) {
const eventData: AmcrestEventData = jsonData;
if (eventData?.Object?.ObjectType === 'Human') {
events.emit('smart', 'person', eventData);
}
else if (eventData?.Object?.ObjectType === 'Vehicle') {
events.emit('smart', 'car', eventData);
}
}
stream.on('data', (buffer: Buffer) => {
const data = buffer.toString();
const parts = data.split(';');
let index: string;
try {
for (const part of parts) {
if (part.startsWith('index')) {
index = part.split('=')[1]?.trim();
}
}
}
})()
.catch(() => stream.destroy());
return events as any as Destroyable;
catch (e) {
this.console.error('error parsing index', data);
}
// this.console?.log('event', data);
for (const event of Object.values(AmcrestEvent)) {
if (data.indexOf(event) !== -1) {
stream.emit('event', event, index, data);
}
}
});
return stream;
}
async enableContinousRecording(channel: number) {
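
The elaborate side of this listenEvents hunk parses the camera's multipart event stream by hand: skip blank lines and the terminal boundary, require the part boundary, read header lines until a blank line (via readAmcrestMessage), then read a Content-Length delimited body. A minimal sketch of that framing loop; readLine, parseHeaders, and readBody are the @scrypted/common helpers imported in the diff, declared here with assumed shapes, and the Dahua missing-newline workaround is omitted:

import { Readable } from 'stream';

// Helpers assumed from @scrypted/common (shapes inferred from the diff):
declare function readLine(r: Readable): Promise<string>;
declare function parseHeaders(lines: string[]): { [key: string]: string };
declare function readBody(r: Readable, headers: { [key: string]: string }): Promise<Buffer>;

// Framing loop for a multipart event stream: boundary, headers, body, repeat.
async function* multipartEvents(stream: Readable, boundary: string) {
  const boundaryEnd = `${boundary}--`;
  while (true) {
    const line = (await readLine(stream)).trim();
    if (!line || line === boundaryEnd)
      continue;
    if (line !== boundary)
      throw new Error('expected boundary');
    const message: string[] = [];
    while (true) {
      const header = (await readLine(stream)).trim();
      if (!header)
        break;
      message.push(header);
    }
    message.unshift(''); // parseHeaders expects a leading status line
    const body = await readBody(stream, parseHeaders(message));
    yield body.toString();
  }
}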

View File

@@ -1,11 +1,11 @@
import { ffmpegLogInitialOutput } from '@scrypted/common/src/media-helpers';
import { readLength } from "@scrypted/common/src/read-stream";
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, Lock, MediaObject, MediaStreamOptions, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, Reboot, RequestPictureOptions, RequestRecordingStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, VideoCameraConfiguration, VideoRecorder } from "@scrypted/sdk";
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, Lock, MediaObject, MediaStreamOptions, Reboot, RequestPictureOptions, RequestRecordingStreamOptions, ResponseMediaStreamOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, VideoCameraConfiguration, VideoRecorder } from "@scrypted/sdk";
import child_process, { ChildProcess } from 'child_process';
import { PassThrough, Readable, Stream } from "stream";
import { OnvifIntercom } from "../../onvif/src/onvif-intercom";
import { RtspProvider, RtspSmartCamera, UrlMediaStreamOptions } from "../../rtsp/src/rtsp";
import { AmcrestCameraClient, AmcrestEvent, AmcrestEventData } from "./amcrest-api";
import { AmcrestCameraClient, AmcrestEvent } from "./amcrest-api";
const { mediaManager } = sdk;
@@ -22,13 +22,12 @@ function findValue(blob: string, prefix: string, key: string) {
return parts[1];
}
class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration, Camera, Intercom, Lock, VideoRecorder, Reboot, ObjectDetector {
class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration, Camera, Intercom, Lock, VideoRecorder, Reboot {
eventStream: Stream;
cp: ChildProcess;
client: AmcrestCameraClient;
videoStreamOptions: Promise<UrlMediaStreamOptions[]>;
onvifIntercom = new OnvifIntercom(this);
hasSmartDetection: boolean;
constructor(nativeId: string, provider: RtspProvider) {
super(nativeId, provider);
@@ -37,7 +36,6 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
this.storage.removeItem('amcrestDoorbell');
}
this.hasSmartDetection = this.storage.getItem('hasSmartDetection') === 'true';
this.updateDevice();
this.updateDeviceInfo();
}
@@ -186,19 +184,10 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
if (idx.toString() !== channelNumber)
return;
}
if (event === AmcrestEvent.MotionStart
|| event === AmcrestEvent.SmartMotionHuman
|| event === AmcrestEvent.SmartMotionVehicle
|| event === AmcrestEvent.CrossLineDetection
|| event === AmcrestEvent.CrossRegionDetection) {
if (event === AmcrestEvent.MotionStart) {
this.motionDetected = true;
resetMotionTimeout();
}
else if (event === AmcrestEvent.MotionInfo) {
// this seems to be a motion pulse
if (this.motionDetected)
resetMotionTimeout();
}
else if (event === AmcrestEvent.MotionStop) {
// use resetMotionTimeout
}
@@ -242,43 +231,9 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
});
events.on('smart', (className: string, data: AmcrestEventData) => {
if (!this.hasSmartDetection) {
this.hasSmartDetection = true;
this.storage.setItem('hasSmartDetection', 'true');
this.updateDevice();
}
const detected: ObjectsDetected = {
timestamp: Date.now(),
detections: [
{
score: 1,
className,
}
],
};
this.onDeviceEvent(ScryptedInterface.ObjectDetector, detected);
});
return events;
}
async getDetectionInput(detectionId: string, eventId?: any): Promise<MediaObject> {
return;
}
async getObjectTypes(): Promise<ObjectDetectionTypes> {
return {
classes: [
'person',
'face',
'car',
],
}
}
async getOtherSettings(): Promise<Setting[]> {
const ret = await super.getOtherSettings();
ret.push(
@@ -517,19 +472,13 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
if (isDoorbell || twoWayAudio) {
interfaces.push(ScryptedInterface.Intercom);
}
const enableDahuaLock = this.storage.getItem('enableDahuaLock') === 'true';
if (isDoorbell && doorbellType === DAHUA_DOORBELL_TYPE && enableDahuaLock) {
interfaces.push(ScryptedInterface.Lock);
}
const continuousRecording = this.storage.getItem('continuousRecording') === 'true';
if (continuousRecording)
interfaces.push(ScryptedInterface.VideoRecorder);
if (this.hasSmartDetection)
interfaces.push(ScryptedInterface.ObjectDetector);
this.provider.updateDevice(this.nativeId, this.name, interfaces, type);
}
@@ -572,7 +521,7 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
const doorbellType = this.storage.getItem('doorbellType');
// not sure if this all works, since i don't actually have a doorbell.
// good luck!
const channel = this.getRtspChannel() || '1';
@@ -599,22 +548,12 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
else {
args.push(
"-vn",
'-acodec', 'aac',
'-f', 'adts',
'pipe:3',
);
"-vn",
'-acodec', 'aac',
'-f', 'adts',
'pipe:3',
);
contentType = 'Audio/AAC';
// args.push(
// "-vn",
// '-acodec', 'pcm_mulaw',
// '-ac', '1',
// '-ar', '8000',
// '-sample_fmt', 's16',
// '-f', 'mulaw',
// 'pipe:3',
// );
// contentType = 'Audio/G.711A';
}
this.console.log('ffmpeg intercom', args);
@@ -634,19 +573,15 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
// seems the dahua doorbells preferred 1024 chunks. should investigate adts
// parsing and sending multipart chunks instead.
const passthrough = new PassThrough();
const abortController = new AbortController();
this.getClient().request({
url,
method: 'POST',
headers: {
'Content-Type': contentType,
'Content-Length': '9999999',
'Content-Length': '9999999'
},
signal: abortController.signal,
responseType: 'readable',
}, passthrough)
.catch(() => { })
.finally(() => this.console.log('request finished'))
}, passthrough);
try {
while (true) {
@@ -658,8 +593,7 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
}
finally {
this.console.log('audio finished');
passthrough.destroy();
abortController.abort();
passthrough.end();
}
this.stopIntercom();
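
In the smart-detection side of this hunk, MotionStart, SmartMotion, and CrossLine/CrossRegion events all funnel into the same handling: set motionDetected and (re)arm a timeout that clears it, so a burst of Start/Info pulses extends one motion window. resetMotionTimeout itself is not shown in the hunk; a typical debounce of this shape might look like:

// Assumed shape of resetMotionTimeout (not shown in the hunk): each event
// pushes the clear-motion deadline out, coalescing bursts into one window.
let motionTimeout: NodeJS.Timeout;
function resetMotionTimeout(camera: { motionDetected: boolean }, ms = 10000) {
  clearTimeout(motionTimeout);
  motionTimeout = setTimeout(() => camera.motionDetected = false, ms);
}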

View File

@@ -29,14 +29,9 @@ export async function getDeviceInfo(credential: AuthFetchCredentialState, addres
vals[k] = v.trim();
}
const ret = {
return {
deviceType: vals.deviceType,
hardwareVersion: vals.hardwareVersion,
serialNumber: vals.serialNumber,
};
if (!ret.deviceType && !ret.hardwareVersion && !ret.serialNumber)
throw new Error('not amcrest');
return ret;
}
}
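
The stricter side of this probe hunk throws when none of the identifying fields parse, so a host that answers HTTP but is not an Amcrest/Dahua device fails discovery early instead of being created as a broken camera. A standalone sketch of the parse-and-validate step, assuming the `key=value` per-line response format the surrounding loop implies:

// Parse `key=value` lines into a record, then reject responses that carry
// none of the identifying fields.
function parseDeviceInfo(text: string) {
  const vals: { [key: string]: string } = {};
  for (const line of text.split('\n')) {
    const index = line.indexOf('=');
    if (index === -1)
      continue;
    vals[line.slice(0, index).trim()] = line.slice(index + 1).trim();
  }
  const ret = {
    deviceType: vals.deviceType,
    hardwareVersion: vals.hardwareVersion,
    serialNumber: vals.serialNumber,
  };
  if (!ret.deviceType && !ret.hardwareVersion && !ret.serialNumber)
    throw new Error('not amcrest');
  return ret;
}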

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/bticino",
"version": "0.0.15",
"version": "0.0.13",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/bticino",
"version": "0.0.15",
"version": "0.0.13",
"dependencies": {
"@slyoldfox/sip": "^0.0.6-1",
"sdp": "^3.0.3",
@@ -30,23 +30,23 @@
"dependencies": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^5.0.1",
"typescript": "^5.3.3"
"http-auth-utils": "^3.0.2",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^4.4.3"
},
"devDependencies": {
"@types/node": "^20.11.0",
"ts-node": "^10.9.2"
"@types/node": "^16.9.0"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.14",
"version": "0.3.2",
"dev": true,
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^1.6.5",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
@@ -1219,10 +1219,10 @@
"requires": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"@types/node": "^20.11.0",
"http-auth-utils": "^5.0.1",
"ts-node": "^10.9.2",
"typescript": "^5.3.3"
"@types/node": "^16.9.0",
"http-auth-utils": "^3.0.2",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^4.4.3"
}
},
"@scrypted/sdk": {
@@ -1232,7 +1232,7 @@
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"adm-zip": "^0.4.13",
"axios": "^1.6.5",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/bticino",
"version": "0.0.15",
"version": "0.0.13",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",

View File

@@ -17,12 +17,6 @@ import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from
import { PersistentSipManager } from './persistent-sip-manager';
import { InviteHandler } from './bticino-inviteHandler';
import { SipOptions, SipRequest } from '../../sip/src/sip-manager';
import fs from "fs"
import url from "url"
import path from 'path';
import { default as stream } from 'node:stream'
import type { ReadableStream } from 'node:stream/web'
import { finished } from "stream/promises";
import { get } from 'http'
import { ControllerApi } from './c300x-controller-api';
@@ -31,7 +25,6 @@ import { BticinoMuteSwitch } from './bticino-mute-switch';
const STREAM_TIMEOUT = 65000;
const { mediaManager } = sdk;
const BTICINO_CLIPS = path.join(process.env.SCRYPTED_PLUGIN_VOLUME, 'bticino-clips');
export class BticinoSipCamera extends ScryptedDeviceBase implements MotionSensor, DeviceProvider, Intercom, Camera, VideoCamera, Settings, BinarySensor, HttpRequestHandler, VideoClips, Reboot {
@@ -154,87 +147,11 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements MotionSensor
});
}
async getVideoClip(videoId: string): Promise<MediaObject> {
const outputfile = await this.fetchAndConvertVoicemailMessage(videoId);
const fileURLToPath: string = url.pathToFileURL(outputfile).toString()
this.console.log(`Creating mediaObject for url: ${fileURLToPath}`)
return await mediaManager.createMediaObjectFromUrl(fileURLToPath);
}
private async fetchAndConvertVoicemailMessage(videoId: string) {
getVideoClip(videoId: string): Promise<MediaObject> {
let c300x = SipHelper.getIntercomIp(this)
const response = await fetch(`http://${c300x}:8080/voicemail?msg=${videoId}/aswm.avi&raw=true`);
const contentLength: number = Number(response.headers.get("Content-Length"));
const lastModified: Date = new Date(response.headers.get("Last-Modified-Time"));
const avifile = `${BTICINO_CLIPS}/${videoId}.avi`;
const outputfile = `${BTICINO_CLIPS}/${videoId}.mp4`;
if (!fs.existsSync(BTICINO_CLIPS)) {
this.console.log(`Creating clips dir at: ${BTICINO_CLIPS}`)
fs.mkdirSync(BTICINO_CLIPS);
}
if (fs.existsSync(avifile)) {
const stat = fs.statSync(avifile);
if (stat.size != contentLength || stat.mtime.getTime() != lastModified.getTime()) {
this.console.log(`Size ${stat.size} != ${contentLength} or time ${stat.mtime.getTime} != ${lastModified.getTime}`)
try {
fs.rmSync(avifile);
} catch (e) { }
try {
fs.rmSync(outputfile);
} catch (e) { }
} else {
this.console.log(`Keeping the cached video at ${avifile}`)
}
}
if (!fs.existsSync(avifile)) {
this.console.log("Starting download.")
await finished(stream.Readable.from(response.body as ReadableStream<Uint8Array>).pipe(fs.createWriteStream(avifile)));
this.console.log("Download finished.")
try {
this.console.log(`Setting mtime to ${lastModified}`)
fs.utimesSync(avifile, lastModified, lastModified);
} catch (e) { }
}
const ffmpegPath = await mediaManager.getFFmpegPath();
const ffmpegArgs = [
'-hide_banner',
'-nostats',
'-y',
'-i', avifile,
outputfile
];
safePrintFFmpegArguments(console, ffmpegArgs);
const cp = child_process.spawn(ffmpegPath, ffmpegArgs, {
stdio: ['pipe', 'pipe', 'pipe', 'pipe'],
});
const p = new Promise((resolveFunc) => {
cp.stdout.on("data", (x) => {
this.console.log(x.toString());
});
cp.stderr.on("data", (x) => {
this.console.error(x.toString());
});
cp.on("exit", (code) => {
resolveFunc(code);
});
});
let returnCode = await p;
this.console.log(`Converted file returned code: ${returnCode}`);
return outputfile;
const url = `http://${c300x}:8080/voicemail?msg=${videoId}/aswm.avi&raw=true`;
return mediaManager.createMediaObjectFromUrl(url);
}
getVideoClipThumbnail(thumbnailId: string): Promise<MediaObject> {
let c300x = SipHelper.getIntercomIp(this)
const url = `http://${c300x}:8080/voicemail?msg=${thumbnailId}/aswm.jpg&raw=true`;
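
One side of this getVideoClip hunk replaces the direct createMediaObjectFromUrl call with a download-cache-convert pipeline: fetch the AVI, stream it to disk, compare size and mtime against the cached copy, then transcode to mp4 with ffmpeg. The download step in isolation, as a minimal sketch of bridging a fetch() body into a Node write stream:

import fs from 'fs';
import { Readable } from 'node:stream';
import type { ReadableStream } from 'node:stream/web';
import { finished } from 'stream/promises';

// Stream a fetch() response body to disk: wrap the web ReadableStream in a
// Node Readable, pipe into a write stream, and await completion (or error)
// via finished().
async function downloadTo(url: string, file: string) {
  const response = await fetch(url);
  await finished(
    Readable.fromWeb(response.body as ReadableStream<Uint8Array>)
      .pipe(fs.createWriteStream(file)));
}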

View File

@@ -33,10 +33,8 @@ export class VoicemailHandler extends SipRequestHandler {
handle(request: SipRequest) {
const lastVoicemailMessageTimestamp : number = Number.parseInt( this.sipCamera.storage.getItem('lastVoicemailMessageTimestamp') ) || -1
const message : string = request.content.toString()
let matches : Array<RegExpMatchArray> = [...message.matchAll(/\*#8\*\*40\*([01])\*([01])\*/gm)]
if( matches && matches.length > 0 && matches[0].length > 0 ) {
this.sipCamera.console.debug( "Answering machine state: " + matches[0][1] + " / Welcome message state: " + matches[0][2] );
this.aswmIsEnabled = matches[0][1] == '1';
if( message.startsWith('*#8**40*0*0*') || message.startsWith('*#8**40*1*0*') ) {
this.aswmIsEnabled = message.startsWith('*#8**40*1*0*');
if( this.isEnabled() ) {
this.sipCamera.console.debug("Handling incoming answering machine reply")
const messages : string[] = message.split(';')
@@ -62,8 +60,6 @@ export class VoicemailHandler extends SipRequestHandler {
this.sipCamera.console.debug("No new messages since: " + lastVoicemailMessageTimestamp + " lastMessage: " + lastMessageTimestamp)
}
}
} else {
this.sipCamera.console.debug("Not handling message: " + message)
}
}
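
The regex side of this hunk parses both flags out of the `*#8**40*a*w*` status frame in one pass, instead of testing two fixed prefixes, so the welcome-message bit is captured alongside the answering-machine bit. The matching step in isolation:

// Capture the answering-machine and welcome-message flags from a frame like
// "*#8**40*1*0*". Literal asterisks are escaped; each ([01]) captures a flag.
const message = '*#8**40*1*0*'; // example frame
const matches = [...message.matchAll(/\*#8\*\*40\*([01])\*([01])\*/gm)];
const aswmEnabled = matches.length > 0 && matches[0][1] === '1';
const welcomeEnabled = matches.length > 0 && matches[0][2] === '1';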

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/chromecast",
"version": "0.1.58",
"version": "0.1.57",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/chromecast",
"version": "0.1.58",
"version": "0.1.57",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/chromecast",
"version": "0.1.58",
"version": "0.1.57",
"description": "Send video, audio, and text to speech notifications to Chromecast and Google Home devices",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -183,7 +183,7 @@ class CastDevice extends ScryptedDeviceBase implements MediaPlayer, Refresh, Eng
media = await mediaManager.createMediaObjectFromUrl(media);
}
}
else if (options?.mimeType?.startsWith('image/') || options?.mimeType?.startsWith('audio/')) {
else if (options?.mimeType?.startsWith('image/')) {
url = await mediaManager.convertMediaObjectToInsecureLocalUrl(media, options?.mimeType);
}

View File

@@ -15,8 +15,6 @@ Environment="SCRYPTED_PYTHON39_PATH=/usr/bin/python3.9"
Environment="SCRYPTED_PYTHON310_PATH=/usr/bin/python3.10"
Environment="SCRYPTED_FFMPEG_PATH=/usr/bin/ffmpeg"
Environment="SCRYPTED_INSTALL_ENVIRONMENT=lxc"
StandardOutput=null
StandardError=null
[Install]
WantedBy=multi-user.target

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/core",
"version": "0.3.24",
"version": "0.3.18",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/core",
"version": "0.3.24",
"version": "0.3.18",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/core",
"version": "0.3.24",
"version": "0.3.18",
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -18,7 +18,7 @@ const { systemManager, deviceManager, endpointManager } = sdk;
export function getAddresses() {
const addresses: string[] = [];
for (const [iface, nif] of Object.entries(os.networkInterfaces())) {
if (iface.startsWith('en') || iface.startsWith('eth') || iface.startsWith('wlan') || iface.startsWith('net')) {
if (iface.startsWith('en') || iface.startsWith('eth') || iface.startsWith('wlan')) {
addresses.push(iface);
addresses.push(...nif.map(addr => addr.address));
}

View File

@@ -11,7 +11,7 @@ export async function checkLxcDependencies() {
let needRestart = false;
if (!process.version.startsWith('v20.')) {
const cp = child_process.spawn('sh', ['-c', 'apt update -y && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && apt install -y nodejs']);
const cp = child_process.spawn('sh', ['-c', 'curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && apt install -y nodejs']);
const [exitCode] = await once(cp, 'exit');
if (exitCode !== 0)
sdk.log.a('Failed to install Node.js 20.x.');
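
The change here prepends `apt update -y` to the pipeline so the nodejs package lists are fresh before install. The surrounding pattern, shown fully in the hunk, is worth noting in isolation: spawn `sh -c` with a `&&` chain and await the exit code via once(). A minimal sketch:

import child_process from 'child_process';
import { once } from 'events';

// Run a shell pipeline and await its exit; 'exit' resolves to [code, signal].
async function installNode20() {
  const cp = child_process.spawn('sh', ['-c',
    'apt update -y && curl -fsSL https://deb.nodesource.com/setup_20.x | bash - && apt install -y nodejs']);
  const [exitCode] = await once(cp, 'exit');
  if (exitCode !== 0)
    throw new Error('Failed to install Node.js 20.x.');
}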

View File

@@ -1,9 +1,9 @@
{
"compilerOptions": {
"module": "Node16",
"moduleResolution": "Node16",
"target": "esnext",
"module": "commonjs",
"target": "ES2021",
"resolveJsonModule": true,
"moduleResolution": "Node16",
"esModuleInterop": true,
"sourceMap": true
},

View File

@@ -57,10 +57,7 @@ export default {
t += `<tspan x='${x}' dy='${toffset}em'>${Math.round(detection.score * 100) / 100}</tspan>`
toffset -= 1.2;
}
const tname = detection.className
+ (detection.id || detection.label ? ':' : '')
+ (detection.id ? ` ${detection.id}` : '')
+ (detection.label ? ` ${detection.label}` : '')
const tname = detection.className + (detection.id ? `: ${detection.id}` : '')
t += `<tspan x='${x}' dy='${toffset}em'>${tname}</tspan>`
const fs = 30 * svgScale;

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/coreml",
"version": "0.1.49",
"version": "0.1.29",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/coreml",
"version": "0.1.49",
"version": "0.1.29",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -34,7 +34,6 @@
"type": "API",
"interfaces": [
"Settings",
"DeviceProvider",
"ObjectDetection",
"ObjectDetectionPreview"
]
@@ -42,5 +41,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.49"
"version": "0.1.29"
}

View File

@@ -1 +0,0 @@
../../openvino/src/common

View File

@@ -1,47 +1,24 @@
from __future__ import annotations
import ast
import asyncio
import concurrent.futures
import os
import re
from typing import Any, List, Tuple
from typing import Any, Tuple
import coremltools as ct
import scrypted_sdk
from PIL import Image
from scrypted_sdk import Setting, SettingValue
from common import yolo
from coreml.face_recognition import CoreMLFaceRecognition
import yolo
from predict import Prediction, PredictPlugin, Rectangle
try:
from coreml.text_recognition import CoreMLTextRecognition
except:
CoreMLTextRecognition = None
from predict import Prediction, PredictPlugin
from predict.rectangle import Rectangle
predictExecutor = concurrent.futures.ThreadPoolExecutor(1, "CoreML-Predict")
availableModels = [
"Default",
"scrypted_yolov9c_320",
"scrypted_yolov9c",
"scrypted_yolov6n_320",
"scrypted_yolov6n",
"scrypted_yolov6s_320",
"scrypted_yolov6s",
"scrypted_yolov8n_320",
"scrypted_yolov8n",
"ssdlite_mobilenet_v2",
"yolov4-tiny",
]
predictExecutor = concurrent.futures.ThreadPoolExecutor(8, "CoreML-Predict")
def parse_label_contents(contents: str):
lines = contents.split(",")
lines = [line for line in lines if line.strip()]
lines = contents.splitlines()
ret = {}
for row_number, content in enumerate(lines):
pair = re.split(r"[:\s]+", content.strip(), maxsplit=1)
@@ -52,49 +29,40 @@ def parse_label_contents(contents: str):
return ret
def parse_labels(userDefined):
yolo = userDefined.get("names") or userDefined.get("yolo.names")
if yolo:
j = ast.literal_eval(yolo)
ret = {}
for k, v in j.items():
ret[int(k)] = v
return ret
classes = userDefined.get("classes")
if not classes:
raise Exception("no classes found in model metadata")
return parse_label_contents(classes)
class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProvider):
class CoreMLPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Settings):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
model = self.storage.getItem("model") or "Default"
if model == "Default" or model not in availableModels:
if model != "Default":
self.storage.setItem("model", "Default")
model = "scrypted_yolov9c_320"
if model == "Default":
model = "yolov8n_320"
self.yolo = "yolo" in model
self.scrypted_yolo = "scrypted_yolo" in model
self.scrypted_model = "scrypted" in model
model_version = "v7"
mlmodel = "model" if self.scrypted_yolo else model
self.yolov8 = "yolov8" in model
self.yolov9 = "yolov9" in model
model_version = "v2"
print(f"model: {model}")
if not self.yolo:
# todo convert these to mlpackage
labelsFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/coco_labels.txt",
"coco_labels.txt",
)
modelFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/{mlmodel}.mlmodel",
f"https://github.com/koush/coreml-models/raw/main/{model}/{model}.mlmodel",
f"{model}.mlmodel",
)
else:
if self.scrypted_yolo:
if self.yolov8:
modelFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/{model}.mlmodel",
f"{model}.mlmodel",
)
elif self.yolov9:
files = [
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/weights/weight.bin",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{mlmodel}.mlmodel",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{model}.mlmodel",
f"{model}/{model}.mlpackage/Manifest.json",
]
@@ -109,7 +77,7 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/FeatureDescriptions.json",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/Metadata.json",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/weights/weight.bin",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{mlmodel}.mlmodel",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{model}.mlmodel",
f"{model}/{model}.mlpackage/Manifest.json",
]
@@ -120,60 +88,25 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
)
modelFile = os.path.dirname(p)
labelsFile = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{model}/coco_80cl.txt",
f"{model_version}/{model}/coco_80cl.txt",
)
self.model = ct.models.MLModel(modelFile)
self.modelspec = self.model.get_spec()
self.inputdesc = self.modelspec.description.input[0]
self.inputheight = self.inputdesc.type.imageType.height
self.inputwidth = self.inputdesc.type.imageType.width
self.input_name = self.model.get_spec().description.input[0].name
self.labels = parse_labels(self.modelspec.description.metadata.userDefined)
labels_contents = open(labelsFile, "r").read()
self.labels = parse_label_contents(labels_contents)
# csv in mobilenet model
# self.modelspec.description.metadata.userDefined['classes']
self.loop = asyncio.get_event_loop()
self.minThreshold = 0.2
asyncio.ensure_future(self.prepareRecognitionModels(), loop=self.loop)
async def prepareRecognitionModels(self):
try:
devices = [
{
"nativeId": "facerecognition",
"type": scrypted_sdk.ScryptedDeviceType.Builtin.value,
"interfaces": [
scrypted_sdk.ScryptedInterface.ObjectDetection.value,
],
"name": "CoreML Face Recognition",
},
]
if CoreMLTextRecognition:
devices.append(
{
"nativeId": "textrecognition",
"type": scrypted_sdk.ScryptedDeviceType.Builtin.value,
"interfaces": [
scrypted_sdk.ScryptedInterface.ObjectDetection.value,
],
"name": "CoreML Text Recognition",
},
)
await scrypted_sdk.deviceManager.onDevicesChanged(
{
"devices": devices,
}
)
except:
pass
async def getDevice(self, nativeId: str) -> Any:
if nativeId == "facerecognition":
return CoreMLFaceRecognition(nativeId)
if nativeId == "textrecognition":
return CoreMLTextRecognition(nativeId)
raise Exception("unknown device")
async def getSettings(self) -> list[Setting]:
model = self.storage.getItem("model") or "Default"
return [
@@ -181,7 +114,14 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
"key": "model",
"title": "Model",
"description": "The detection model used to find objects.",
"choices": availableModels,
"choices": [
"Default",
"ssdlite_mobilenet_v2",
"yolov4-tiny",
"yolov8n",
"yolov8n_320",
"yolov9c_320",
],
"value": model,
},
]
@@ -198,22 +138,22 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
def get_input_size(self) -> Tuple[float, float]:
return (self.inputwidth, self.inputheight)
async def detect_batch(self, inputs: List[Any]) -> List[Any]:
out_dicts = await asyncio.get_event_loop().run_in_executor(
predictExecutor, lambda: self.model.predict(inputs)
)
return out_dicts
async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
objs = []
# run in executor if this is the plugin loop
if self.yolo:
out_dict = await self.queue_batch({self.input_name: input})
input_name = "image" if self.yolov8 or self.yolov9 else "input_1"
if asyncio.get_event_loop() is self.loop:
out_dict = await asyncio.get_event_loop().run_in_executor(
predictExecutor, lambda: self.model.predict({input_name: input})
)
else:
out_dict = self.model.predict({input_name: input})
if self.scrypted_yolo:
if self.yolov8 or self.yolov9:
results = list(out_dict.values())[0][0]
objs = yolo.parse_yolov9(results)
objs = yolo.parse_yolov8(results)
ret = self.create_detection_result(objs, src_size, cvss)
return ret
@@ -247,12 +187,17 @@ class CoreMLPlugin(PredictPlugin, scrypted_sdk.Settings, scrypted_sdk.DeviceProv
ret = self.create_detection_result(objs, src_size, cvss)
return ret
out_dict = await asyncio.get_event_loop().run_in_executor(
predictExecutor,
lambda: self.model.predict(
if asyncio.get_event_loop() is self.loop:
out_dict = await asyncio.get_event_loop().run_in_executor(
predictExecutor,
lambda: self.model.predict(
{"image": input, "confidenceThreshold": self.minThreshold}
),
)
else:
out_dict = self.model.predict(
{"image": input, "confidenceThreshold": self.minThreshold}
),
)
)
coordinatesList = out_dict["coordinates"].astype(float)

View File

@@ -1,132 +0,0 @@
from __future__ import annotations
import concurrent.futures
import os
import coremltools as ct
import numpy as np
# import Quartz
# from Foundation import NSData, NSMakeSize
# import Vision
from predict.face_recognize import FaceRecognizeDetection
def euclidean_distance(arr1, arr2):
return np.linalg.norm(arr1 - arr2)
def cosine_similarity(vector_a, vector_b):
dot_product = np.dot(vector_a, vector_b)
norm_a = np.linalg.norm(vector_a)
norm_b = np.linalg.norm(vector_b)
similarity = dot_product / (norm_a * norm_b)
return similarity
predictExecutor = concurrent.futures.ThreadPoolExecutor(8, "Vision-Predict")
class CoreMLFaceRecognition(FaceRecognizeDetection):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
def downloadModel(self, model: str):
model_version = "v7"
mlmodel = "model"
files = [
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/weights/weight.bin",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{mlmodel}.mlmodel",
f"{model}/{model}.mlpackage/Manifest.json",
]
for f in files:
p = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{f}",
f"{model_version}/{f}",
)
modelFile = os.path.dirname(p)
model = ct.models.MLModel(modelFile)
inputName = model.get_spec().description.input[0].name
return model, inputName
def predictDetectModel(self, input):
model, inputName = self.detectModel
out_dict = model.predict({inputName: input})
results = list(out_dict.values())[0][0]
return results
def predictFaceModel(self, input):
model, inputName = self.faceModel
out_dict = model.predict({inputName: input})
return out_dict["var_2167"][0]
def predictTextModel(self, input):
model, inputName = self.textModel
out_dict = model.predict({inputName: input})
preds = out_dict["linear_2"]
return preds
# def predictVision(self, input: Image.Image) -> asyncio.Future[list[Prediction]]:
# buffer = input.tobytes()
# myData = NSData.alloc().initWithBytes_length_(buffer, len(buffer))
# input_image = (
# Quartz.CIImage.imageWithBitmapData_bytesPerRow_size_format_options_(
# myData,
# 4 * input.width,
# NSMakeSize(input.width, input.height),
# Quartz.kCIFormatRGBA8,
# None,
# )
# )
# request_handler = Vision.VNImageRequestHandler.alloc().initWithCIImage_options_(
# input_image, None
# )
# loop = self.loop
# future = loop.create_future()
# def detect_face_handler(request, error):
# observations = request.results()
# if error:
# loop.call_soon_threadsafe(future.set_exception, Exception())
# else:
# objs = []
# for o in observations:
# confidence = o.confidence()
# bb = o.boundingBox()
# origin = bb.origin
# size = bb.size
# l = origin.x * input.width
# t = (1 - origin.y - size.height) * input.height
# w = size.width * input.width
# h = size.height * input.height
# prediction = Prediction(
# 0, confidence, from_bounding_box((l, t, w, h))
# )
# objs.append(prediction)
# loop.call_soon_threadsafe(future.set_result, objs)
# request = (
# Vision.VNDetectFaceRectanglesRequest.alloc().initWithCompletionHandler_(
# detect_face_handler
# )
# )
# error = request_handler.performRequests_error_([request], None)
# return future
# async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
# future = await asyncio.get_event_loop().run_in_executor(
# predictExecutor,
# lambda: self.predictVision(input),
# )
# objs = await future
# ret = self.create_detection_result(objs, src_size, cvss)
# return ret

View File

@@ -1,45 +0,0 @@
from __future__ import annotations
import os
import coremltools as ct
from predict.text_recognize import TextRecognition
class CoreMLTextRecognition(TextRecognition):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
def downloadModel(self, model: str):
model_version = "v7"
mlmodel = "model"
files = [
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/weights/weight.bin",
f"{model}/{model}.mlpackage/Data/com.apple.CoreML/{mlmodel}.mlmodel",
f"{model}/{model}.mlpackage/Manifest.json",
]
for f in files:
p = self.downloadFile(
f"https://github.com/koush/coreml-models/raw/main/{f}",
f"{model_version}/{f}",
)
modelFile = os.path.dirname(p)
model = ct.models.MLModel(modelFile)
inputName = model.get_spec().description.input[0].name
return model, inputName
def predictDetectModel(self, input):
model, inputName = self.detectModel
out_dict = model.predict({inputName: input})
results = list(out_dict.values())[0]
return results
def predictTextModel(self, input):
model, inputName = self.textModel
out_dict = model.predict({inputName: input})
preds = out_dict["linear_2"]
return preds
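Both CoreML recognizers load models the same way: the three downloads recreate an .mlpackage directory (Manifest.json plus the model spec and weights under Data/com.apple.CoreML/), and os.path.dirname of the last downloaded file is that package root. The loading step, isolated as a sketch:

import os
import coremltools as ct

def load_mlpackage(package_dir):
    # package_dir is the directory that contains Manifest.json
    assert os.path.isfile(os.path.join(package_dir, "Manifest.json"))
    return ct.models.MLModel(package_dir)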

View File

@@ -1 +0,0 @@
opencv-python

View File

@@ -1,3 +1,6 @@
# 2024-04-23 - modify timestamp to force pip reinstall
#
coremltools==7.1
Pillow>=5.4.1
# pillow for anything not intel linux, pillow-simd is available on x64 linux
Pillow>=5.4.1; sys_platform != 'linux' or platform_machine != 'x86_64'
pillow-simd; sys_platform == 'linux' and platform_machine == 'x86_64'


plugins/coreml/src/yolo Symbolic link
View File

@@ -0,0 +1 @@
../../openvino/src/yolo

View File

@@ -9,7 +9,4 @@ dist/*.js
dist/*.txt
__pycache__
all_models
sort_oh
download_models.sh
tsconfig.json
.venv

View File

@@ -1,20 +1,16 @@
{
// docker installation
// "scrypted.debugHost": "koushik-ubuntuvm",
// "scrypted.debugHost": "koushik-thin",
// "scrypted.serverRoot": "/server",
"scrypted.debugHost": "koushik-ubuntuvm",
"scrypted.serverRoot": "/home/koush/.scrypted",
// pi local installation
// "scrypted.debugHost": "192.168.2.119",
// "scrypted.serverRoot": "/home/pi/.scrypted",
// local checkout
// "scrypted.debugHost": "127.0.0.1",
// "scrypted.serverRoot": "/Users/koush/.scrypted",
// "scrypted.debugHost": "koushik-winvm",
// "scrypted.serverRoot": "C:\\Users\\koush\\.scrypted",
"scrypted.debugHost": "127.0.0.1",
"scrypted.serverRoot": "/Users/koush/.scrypted",
"scrypted.pythonRemoteRoot": "${config:scrypted.serverRoot}/volume/plugin.zip",
"python.analysis.extraPaths": [

plugins/dlib/README.md Normal file
View File

@@ -0,0 +1,3 @@
# Dlib Face Recognition for Scrypted
This plugin adds face recognition capabilities to any camera in Scrypted.

plugins/dlib/fs/black.jpg Normal file

Binary file not shown.


View File

@@ -1,48 +1,47 @@
{
"name": "@scrypted/rknn",
"version": "0.0.4",
"name": "@scrypted/tensorflow-lite",
"version": "0.0.18",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/rknn",
"version": "0.0.4",
"name": "@scrypted/tensorflow-lite",
"version": "0.0.18",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.29",
"version": "0.2.39",
"dev": true,
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"@babel/preset-typescript": "^7.16.7",
"adm-zip": "^0.4.13",
"axios": "^1.6.5",
"babel-loader": "^9.1.0",
"axios": "^0.21.4",
"babel-loader": "^8.2.3",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
"ncp": "^2.0.0",
"raw-loader": "^4.0.2",
"rimraf": "^3.0.2",
"tmp": "^0.2.1",
"ts-loader": "^9.4.2",
"typescript": "^4.9.4",
"webpack": "^5.75.0",
"typescript": "^4.9.3",
"webpack": "^5.74.0",
"webpack-bundle-analyzer": "^4.5.0"
},
"bin": {
"scrypted-changelog": "bin/scrypted-changelog.js",
"scrypted-debug": "bin/scrypted-debug.js",
"scrypted-deploy": "bin/scrypted-deploy.js",
"scrypted-deploy-debug": "bin/scrypted-deploy-debug.js",
"scrypted-package-json": "bin/scrypted-package-json.js",
"scrypted-readme": "bin/scrypted-readme.js",
"scrypted-setup-project": "bin/scrypted-setup-project.js",
"scrypted-webpack": "bin/scrypted-webpack.js"
},
"devDependencies": {
"@types/node": "^18.11.18",
"@types/node": "^18.11.9",
"@types/stringify-object": "^4.0.0",
"stringify-object": "^3.3.0",
"ts-node": "^10.4.0",
@@ -61,12 +60,12 @@
"@scrypted/sdk": {
"version": "file:../../sdk",
"requires": {
"@babel/preset-typescript": "^7.18.6",
"@types/node": "^18.11.18",
"@babel/preset-typescript": "^7.16.7",
"@types/node": "^18.11.9",
"@types/stringify-object": "^4.0.0",
"adm-zip": "^0.4.13",
"axios": "^1.6.5",
"babel-loader": "^9.1.0",
"axios": "^0.21.4",
"babel-loader": "^8.2.3",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
"ncp": "^2.0.0",
@@ -74,11 +73,10 @@
"rimraf": "^3.0.2",
"stringify-object": "^3.3.0",
"tmp": "^0.2.1",
"ts-loader": "^9.4.2",
"ts-node": "^10.4.0",
"typedoc": "^0.23.21",
"typescript": "^4.9.4",
"webpack": "^5.75.0",
"typescript": "^4.9.3",
"webpack": "^5.74.0",
"webpack-bundle-analyzer": "^4.5.0"
}
}

View File

@@ -1,16 +1,14 @@
{
"name": "@scrypted/rknn",
"description": "Scrypted Rockchip NPU Object Detection",
"name": "@scrypted/dlib",
"description": "Scrypted Face Recognition",
"keywords": [
"scrypted",
"plugin",
"rknn",
"rockchip",
"npu",
"motion",
"object",
"dlib",
"face",
"detect",
"detection",
"recognition",
"people",
"person"
],
@@ -28,22 +26,21 @@
"scrypted-package-json": "scrypted-package-json"
},
"scrypted": {
"name": "Rockchip NPU Object Detection",
"name": "Dlib Face Recognition",
"pluginDependencies": [
"@scrypted/objectdetector"
],
"runtime": "python",
"pythonVersion": {
"default": "3.10"
},
"type": "API",
"interfaces": [
"ObjectDetection",
"ObjectDetectionPreview"
"Camera",
"Settings",
"BufferConverter",
"ObjectDetection"
]
},
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.4"
"version": "0.0.1"
}

View File

@@ -0,0 +1,252 @@
from __future__ import annotations
import re
import scrypted_sdk
from typing import Any, Tuple
from predict import PredictPlugin, Prediction, Rectangle
import os
from PIL import Image
import face_recognition
import numpy as np
from typing import Any, List, Tuple, Mapping
from scrypted_sdk.types import ObjectDetectionModel, ObjectDetectionResult, ObjectsDetected, Setting
from predict import PredictSession
import threading
import asyncio
import base64
import json
import random
import string
from scrypted_sdk import RequestPictureOptions, MediaObject, Setting
import os
import json
def random_string():
letters = string.ascii_lowercase
return ''.join(random.choice(letters) for i in range(10))
MIME_TYPE = 'x-scrypted-dlib/x-raw-image'
class DlibPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Settings):
def __init__(self, nativeId: str | None = None):
super().__init__(MIME_TYPE, nativeId=nativeId)
self.labels = {
0: 'face'
}
self.mutex = threading.Lock()
self.known_faces = {}
self.encoded_faces = {}
self.load_known_faces()
def save_known_faces(self):
j = json.dumps(self.known_faces)
self.storage.setItem('known', j)
def load_known_faces(self):
self.known_faces = {}
self.encoded_faces = {}
try:
self.known_faces = json.loads(self.storage.getItem('known'))
except:
pass
for known in self.known_faces:
encoded = []
self.encoded_faces[known] = encoded
encodings = self.known_faces[known]
for str in encodings:
try:
parsed = base64.decodebytes(bytes(str, 'utf-8'))
encoding = np.frombuffer(parsed, dtype=np.float64)
encoded.append(encoding)
except:
pass
# width, height, channels
def get_input_details(self) -> Tuple[int, int, int]:
pass
def get_input_size(self) -> Tuple[float, float]:
pass
def getTriggerClasses(self) -> list[str]:
return ['person']
def detect_once(self, input: Image.Image, settings: Any, src_size, cvss) -> ObjectsDetected:
nparray = np.array(input.resize((int(input.width / 4), int(input.height / 4))))
with self.mutex:
face_locations = face_recognition.face_locations(nparray)
for idx, face in enumerate(face_locations):
t, r, b, l = face
t *= 4
r *= 4
b *= 4
l *= 4
face_locations[idx] = (t, r, b, l)
nparray = np.array(input)
with self.mutex:
face_encodings = face_recognition.face_encodings(nparray, face_locations)
all_ids = []
all_faces = []
for encoded in self.encoded_faces:
all_ids += ([encoded] * len(self.encoded_faces[encoded]))
all_faces += self.encoded_faces[encoded]
m = {}
for idx, fe in enumerate(face_encodings):
results = list(face_recognition.face_distance(all_faces, fe))
best = 1
if len(results):
best = min(results)
minpos = results.index(best)
if best > .6:
id = random_string() + '.jpg'
print('top face %s' % best)
print('new face %s' % id)
encoded = [fe]
self.encoded_faces[id] = encoded
all_faces += encoded
volume = os.environ['SCRYPTED_PLUGIN_VOLUME']
people = os.path.join(volume, 'unknown')
os.makedirs(people, exist_ok=True)
t, r, b, l = face_locations[idx]
cropped = input.crop((l, t, r, b))
fp = os.path.join(people, id)
cropped.save(fp)
else:
id = all_ids[minpos]
print('has face %s' % id)
m[idx] = id
# return
objs = []
for face in face_locations:
t, r, b, l = face
obj = Prediction(0, 1, Rectangle(
l,
t,
r,
b
))
objs.append(obj)
ret = self.create_detection_result(objs, src_size, ['face'], cvss)
for idx, d in enumerate(ret['detections']):
d['id'] = m.get(idx)
d['name'] = m.get(idx)
return ret
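The matching rule inside detect_once reduces to a nearest-neighbor lookup with a 0.6 cutoff: the smallest face_distance against every known encoding either names the face or flags it as a new unknown. Isolated as a helper (illustrative; the plugin inlines this):

import face_recognition

def match_face(encoding, all_ids, all_faces, threshold=0.6):
    # all_ids[i] names the person that produced all_faces[i]
    if not all_faces:
        return None
    distances = list(face_recognition.face_distance(all_faces, encoding))
    best = min(distances)
    if best > threshold:
        return None  # unknown face; the caller saves a thumbnail for labeling
    return all_ids[distances.index(best)]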
def track(self, detection_session: PredictSession, ret: ObjectsDetected):
pass
async def takePicture(self, options: RequestPictureOptions = None) -> MediaObject:
volume = os.environ['SCRYPTED_PLUGIN_VOLUME']
people = os.path.join(volume, 'unknown')
os.makedirs(people, exist_ok=True)
for unknown in os.listdir(people):
fp = os.path.join(people, unknown)
ret = scrypted_sdk.mediaManager.createMediaObjectFromUrl('file:/' + fp)
return await ret
black = os.path.join(volume, 'zip', 'unzipped', 'fs', 'black.jpg')
ret = scrypted_sdk.mediaManager.createMediaObjectFromUrl('file:/' + black)
return await ret
async def getSettings(self) -> list[Setting]:
ret = []
volume = os.environ['SCRYPTED_PLUGIN_VOLUME']
people = os.path.join(volume, 'unknown')
os.makedirs(people, exist_ok=True)
choices = list(self.known_faces.keys())
for unknown in os.listdir(people):
ret.append(
{
'key': unknown,
'title': 'Name',
'description': 'Associate this thumbnail with an existing person or identify a new person.',
'choices': choices,
'combobox': True,
}
)
ret.append(
{
'key': 'delete',
'title': 'Delete',
'description': 'Delete this face.',
'type': 'button',
}
)
break
if not len(ret):
ret.append(
{
'key': 'unknown',
'title': 'Unknown People',
'value': 'Waiting for unknown person...',
'description': 'There are no more people that need to be identified.',
'readonly': True,
}
)
ret.append(
{
'key': 'known',
'group': 'People',
'title': 'Familiar People',
'description': 'The people known to this plugin.',
'choices': choices,
'multiple': True,
'value': choices,
}
)
return ret
async def putSetting(self, key: str, value: str) -> None:
if key == 'known':
n = {}
for k in value:
n[k] = self.known_faces[k]
self.known_faces = n
self.save_known_faces()
elif value or key == 'delete':
volume = os.environ['SCRYPTED_PLUGIN_VOLUME']
people = os.path.join(volume, 'unknown')
os.makedirs(people, exist_ok=True)
for unknown in os.listdir(people):
fp = os.path.join(people, unknown)
os.remove(fp)
if key != 'delete':
encoded = self.encoded_faces[key]
strs = []
for e in encoded:
strs.append(base64.encodebytes(e.tobytes()).decode())
if not self.known_faces.get(value):
self.known_faces[value] = []
self.known_faces[value] += strs
self.save_known_faces()
break
await self.onDeviceEvent(scrypted_sdk.ScryptedInterface.Settings.value, None)
await self.onDeviceEvent(scrypted_sdk.ScryptedInterface.Camera.value, None)

plugins/dlib/src/main.py Normal file
View File

@@ -0,0 +1,4 @@
from dlibplugin import DlibPlugin
def create_scrypted_plugin():
return DlibPlugin()

plugins/dlib/src/pipeline Symbolic link
View File

@@ -0,0 +1 @@
../../tensorflow-lite/src/pipeline

View File

@@ -0,0 +1,10 @@
# plugin
Pillow>=5.4.1
PyGObject>=3.30.4; sys_platform != 'win32'
av>=10.0.0; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'
face-recognition
# sort_oh
scipy
filterpy
numpy

View File

@@ -1,23 +1,22 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.147",
"version": "0.0.137",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/hikvision",
"version": "0.0.147",
"version": "0.0.137",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/xml2js": "^0.4.14",
"content-type": "^1.0.5",
"xml2js": "^0.6.2"
"@types/xml2js": "^0.4.11",
"lodash": "^4.17.21",
"xml2js": "^0.6.0"
},
"devDependencies": {
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
"@types/node": "^18.15.11"
}
},
"../../common": {
@@ -28,16 +27,17 @@
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^5.0.1",
"node-fetch-commonjs": "^3.1.1",
"typescript": "^5.3.3"
},
"devDependencies": {
"@types/node": "^20.11.0",
"@types/node": "^20.10.8",
"ts-node": "^10.9.2"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.29",
"version": "0.3.4",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -83,50 +83,33 @@
"resolved": "../../sdk",
"link": true
},
"node_modules/@types/content-type": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@types/content-type/-/content-type-1.1.8.tgz",
"integrity": "sha512-1tBhmVUeso3+ahfyaKluXe38p+94lovUZdoVfQ3OnJo9uJC42JT7CBoN3k9HYhAae+GwiBYmHu+N9FZhOG+2Pg==",
"dev": true
},
"node_modules/@types/node": {
"version": "20.11.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
"dependencies": {
"undici-types": "~5.26.4"
}
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"node_modules/@types/xml2js": {
"version": "0.4.14",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
"integrity": "sha512-JdigeAKmCyoJUiQljjr7tQG3if9NkqGUgwEUqBvV0N7LM4HyQk7UXCnusRa1lnvXAEYJ8mw8GtZWioagNztOwA==",
"dependencies": {
"@types/node": "*"
}
},
"node_modules/content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==",
"engines": {
"node": ">= 0.6"
}
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"node_modules/sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
},
"node_modules/xml2js": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.0.tgz",
"integrity": "sha512-eLTh0kA8uHceqesPqSE+VvO1CDDJWMwlQfB6LuN6T8w6MaDJ8Txm8P7s5cHD0miF0V+GGTZrDQfxPZQVsur33w==",
"dependencies": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"
@@ -150,8 +133,9 @@
"requires": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"@types/node": "^20.11.0",
"@types/node": "^20.10.8",
"http-auth-utils": "^5.0.1",
"node-fetch-commonjs": "^3.1.1",
"ts-node": "^10.9.2",
"typescript": "^5.3.3"
}
@@ -180,47 +164,33 @@
"webpack-bundle-analyzer": "^4.5.0"
}
},
"@types/content-type": {
"version": "1.1.8",
"resolved": "https://registry.npmjs.org/@types/content-type/-/content-type-1.1.8.tgz",
"integrity": "sha512-1tBhmVUeso3+ahfyaKluXe38p+94lovUZdoVfQ3OnJo9uJC42JT7CBoN3k9HYhAae+GwiBYmHu+N9FZhOG+2Pg==",
"dev": true
},
"@types/node": {
"version": "20.11.30",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.30.tgz",
"integrity": "sha512-dHM6ZxwlmuZaRmUPfv1p+KrdD1Dci04FbdEm/9wEMouFqxYoFl5aMkt0VMAUtYRQDyYvD41WJLukhq/ha3YuTw==",
"requires": {
"undici-types": "~5.26.4"
}
"version": "18.15.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.11.tgz",
"integrity": "sha512-E5Kwq2n4SbMzQOn6wnmBjuK9ouqlURrcZDVfbo9ftDDTFt3nk7ZKK4GMOzoYgnpQJKcxwQw+lGaBvvlMo0qN/Q=="
},
"@types/xml2js": {
"version": "0.4.14",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.14.tgz",
"integrity": "sha512-4YnrRemBShWRO2QjvUin8ESA41rH+9nQGLUGZV/1IDhi3SL9OhdpNC/MrulTWuptXKwhx/aDxE7toV0f/ypIXQ==",
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.11.tgz",
"integrity": "sha512-JdigeAKmCyoJUiQljjr7tQG3if9NkqGUgwEUqBvV0N7LM4HyQk7UXCnusRa1lnvXAEYJ8mw8GtZWioagNztOwA==",
"requires": {
"@types/node": "*"
}
},
"content-type": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz",
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="
"lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
},
"sax": {
"version": "1.2.4",
"resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz",
"integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw=="
},
"undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
},
"xml2js": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz",
"integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==",
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.0.tgz",
"integrity": "sha512-eLTh0kA8uHceqesPqSE+VvO1CDDJWMwlQfB6LuN6T8w6MaDJ8Txm8P7s5cHD0miF0V+GGTZrDQfxPZQVsur33w==",
"requires": {
"sax": ">=0.6.0",
"xmlbuilder": "~11.0.0"

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/hikvision",
"version": "0.0.147",
"version": "0.0.137",
"description": "Hikvision Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",
@@ -37,12 +37,11 @@
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
"@types/xml2js": "^0.4.14",
"content-type": "^1.0.5",
"xml2js": "^0.6.2"
"@types/xml2js": "^0.4.11",
"lodash": "^4.17.21",
"xml2js": "^0.6.0"
},
"devDependencies": {
"@types/content-type": "^1.1.8",
"@types/node": "^20.11.30"
"@types/node": "^18.15.11"
}
}

View File

@@ -1,17 +1,8 @@
import { AuthFetchCredentialState, HttpFetchOptions, authHttpFetch } from '@scrypted/common/src/http-auth-fetch';
import { readLine } from '@scrypted/common/src/read-stream';
import { parseHeaders, readBody, readMessage } from '@scrypted/common/src/rtsp-server';
import contentType from 'content-type';
import { IncomingMessage } from 'http';
import { EventEmitter, Readable } from 'stream';
import { Destroyable } from '../../rtsp/src/rtsp';
import { Readable } from 'stream';
import { getDeviceInfo } from './probe';
export const detectionMap = {
human: 'person',
vehicle: 'car',
}
export function getChannel(channel: string) {
return channel || '101';
}
@@ -24,8 +15,6 @@ export enum HikvisionCameraEvent {
// <eventType>linedetection</eventType>
// <eventState>inactive</eventState>
LineDetection = "<eventType>linedetection</eventType>",
RegionEntrance = "<eventType>regionEntrance</eventType>",
RegionExit = "<eventType>regionExit</eventType>",
// <eventType>fielddetection</eventType>
// <eventState>active</eventState>
// <eventType>fielddetection</eventType>
@@ -42,7 +31,7 @@ export interface HikvisionCameraStreamSetup {
export class HikvisionCameraAPI {
credential: AuthFetchCredentialState;
deviceModel: Promise<string>;
listenerPromise: Promise<Destroyable>;
listenerPromise: Promise<IncomingMessage>;
constructor(public ip: string, username: string, password: string, public console: Console) {
this.credential = {
@@ -140,106 +129,35 @@ export class HikvisionCameraAPI {
return response.body;
}
async listenEvents(): Promise<Destroyable> {
const events = new EventEmitter();
(events as any).destroy = () => { };
async listenEvents() {
// support multiple cameras listening to a single stream
if (!this.listenerPromise) {
const url = `http://${this.ip}/ISAPI/Event/notification/alertStream`;
let lastSmartDetection: string;
this.listenerPromise = this.request({
url,
responseType: 'readable',
}).then(response => {
const stream: IncomingMessage = response.body;
(events as any).destroy = () => {
stream.destroy();
events.removeAllListeners();
};
stream.on('close', () => {
this.listenerPromise = undefined;
events.emit('close');
});
stream.on('end', () => {
this.listenerPromise = undefined;
events.emit('end');
});
stream.on('error', e => {
this.listenerPromise = undefined;
events.emit('error', e);
});
const stream = response.body;
stream.socket.setKeepAlive(true);
const ct = stream.headers['content-type'];
// make content type parsable as content disposition filename
const cd = contentType.parse(ct);
let { boundary } = cd.parameters;
boundary = `--${boundary}`;
const boundaryEnd = `${boundary}--`;
(async () => {
while (true) {
let ignore = await readLine(stream);
ignore = ignore.trim();
if (!ignore)
continue;
if (ignore === boundaryEnd)
continue;
if (ignore !== boundary) {
this.console.error('expected boundary but found', ignore);
throw new Error('expected boundary');
}
const message = await readMessage(stream);
events.emit('data', message);
message.unshift('');
const headers = parseHeaders(message);
const body = await readBody(stream, headers);
try {
if (!headers['content-type'].includes('application/xml') && lastSmartDetection) {
if (!headers['content-type']?.startsWith('image/jpeg')) {
continue;
}
events.emit('smart', lastSmartDetection, body);
lastSmartDetection = undefined;
continue;
}
}
finally {
// is it possible that smart detections are sent without images?
// if so, flush this detection.
if (lastSmartDetection) {
events.emit('smart', lastSmartDetection);
}
}
const data = body.toString();
events.emit('data', data);
for (const event of Object.values(HikvisionCameraEvent)) {
if (data.indexOf(event) !== -1) {
const cameraNumber = data.match(/<channelID>(.*?)</)?.[1] || data.match(/<dynChannelID>(.*?)</)?.[1];
const inactive = data.indexOf('<eventState>inactive</eventState>') !== -1;
events.emit('event', event, cameraNumber, inactive, data);
if (event === HikvisionCameraEvent.LineDetection
|| event === HikvisionCameraEvent.RegionEntrance
|| event === HikvisionCameraEvent.RegionExit
|| event === HikvisionCameraEvent.FieldDetection) {
lastSmartDetection = data;
}
}
stream.on('data', (buffer: Buffer) => {
const data = buffer.toString();
for (const event of Object.values(HikvisionCameraEvent)) {
if (data.indexOf(event) !== -1) {
const cameraNumber = data.match(/<channelID>(.*?)</)?.[1] || data.match(/<dynChannelID>(.*?)</)?.[1];
const inactive = data.indexOf('<eventState>inactive</eventState>') !== -1;
stream.emit('event', event, cameraNumber, inactive, data);
}
}
})()
.catch(() => stream.destroy());
return events as any as Destroyable;
});
return stream;
});
this.listenerPromise.catch(() => this.listenerPromise = undefined);
this.listenerPromise.then(stream => {
stream.on('close', () => this.listenerPromise = undefined);
stream.on('end', () => this.listenerPromise = undefined);
});
}
return this.listenerPromise;
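The rewritten listener parses the ISAPI alert stream as multipart: each boundary-delimited part is either an XML event (retained only when it is a smart event such as linedetection) or the JPEG belonging to the preceding smart event. The pairing logic, reduced to a sketch (Python for brevity; parts yields (content_type, body) tuples):

def pair_smart_events(parts):
    last_xml = None
    for content_type, body in parts:
        if content_type.startswith("application/xml"):
            last_xml = body  # candidate smart detection awaiting its snapshot
        elif content_type.startswith("image/jpeg") and last_xml is not None:
            yield last_xml, body  # emit 'smart' with the paired image
            last_xml = None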

View File

@@ -1,12 +1,11 @@
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, MediaObject, MediaStreamOptions, ObjectDetectionResult, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, Reboot, RequestPictureOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting } from "@scrypted/sdk";
import crypto from 'crypto';
import sdk, { Camera, DeviceCreatorSettings, DeviceInformation, FFmpegInput, Intercom, MediaObject, MediaStreamOptions, Reboot, RequestPictureOptions, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting } from "@scrypted/sdk";
import { PassThrough } from "stream";
import xml2js from 'xml2js';
import { RtpPacket } from '../../../external/werift/packages/rtp/src/rtp/rtp';
import { OnvifIntercom } from "../../onvif/src/onvif-intercom";
import { RtspProvider, RtspSmartCamera, UrlMediaStreamOptions } from "../../rtsp/src/rtsp";
import { startRtpForwarderProcess } from '../../webrtc/src/rtp-forwarders';
import { HikvisionCameraAPI, HikvisionCameraEvent, detectionMap } from "./hikvision-camera-api";
import { HikvisionCameraAPI, HikvisionCameraEvent } from "./hikvision-camera-api";
const { mediaManager } = sdk;
@@ -16,17 +15,15 @@ function channelToCameraNumber(channel: string) {
return channel.substring(0, channel.length - 2);
}
class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboot, ObjectDetector {
class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboot {
detectedChannels: Promise<Map<string, MediaStreamOptions>>;
client: HikvisionCameraAPI;
onvifIntercom = new OnvifIntercom(this);
activeIntercom: Awaited<ReturnType<typeof startRtpForwarderProcess>>;
hasSmartDetection: boolean;
constructor(nativeId: string, provider: RtspProvider) {
super(nativeId, provider);
this.hasSmartDetection = this.storage.getItem('hasSmartDetection') === 'true';
this.updateDevice();
this.updateDeviceInfo();
}
@@ -66,52 +63,41 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
let ignoreCameraNumber: boolean;
const motionTimeoutDuration = 20000;
// check if the camera+channel field is in use, and filter events.
const checkCameraNumber = async (cameraNumber: string) => {
// check if the camera+channel field is in use, and filter events.
if (this.getRtspChannel()) {
// it is possible to set it up to use a camera number
// on an nvr IP (which gives RTSP urls through the NVR), but then use a http port
// that gives a filtered event stream from only that camera.
// in this case, the camera numbers will not
// match as they will always be "1".
// to detect that a camera specific endpoint is being used
// can look at the channel ids, and see if that camera number is found.
// this is different from the use case where the NVR or camera
// is using a port other than 80 (the default).
// could add a setting to have the user explicitly denote nvr usage
// but that is error prone.
const userCameraNumber = this.getCameraNumber();
if (ignoreCameraNumber === undefined && this.detectedChannels) {
const channelIds = (await this.detectedChannels).keys();
ignoreCameraNumber = true;
for (const id of channelIds) {
if (channelToCameraNumber(id) === userCameraNumber) {
ignoreCameraNumber = false;
break;
}
}
}
if (!ignoreCameraNumber && cameraNumber !== userCameraNumber) {
// this.console.error(`### Skipping motion event ${cameraNumber} != ${this.getCameraNumber()}`);
return false;
}
}
return true;
};
events.on('event', async (event: HikvisionCameraEvent, cameraNumber: string, inactive: boolean) => {
if (event === HikvisionCameraEvent.MotionDetected
|| event === HikvisionCameraEvent.LineDetection
|| event === HikvisionCameraEvent.RegionEntrance
|| event === HikvisionCameraEvent.RegionExit
|| event === HikvisionCameraEvent.FieldDetection) {
if (!await checkCameraNumber(cameraNumber))
return;
// check if the camera+channel field is in use, and filter events.
if (this.getRtspChannel()) {
// it is possible to set it up to use a camera number
// on an nvr IP (which gives RTSP urls through the NVR), but then use a http port
// that gives a filtered event stream from only that camera.
// in this case, the camera numbers will not
// match as they will always be "1".
// to detect that a camera specific endpoint is being used
// can look at the channel ids, and see if that camera number is found.
// this is different from the use case where the NVR or camera
// is using a port other than 80 (the default).
// could add a setting to have the user explicitly denote nvr usage
// but that is error prone.
const userCameraNumber = this.getCameraNumber();
if (ignoreCameraNumber === undefined && this.detectedChannels) {
const channelIds = (await this.detectedChannels).keys();
ignoreCameraNumber = true;
for (const id of channelIds) {
if (channelToCameraNumber(id) === userCameraNumber) {
ignoreCameraNumber = false;
break;
}
}
}
if (!ignoreCameraNumber && cameraNumber !== userCameraNumber) {
// this.console.error(`### Skipping motion event ${cameraNumber} != ${this.getCameraNumber()}`);
return;
}
}
this.motionDetected = true;
clearTimeout(motionTimeout);
@@ -120,107 +106,11 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
this.motionDetected = false;
}, motionTimeoutDuration);
}
});
let inputDimensions: [number, number];
events.on('smart', async (data: string, image: Buffer) => {
if (!this.hasSmartDetection) {
this.hasSmartDetection = true;
this.storage.setItem('hasSmartDetection', 'true');
this.updateDevice();
}
const xml = await xml2js.parseStringPromise(data);
const [channelId] = xml.EventNotificationAlert.channelID;
if (!await checkCameraNumber(channelId)) {
this.console.warn('chann fail')
return;
}
const now = Date.now();
let detections: ObjectDetectionResult[] = xml.EventNotificationAlert?.DetectionRegionList?.map(region => {
const { DetectionRegionEntry } = region;
const dre = DetectionRegionEntry[0];
if (!DetectionRegionEntry)
return;
const { detectionTarget } = dre;
// const { TargetRect } = dre;
// const { X, Y, width, height } = TargetRect[0];
const [name] = detectionTarget;
return {
score: 1,
className: detectionMap[name] || name,
// boundingBox: [
// parseInt(X),
// parseInt(Y),
// parseInt(width),
// parseInt(height),
// ],
// movement: {
// moving: true,
// firstSeen: now,
// lastSeen: now,
// }
} as ObjectDetectionResult;
});
detections = detections?.filter(d => d);
if (!detections?.length)
return;
// if (inputDimensions === undefined && loadSharp()) {
// try {
// const { image: i, metadata } = await loadVipsMetadata(image);
// i.destroy();
// inputDimensions = [metadata.width, metadata.height];
// }
// catch (e) {
// inputDimensions = null;
// }
// finally {
// }
// }
let detectionId: string;
if (image) {
detectionId = crypto.randomBytes(4).toString('hex');
this.recentDetections.set(detectionId, image);
setTimeout(() => this.recentDetections.delete(detectionId), 10000);
}
const detected: ObjectsDetected = {
inputDimensions,
detectionId,
timestamp: now,
detections,
};
this.onDeviceEvent(ScryptedInterface.ObjectDetector, detected);
});
})
return events;
}
recentDetections = new Map<string, Buffer>();
async getDetectionInput(detectionId: string, eventId?: any): Promise<MediaObject> {
const image = this.recentDetections.get(detectionId);
if (!image)
return;
return mediaManager.createMediaObject(image, 'image/jpeg');
}
async getObjectTypes(): Promise<ObjectDetectionTypes> {
return {
classes: [
...Object.values(detectionMap),
]
}
}
createClient() {
return new HikvisionCameraAPI(this.getHttpAddress(), this.getUsername(), this.getPassword(), this.console);
}
@@ -394,9 +284,6 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
interfaces.push(ScryptedInterface.Intercom);
}
if (this.hasSmartDetection)
interfaces.push(ScryptedInterface.ObjectDetector);
this.provider.updateDevice(this.nativeId, this.name, interfaces, type);
}
@@ -521,7 +408,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
const put = this.getClient().request({
url,
method: 'PUT',
responseType: 'text',
responseType: 'readable',
headers: {
'Content-Type': 'application/octet-stream',
// 'Connection': 'close',
@@ -553,12 +440,6 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
forwarder.killPromise.finally(() => {
this.console.log('audio finished');
passthrough.end();
setTimeout(() => {
this.stopIntercom();
}, 1000);
});
put.finally(() => {
this.stopIntercom();
});
@@ -567,7 +448,7 @@ class HikvisionCamera extends RtspSmartCamera implements Camera, Intercom, Reboo
if (response.statusCode !== 200)
forwarder.kill();
})
.catch(() => forwarder.kill());
.catch(() => forwarder.kill());
}
async stopIntercom(): Promise<void> {
@@ -700,4 +581,4 @@ class HikvisionProvider extends RtspProvider {
}
}
export default HikvisionProvider;
export default new HikvisionProvider();

View File

@@ -1,4 +1,4 @@
{
"scrypted.debugHost": "127.0.0.1"
"scrypted.debugHost": "scrypted-server"
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/homekit",
"version": "1.2.54",
"version": "1.2.43",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/homekit",
"version": "1.2.54",
"version": "1.2.43",
"dependencies": {
"@koush/werift-src": "file:../../external/werift",
"check-disk-space": "^3.4.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/homekit",
"version": "1.2.54",
"version": "1.2.43",
"description": "HomeKit Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",

View File

@@ -66,7 +66,7 @@ export function createHAPUsername() {
}
export function getAddresses() {
const addresses = Object.entries(os.networkInterfaces()).filter(([iface]) => iface.startsWith('en') || iface.startsWith('eth') || iface.startsWith('wlan') || iface.startsWith('net')).map(([_, addr]) => addr).flat().map(info => info.address).filter(address => address);
const addresses = Object.entries(os.networkInterfaces()).filter(([iface]) => iface.startsWith('en') || iface.startsWith('eth') || iface.startsWith('wlan')).map(([_, addr]) => addr).flat().map(info => info.address).filter(address => address);
return addresses;
}
@@ -74,17 +74,13 @@ export function getRandomPort() {
return Math.round(30000 + Math.random() * 20000);
}
export function createHAPUsernameStorageSettingsDict(device: { storage: Storage, name?: string }, group: string, subgroup?: string): StorageSettingsDict<'mac' | 'addIdentifyingMaterial' | 'qrCode' | 'pincode' | 'portOverride' | 'resetAccessory'> {
export function createHAPUsernameStorageSettingsDict(device: { storage: Storage, name?: string }, group: string, subgroup?: string): StorageSettingsDict<'mac' | 'qrCode' | 'pincode' | 'portOverride' | 'resetAccessory'> {
const alertReload = () => {
sdk.log.a(`The HomeKit plugin will reload momentarily for the changes to ${device.name} to take effect.`);
sdk.deviceManager.requestRestart();
}
return {
addIdentifyingMaterial: {
hide: true,
type: 'boolean',
},
qrCode: {
group,
// subgroup,

View File

@@ -267,9 +267,6 @@ export class HomeKitPlugin extends ScryptedDeviceBase implements MixinProvider,
},
undefined, 'Pairing'));
storageSettings.settings.pincode.persistedDefaultValue = randomPinCode();
// TODO: change this value after this current default has been persisted to existing clients.
// changing it now will cause existing accessories be renamed.
storageSettings.settings.addIdentifyingMaterial.persistedDefaultValue = false;
const mixinConsole = deviceManager.getMixinConsole(device.id, this.nativeId);
@@ -280,7 +277,7 @@ export class HomeKitPlugin extends ScryptedDeviceBase implements MixinProvider,
published = true;
mixinConsole.log('Device is in accessory mode and is online. HomeKit services are being published.');
await this.publishAccessory(accessory, storageSettings.values.mac, storageSettings.values.pincode, standaloneCategory, storageSettings.values.portOverride, storageSettings.values.addIdentifyingMaterial);
await this.publishAccessory(accessory, storageSettings.values.mac, storageSettings.values.pincode, standaloneCategory, storageSettings.values.portOverride);
if (!hasPublished) {
hasPublished = true;
storageSettings.values.qrCode = accessory.setupURI();
@@ -423,7 +420,7 @@ export class HomeKitPlugin extends ScryptedDeviceBase implements MixinProvider,
return bind;
}
async publishAccessory(accessory: Accessory, username: string, pincode: string, category: Categories, port: number, addIdentifyingMaterial: boolean) {
async publishAccessory(accessory: Accessory, username: string, pincode: string, category: Categories, port: number) {
const bind = await this.getAdvertiserInterfaceBind();
await accessory.publish({
@@ -431,7 +428,7 @@ export class HomeKitPlugin extends ScryptedDeviceBase implements MixinProvider,
port,
pincode,
category,
addIdentifyingMaterial,
addIdentifyingMaterial: false,
advertiser: this.getAdvertiser(),
bind,
});

View File

@@ -103,22 +103,24 @@ addSupportedType({
const isRecordingEnabled = device.interfaces.includes(ScryptedInterface.MotionSensor);
let configuration: CameraRecordingConfiguration;
const openRecordingStreams = new Map<number, AsyncGenerator<RecordingPacket>>();
const openRecordingStreams = new Map<number, Deferred<any>>();
if (isRecordingEnabled) {
recordingDelegate = {
updateRecordingConfiguration(newConfiguration: CameraRecordingConfiguration) {
configuration = newConfiguration;
},
handleRecordingStreamRequest(streamId: number): AsyncGenerator<RecordingPacket> {
const ret = handleFragmentsRequests(streamId, device, configuration, console, homekitPlugin,
() => openRecordingStreams.has(streamId));
openRecordingStreams.set(streamId, ret);
const ret = handleFragmentsRequests(streamId, device, configuration, console, homekitPlugin);
const d = new Deferred<any>();
d.promise.then(reason => {
ret.throw(new Error(reason.toString()));
openRecordingStreams.delete(streamId);
});
openRecordingStreams.set(streamId, d);
return ret;
},
closeRecordingStream(streamId, reason) {
const r = openRecordingStreams.get(streamId);
r?.throw(new Error(reason?.toString()));
openRecordingStreams.delete(streamId);
openRecordingStreams.get(streamId)?.resolve(reason);
},
updateRecordingActive(active) {
},

View File

@@ -67,29 +67,12 @@ async function checkMp4StartsWithKeyFrame(console: Console, mp4: Buffer) {
await timeoutPromise(1000, new Promise(resolve => cp.on('exit', resolve)));
const h264 = Buffer.concat(buffers);
let offset = 0;
let countedZeroes = 0;
while (offset < h264.length - 6) {
const byte = h264[offset];
if (byte === 0) {
countedZeroes = Math.min(4, countedZeroes + 1);
if (h264.readInt32BE(offset) !== 1) {
offset++;
continue;
}
if (countedZeroes < 2) {
countedZeroes = 0;
offset++
continue;
}
countedZeroes = 0;
if (byte !== 1) {
offset++;
continue;
}
offset++;
offset += 4;
let naluType = h264.readUInt8(offset) & 0x1f;
if (naluType === NAL_TYPE_FU_A) {
offset++;
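Both versions of the scan above walk Annex B H.264 looking for a start code, then classify the following NALU by the low five bits of its header byte (FU-A fragments defer to the reassembled type one byte later). The four-byte start-code variant, as a sketch:

def first_nalu_type(h264):
    offset = 0
    while offset < len(h264) - 5:
        # 0x00000001 start code, then the NALU header byte
        if h264[offset:offset + 4] != b"\x00\x00\x00\x01":
            offset += 1
            continue
        return h264[offset + 4] & 0x1F
    return None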
@@ -116,7 +99,7 @@ async function checkMp4StartsWithKeyFrame(console: Console, mp4: Buffer) {
}
export async function* handleFragmentsRequests(streamId: number, device: ScryptedDevice & VideoCamera & MotionSensor & AudioSensor,
configuration: CameraRecordingConfiguration, console: Console, homekitPlugin: HomeKitPlugin, isOpen: () => boolean): AsyncGenerator<RecordingPacket> {
configuration: CameraRecordingConfiguration, console: Console, homekitPlugin: HomeKitPlugin): AsyncGenerator<RecordingPacket> {
// homekitPlugin.storageSettings.values.lastKnownHomeHub = connection.remoteAddress;
@@ -194,7 +177,7 @@ export async function* handleFragmentsRequests(streamId: number, device: Scrypte
}
let audioArgs: string[];
if (!noAudio && (transcodeRecording || isDefinitelyNotAAC || debugMode.audio)) {
if (transcodeRecording || isDefinitelyNotAAC || debugMode.audio) {
if (!(transcodeRecording || debugMode.audio))
console.warn('Recording audio is not explicitly AAC, forcing transcoding. Setting audio output to AAC is recommended.', audioCodec);
@@ -319,7 +302,6 @@ export async function* handleFragmentsRequests(streamId: number, device: Scrypte
let needSkip = true;
let ftyp: Buffer[];
let moov: Buffer[];
for await (const box of generator) {
const { header, type, data } = box;
// console.log('motion fragment box', type);
@@ -332,7 +314,7 @@ export async function* handleFragmentsRequests(streamId: number, device: Scrypte
checkMp4 = false;
// pending will contain the moof
try {
if (false && !await checkMp4StartsWithKeyFrame(console, Buffer.concat([...ftyp, ...moov, ...pending, header, data]))) {
if (!await checkMp4StartsWithKeyFrame(console, Buffer.concat([...ftyp, ...moov, ...pending, header, data]))) {
needSkip = false;
pending = [];
continue;
@@ -361,19 +343,17 @@ export async function* handleFragmentsRequests(streamId: number, device: Scrypte
data: fragment,
isLast,
}
if (!isOpen())
return;
yield recordingPacket;
if (wasLast)
break;
}
}
console.log(`motion recording finished`);
}
catch (e) {
console.log(`motion recording completed ${e}`);
}
finally {
console.log(`motion recording finished`);
clearTimeout(videoTimeout);
cleanupPipes();
recordingFile?.end();

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/mqtt",
"version": "0.0.80",
"version": "0.0.77",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/mqtt",
"version": "0.0.80",
"version": "0.0.77",
"dependencies": {
"aedes": "^0.46.1",
"axios": "^0.23.0",

View File

@@ -41,5 +41,5 @@
"@types/node": "^18.4.2",
"@types/nunjucks": "^3.2.0"
},
"version": "0.0.80"
"version": "0.0.77"
}

View File

@@ -6,7 +6,6 @@ import nunjucks from 'nunjucks';
import sdk from "@scrypted/sdk";
import type { MqttProvider } from './main';
import { getHsvFromXyColor, getXyYFromHsvColor } from './color-util';
import { MqttEvent } from './api/mqtt-client';
const { deviceManager } = sdk;
@@ -26,7 +25,7 @@ typeMap.set('light', {
const interfaces = [ScryptedInterface.OnOff, ScryptedInterface.Brightness];
if (config.color_mode) {
config.supported_color_modes.forEach(color_mode => {
if (color_mode === 'xy')
if (color_mode === 'xy')
interfaces.push(ScryptedInterface.ColorSettingHsv);
else if (color_mode === 'hs')
interfaces.push(ScryptedInterface.ColorSettingHsv);
@@ -247,7 +246,7 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
return;
}
this.debounceCallbacks = new Map<string, Set<(payload: Buffer) => void>>();
this.debounceCallbacks = new Map<string, Set<(payload: Buffer) => void>>();
const { client } = provider;
client.on('message', this.listener.bind(this));
@@ -298,7 +297,7 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
this.console.log('binding...');
const { client } = this.provider;
this.debounceCallbacks = new Map<string, Set<(payload: Buffer) => void>>();
this.debounceCallbacks = new Map<string, Set<(payload: Buffer) => void>>();
if (this.providedInterfaces.includes(ScryptedInterface.Online)) {
const config = this.loadComponentConfig(ScryptedInterface.Online);
@@ -469,7 +468,7 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
config.command_off_template,
command, "ON");
} else {
this.publishValue(config.command_topic,
this.publishValue(config.command_topic,
undefined, command, command);
}
}
@@ -490,7 +489,7 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
config.command_on_template,
command, "ON");
} else {
this.publishValue(config.command_topic,
this.publishValue(config.command_topic,
undefined, command, command);
}
}
@@ -507,8 +506,8 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
config.brightness_value_template,
scaledBrightness, scaledBrightness);
} else {
this.publishValue(config.command_topic,
`{ "state": "${scaledBrightness === 0 ? 'OFF' : 'ON'}", "brightness": ${scaledBrightness} }`,
this.publishValue(config.command_topic,
`{ "state": "${ scaledBrightness === 0 ? 'OFF' : 'ON'}", "brightness": ${scaledBrightness} }`,
scaledBrightness, 255);
}
}
@@ -526,7 +525,7 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
if (kelvin >= 0 || kelvin <= 100) {
const min = await this.getTemperatureMinK();
const max = await this.getTemperatureMaxK();
const diff = (max - min) * (kelvin / 100);
const diff = (max - min) * (kelvin/100);
kelvin = Math.round(min + diff);
}
@@ -543,7 +542,7 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
config.color_temp_command_template,
color, color);
} else {
this.publishValue(config.command_topic,
this.publishValue(config.command_topic,
undefined, color, color);
}
}
@@ -568,7 +567,7 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
config.hs_command_template,
color, color);
} else {
this.publishValue(config.command_topic,
this.publishValue(config.command_topic,
undefined, color, color);
}
} else if (this.colorMode === "xy") {
@@ -590,12 +589,12 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
config.xy_command_template,
color, color);
} else {
this.publishValue(config.command_topic,
this.publishValue(config.command_topic,
undefined, color, color);
}
}
}
}
async lock(): Promise<void> {
const config = this.loadComponentConfig(ScryptedInterface.Lock);
return this.publishValue(config.command_topic,
@@ -611,9 +610,6 @@ export class MqttAutoDiscoveryDevice extends ScryptedDeviceBase implements Onlin
interface AutoDiscoveryConfig {
component: string;
create: (mqttId: string, device: MixinDeviceBase<any>, topic: string) => any;
subscriptions?: {
[topic: string]: (device: MixinDeviceBase<any>, event: MqttEvent) => void;
}
}
const autoDiscoveryMap = new Map<string, AutoDiscoveryConfig>();
@@ -680,31 +676,7 @@ autoDiscoveryMap.set(ScryptedInterface.HumiditySensor, {
}
});
autoDiscoveryMap.set(ScryptedInterface.OnOff, {
component: 'switch',
create(mqttId, device, topic) {
return {
payload_on: 'true',
payload_off: 'false',
state_topic: `${topic}/${ScryptedInterfaceProperty.on}`,
command_topic: `${topic}/${ScryptedInterfaceProperty.on}/set`,
...getAutoDiscoveryDevice(device, mqttId),
}
},
subscriptions: {
'on/set': (device, event) => {
const d = sdk.systemManager.getDeviceById<OnOff>(device.id);
if (event.json)
d.turnOn();
else
d.turnOff();
}
},
});
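The new OnOff entry registers a command subscription: Home Assistant publishes a JSON boolean to <topic>/on/set and the callback routes it to turnOn or turnOff. The consuming side, sketched with paho-mqtt (an assumption for illustration; the plugin itself uses the Node mqtt client, and the broker address and topic are hypothetical):

import json
import paho.mqtt.client as mqtt

def on_message(client, userdata, msg):
    # command topic convention: <device topic>/on/set carries "true"/"false"
    if msg.topic.endswith("/on/set"):
        print("turn on" if json.loads(msg.payload) else "turn off")

client = mqtt.Client()
client.on_message = on_message
client.connect("localhost")  # assumption: a broker on localhost
client.subscribe("scrypted/light/on/set")
client.loop_forever()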
export function publishAutoDiscovery(mqttId: string, client: Client, device: MixinDeviceBase<any>, topic: string, subscribe: boolean, autoDiscoveryPrefix = 'homeassistant') {
const subs = new Set<string>();
export function publishAutoDiscovery(mqttId: string, client: Client, device: MixinDeviceBase<any>, topic: string, autoDiscoveryPrefix = 'homeassistant') {
for (const iface of device.interfaces) {
const found = autoDiscoveryMap.get(iface);
if (!found)
@@ -719,38 +691,5 @@ export function publishAutoDiscovery(mqttId: string, client: Client, device: Mix
client.publish(configTopic, JSON.stringify(config), {
retain: true,
});
if (subscribe) {
const subscriptions = found.subscriptions || {};
for (const subscriptionTopic of Object.keys(subscriptions || {})) {
subs.add(subscriptionTopic);
const fullTopic = topic + '/' + subscriptionTopic;
const cb = subscriptions[subscriptionTopic];
client.subscribe(fullTopic)
client.on('message', (messageTopic, message) => {
if (fullTopic !== messageTopic && fullTopic !== '/' + messageTopic)
return;
device.console.log('mqtt message', subscriptionTopic, message.toString());
cb(device, {
get text() {
return message.toString();
},
get json() {
try {
return JSON.parse(message.toString());
}
catch (e) {
}
},
get buffer() {
return message;
}
})
});
}
}
}
return subs;
}

View File

@@ -294,15 +294,13 @@ class MqttPublisherMixin extends SettingsMixinDeviceBase<any> {
allProperties.push(...properties);
}
let found: ReturnType<typeof publishAutoDiscovery>;
client.on('connect', packet => {
this.console.log('MQTT client connected, publishing current state.');
for (const method of allMethods) {
client.subscribe(this.pathname + '/' + method);
}
found = publishAutoDiscovery(this.provider.storageSettings.values.mqttId, client, this, this.pathname, true, 'homeassistant');
publishAutoDiscovery(this.provider.storageSettings.values.mqttId, client, this, this.pathname, 'homeassistant');
client.subscribe('homeassistant/status');
this.publishState(client);
});
@@ -313,17 +311,14 @@ class MqttPublisherMixin extends SettingsMixinDeviceBase<any> {
client.on('message', async (messageTopic, message) => {
if (messageTopic === 'homeassistant/status') {
publishAutoDiscovery(this.provider.storageSettings.values.mqttId, client, this, this.pathname, false, 'homeassistant');
publishAutoDiscovery(this.provider.storageSettings.values.mqttId, client, this, this.pathname, 'homeassistant');
this.publishState(client);
return;
}
const method = messageTopic.substring(this.pathname.length + 1);
if (!allMethods.includes(method)) {
if (!allProperties.includes(method)) {
if (!found?.has(method)) {
this.console.warn('unknown topic', method);
}
}
if (!allProperties.includes(method))
this.console.warn('unknown topic', method);
return;
}
try {
@@ -597,7 +592,7 @@ export class MqttProvider extends ScryptedDeviceBase implements DeviceProvider,
return isPublishable(type, interfaces) ? [ScryptedInterface.Settings] : undefined;
}
async getMixin(mixinDevice: any, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState: WritableDeviceState): Promise<any> {
async getMixin(mixinDevice: any, mixinDeviceInterfaces: ScryptedInterface[], mixinDeviceState:WritableDeviceState): Promise<any> {
return new MqttPublisherMixin(this, {
mixinDevice,
mixinDeviceState,

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/objectdetector",
"version": "0.1.39",
"version": "0.1.33",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/objectdetector",
"version": "0.1.39",
"version": "0.1.33",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/objectdetector",
"version": "0.1.39",
"version": "0.1.33",
"description": "Scrypted Video Analysis Plugin. Installed alongside a detection service like OpenCV or TensorFlow.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -1,75 +0,0 @@
// visual similarity
const similarCharacters = [
['0', 'O', 'D'],
['1', 'I'],
['2', 'Z'],
['4', 'A'],
['5', 'S'],
['8', 'B'],
['6', 'G'],
// not sure about this one.
['A', '4'],
['C', 'G'],
['E', 'F'],
];
const similarCharactersMap = new Map<string, Set<string>>();
for (const similarCharacter of similarCharacters) {
for (const character of similarCharacter) {
if (!similarCharactersMap.has(character)) {
similarCharactersMap.set(character, new Set());
}
for (const similar of similarCharacter) {
similarCharactersMap.get(character)!.add(similar);
}
}
}
function isSameCharacter(c1: string, c2: string) {
if (c1 === c2)
return true;
return similarCharactersMap.get(c1)?.has(c2);
}
export function levenshteinDistance(str1: string, str2: string): number {
// todo: handle lower/uppercase similarity in similarCharacters above.
// i.e., b is visually similar to 6, but does not really look like B.
// others include e and C. v, u and Y. l, i, 1.
str1 = str1.toUpperCase();
str2 = str2.toUpperCase();
const len1 = str1.length;
const len2 = str2.length;
// If either string is empty, the distance is the length of the other string
if (len1 === 0) return len2;
if (len2 === 0) return len1;
let prev: number[] = new Array(len2 + 1);
let curr: number[] = new Array(len2 + 1);
// Initialize the first row of the matrix to be the index of the second string
for (let i = 0; i <= len2; i++) {
prev[i] = i;
}
for (let i = 1; i <= len1; i++) {
// Initialize the current row with the distance from the previous row's first element
curr[0] = i;
for (let j = 1; j <= len2; j++) {
let cost = isSameCharacter(str1.charAt(i - 1), str2.charAt(j - 1)) ? 0 : 1;
// Compute the minimum of three possible operations: insertion, deletion, or substitution
curr[j] = Math.min(prev[j] + 1, curr[j - 1] + 1, prev[j - 1] + cost);
}
// Swap the previous and current rows for the next iteration
const temp = prev;
prev = curr;
curr = temp;
}
return prev[len2];
}
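The removed helper is the classic two-row Levenshtein dynamic program, with plain character equality relaxed to the visual-similarity groups above. A plain-equality version, checked against the examples the smart sensor's Label Distance description uses:

def levenshtein(s1, s2):
    s1, s2 = s1.upper(), s2.upper()
    prev = list(range(len(s2) + 1))
    for i, c1 in enumerate(s1, 1):
        curr = [i]
        for j, c2 in enumerate(s2, 1):
            cost = 0 if c1 == c2 else 1
            curr.append(min(prev[j] + 1, curr[j - 1] + 1, prev[j - 1] + cost))
        prev = curr
    return prev[-1]

assert levenshtein("abcde", "abcbe") == 1  # one substitution
assert levenshtein("abcde", "abcd") == 1   # one deletion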

View File

@@ -162,7 +162,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
getCurrentSettings() {
const settings = this.model.settings;
if (!settings)
return { id : this.id };
return;
const ret: { [key: string]: any } = {};
for (const setting of settings) {
@@ -183,10 +183,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (this.hasMotionType)
ret['motionAsObjects'] = true;
return {
...ret,
id: this.id,
};
return ret;
}
maybeStartDetection() {
@@ -913,10 +910,9 @@ class ObjectDetectorMixin extends MixinDeviceBase<ObjectDetection> implements Mi
let objectDetection = systemManager.getDeviceById<ObjectDetection>(this.id);
const hasMotionType = this.model.classes.includes('motion');
const group = hasMotionType ? 'Motion Detection' : 'Object Detection';
const model = await objectDetection.getDetectionModel({ id: mixinDeviceState.id });
// const group = objectDetection.name.replace('Plugin', '').trim();
const ret = new ObjectDetectionMixin(this.plugin, mixinDevice, mixinDeviceInterfaces, mixinDeviceState, this.mixinProviderNativeId, objectDetection, model, group, hasMotionType);
const ret = new ObjectDetectionMixin(this.plugin, mixinDevice, mixinDeviceInterfaces, mixinDeviceState, this.mixinProviderNativeId, objectDetection, this.model, group, hasMotionType);
this.currentMixins.add(ret);
return ret;
}

View File

@@ -1,7 +1,6 @@
import sdk, { Camera, EventListenerRegister, MediaObject, MotionSensor, ObjectDetector, ObjectsDetected, Readme, RequestPictureOptions, ResponsePictureOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedNativeId, Setting, SettingValue, Settings } from "@scrypted/sdk";
import { StorageSetting, StorageSettings } from "@scrypted/sdk/storage-settings";
import type { ObjectDetectionPlugin } from "./main";
import { levenshteinDistance } from "./edit-distance";
export const SMART_MOTIONSENSOR_PREFIX = 'smart-motionsensor-';
@@ -45,7 +44,7 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
defaultValue: 0.7,
},
requireDetectionThumbnail: {
title: 'Require Detections with Images',
title: 'Rquire Detections with Images',
description: 'When enabled, this sensor will ignore detections results that do not have images.',
type: 'boolean',
defaultValue: false,
@@ -56,21 +55,6 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
type: 'boolean',
defaultValue: false,
},
labels: {
group: 'Recognition',
title: 'Labels',
description: 'The labels (license numbers, names) that will trigger this smart motion sensor.',
multiple: true,
combobox: true,
choices: [],
},
labelDistance: {
group: 'Recognition',
title: 'Label Distance',
description: 'The maximum edit distance between the detected label and the desired label. I.e., a distance of 1 will match "abcde" to "abcbe" or "abcd".',
type: 'number',
defaultValue: 2,
},
});
listener: EventListenerRegister;
@@ -173,8 +157,6 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
if (this.storageSettings.values.requireDetectionThumbnail && !detected.detectionId)
return false;
const { labels, labelDistance } = this.storageSettings.values;
const match = detected.detections?.find(d => {
if (this.storageSettings.values.requireScryptedNvrDetections && !d.boundingBox)
return false;
@@ -199,27 +181,10 @@ export class SmartMotionSensor extends ScryptedDeviceBase implements Settings, R
this.console.warn('Camera does not provide Zones in detection event. Zone filter will not be applied.');
}
}
// when not searching for a label, validate the object is moving.
if (!labels?.length)
return !d.movement || d.movement.moving;
if (!d.label)
return false;
for (const label of labels) {
if (label === d.label)
return true;
if (!labelDistance)
continue;
if (levenshteinDistance(label, d.label) <= labelDistance)
return true;
this.console.log('Label does not match.', label, d.label);
}
return false;
});
if (!d.movement)
return true;
return d.movement.moving;
})
if (match) {
if (!this.motionDetected)
console.log('Smart Motion Sensor triggered on', match);
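The label matching removed in this hunk reduces to a small predicate: an exact label match always triggers, and a fuzzy match triggers only when labelDistance is nonzero. A minimal sketch under those assumptions, reusing levenshteinDistance from edit-distance.ts and a simplified detection shape:
// Simplified detection shape for illustration; the real ObjectDetectionResult
// carries more fields (boundingBox, movement, zones, ...).
interface DetectionLike {
    label?: string;
}
// True when the detected label matches any desired label, either exactly
// or within the configured edit distance.
function labelMatches(labels: string[], labelDistance: number, d: DetectionLike): boolean {
    if (!d.label)
        return false;
    for (const label of labels) {
        if (label === d.label)
            return true;
        // a labelDistance of 0 disables fuzzy matching entirely.
        if (labelDistance && levenshteinDistance(label, d.label) <= labelDistance)
            return true;
    }
    return false;
}
// With the default labelDistance of 2, a plate misread as 'ABC12' still
// matches the configured label 'ABC123' (one missing character).
labelMatches(['ABC123'], 2, { label: 'ABC12' }); // true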

View File

@@ -1,6 +0,0 @@
# ONNX Object Detection for Scrypted
This plugin adds object detection capabilities to any camera in Scrypted. Having a fast GPU and CPU is highly recommended.
The ONNX Plugin should only be used if you are a Scrypted NVR user. It will provide no
benefits to HomeKit, which does its own detection processing.

View File

@@ -1 +0,0 @@
../../openvino/src/common

View File

@@ -1,4 +0,0 @@
from ort import ONNXPlugin
def create_scrypted_plugin():
return ONNXPlugin()

View File

@@ -1,144 +0,0 @@
from __future__ import annotations
import asyncio
from typing import Any, Tuple
import sys
import platform
import numpy as np
import onnxruntime
import scrypted_sdk
from PIL import Image
import ast
from scrypted_sdk.other import SettingValue
from scrypted_sdk.types import Setting
import concurrent.futures
import common.yolo as yolo
from predict import PredictPlugin
predictExecutor = concurrent.futures.ThreadPoolExecutor(1, "ONNX-Predict")
availableModels = [
"Default",
"scrypted_yolov6n_320",
"scrypted_yolov6n",
"scrypted_yolov6s_320",
"scrypted_yolov6s",
"scrypted_yolov9c_320",
"scrypted_yolov9c",
"scrypted_yolov8n_320",
"scrypted_yolov8n",
]
def parse_labels(names):
j = ast.literal_eval(names)
ret = {}
for k, v in j.items():
ret[int(k)] = v
return ret
class ONNXPlugin(
PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Settings, scrypted_sdk.DeviceProvider
):
def __init__(self, nativeId: str | None = None):
super().__init__(nativeId=nativeId)
model = self.storage.getItem("model") or "Default"
if model == "Default" or model not in availableModels:
if model != "Default":
self.storage.setItem("model", "Default")
model = "scrypted_yolov8n_320"
self.yolo = "yolo" in model
self.scrypted_yolo = "scrypted_yolo" in model
self.scrypted_model = "scrypted" in model
print(f"model {model}")
onnxmodel = "best" if self.scrypted_model else model
model_version = "v2"
onnxfile = self.downloadFile(
f"https://raw.githubusercontent.com/koush/onnx-models/main/{model}/{onnxmodel}.onnx",
f"{model_version}/{model}/{onnxmodel}.onnx",
)
print(onnxfile)
try:
sess_options = onnxruntime.SessionOptions()
providers: list[str] = []
if sys.platform == 'darwin':
providers.append("CoreMLExecutionProvider")
if 'linux' in sys.platform and platform.machine() == 'x86_64':
providers.append("CUDAExecutionProvider")
providers.append('CPUExecutionProvider')
self.compiled_model = onnxruntime.InferenceSession(onnxfile, sess_options=sess_options, providers=providers)
except:
import traceback
traceback.print_exc()
print("Reverting all settings.")
self.storage.removeItem("model")
self.requestRestart()
input = self.compiled_model.get_inputs()[0]
self.model_dim = input.shape[2]
self.input_name = input.name
self.labels = parse_labels(self.compiled_model.get_modelmeta().custom_metadata_map['names'])
async def getSettings(self) -> list[Setting]:
model = self.storage.getItem("model") or "Default"
return [
{
"key": "model",
"title": "Model",
"description": "The detection model used to find objects.",
"choices": availableModels,
"value": model,
},
]
async def putSetting(self, key: str, value: SettingValue):
self.storage.setItem(key, value)
await self.onDeviceEvent(scrypted_sdk.ScryptedInterface.Settings.value, None)
self.requestRestart()
# width, height, channels
def get_input_details(self) -> Tuple[int, int, int]:
return [self.model_dim, self.model_dim, 3]
def get_input_size(self) -> Tuple[int, int]:
return [self.model_dim, self.model_dim]
async def detect_once(self, input: Image.Image, settings: Any, src_size, cvss):
def predict(input_tensor):
output_tensors = self.compiled_model.run(None, { self.input_name: input_tensor })
objs = yolo.parse_yolov9(output_tensors[0][0])
return objs
im = np.array(input)
im = np.stack([input])
im = im.transpose((0, 3, 1, 2)) # BHWC to BCHW, (n, 3, h, w)
im = im.astype(np.float32) / 255.0
im = np.ascontiguousarray(im) # contiguous
input_tensor = im
try:
objs = await asyncio.get_event_loop().run_in_executor(
predictExecutor, lambda: predict(input_tensor)
)
except:
import traceback
traceback.print_exc()
raise
ret = self.create_detection_result(objs, src_size, cvss)
return ret
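The preprocessing in detect_once is the usual YOLO input transform: add a batch dimension, reorder BHWC to BCHW, and scale 8-bit samples to [0, 1] floats. A minimal TypeScript sketch of the same reordering, assuming an RGB image already resized to the model dimension:
// Convert one RGB image (height x width x 3, 8-bit) into a BCHW float32
// tensor normalized to [0, 1], matching the plugin's numpy transform:
// im.transpose((0, 3, 1, 2)) followed by im / 255.0.
function toBCHW(pixels: Uint8Array, width: number, height: number): Float32Array {
    const channels = 3;
    const out = new Float32Array(channels * height * width);
    for (let c = 0; c < channels; c++) {
        for (let y = 0; y < height; y++) {
            for (let x = 0; x < width; x++) {
                // HWC index: row-major pixels with interleaved channels.
                const src = (y * width + x) * channels + c;
                // CHW index: one full plane per channel.
                const dst = c * height * width + y * width + x;
                out[dst] = pixels[src] / 255;
            }
        }
    }
    return out; // the batch dimension of 1 is implicit: shape (1, 3, h, w)
}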

View File

@@ -1,12 +0,0 @@
# uncomment to require cuda 12, but most stuff is still targeting cuda 11.
# however, stuff targeted for cuda 11 can still run on cuda 12.
# --extra-index-url https://aiinfra.pkgs.visualstudio.com/PublicPackages/_packaging/onnxruntime-cuda-12/pypi/simple/
onnxruntime-gpu; 'linux' in sys_platform and platform_machine == 'x86_64'
# cpu and coreml execution provider
onnxruntime; 'linux' not in sys_platform or platform_machine != 'x86_64'
# ort-nightly-gpu==1.17.3.dev20240409002
# pillow-simd is available on x64 linux
# pillow-simd confirmed not building with arm64 linux or apple silicon
Pillow>=5.4.1; 'linux' not in sys_platform or platform_machine != 'x86_64'
pillow-simd; 'linux' in sys_platform and platform_machine == 'x86_64'

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/onvif",
"version": "0.1.14",
"version": "0.1.10",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/onvif",
"version": "0.1.14",
"version": "0.1.10",
"license": "Apache",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/onvif",
"version": "0.1.14",
"version": "0.1.10",
"description": "ONVIF Camera Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",

View File

@@ -97,7 +97,7 @@ export class OnvifIntercom implements Intercom {
this.camera.console.log('backchannel transport', transportDict);
const availableMatches = audioBackchannel.rtpmaps.filter(rtpmap => rtpmap.ffmpegEncoder);
const defaultMatch = audioBackchannel.rtpmaps.find(rtpmap => rtpmap.ffmpegEncoder === 'pcm_mulaw') || audioBackchannel.rtpmaps.find(rtpmap => rtpmap.ffmpegEncoder);
const defaultMatch = audioBackchannel.rtpmaps.find(rtpmap => rtpmap.ffmpegEncoder);
if (!defaultMatch)
throw new Error('no supported codec was found for back channel');
@@ -151,7 +151,7 @@ export class OnvifIntercom implements Intercom {
}
const elapsedRtpTimeMs = Math.abs(pending.header.timestamp - p.header.timestamp) / 8000 * 1000;
if (elapsedRtpTimeMs <= 160 && pending.payload.length + p.payload.length <= 1024) {
if (elapsedRtpTimeMs <= 60) {
pending.payload = Buffer.concat([pending.payload, p.payload]);
return;
}
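The timing math in the hunk above is worth spelling out: for 8 kHz G.711 audio, RTP timestamps advance one tick per sample, so a timestamp delta divided by 8000 yields seconds. A minimal sketch of the aggregation check, assuming a simplified packet shape; the thresholds mirror the two sides of the diff (160 ms with a 1024-byte payload cap versus a flat 60 ms):
// Simplified RTP packet shape for illustration; the real packets come from
// the plugin's RTP parser and carry full headers.
interface RtpPacketLike {
    header: { timestamp: number };
    payload: Uint8Array;
}
const AUDIO_SAMPLE_RATE = 8000; // G.711 mulaw: one timestamp tick per sample
// Elapsed milliseconds between two packets, derived from RTP timestamps.
// Like the source, this ignores 32-bit timestamp wraparound.
function elapsedRtpTimeMs(a: RtpPacketLike, b: RtpPacketLike): number {
    return Math.abs(a.header.timestamp - b.header.timestamp) / AUDIO_SAMPLE_RATE * 1000;
}
// Aggregate only packets that are close in time (and, optionally, small
// enough that the combined payload stays under a byte cap).
function shouldAggregate(pending: RtpPacketLike, next: RtpPacketLike, maxMs: number, maxBytes?: number): boolean {
    if (elapsedRtpTimeMs(pending, next) > maxMs)
        return false;
    if (maxBytes !== undefined && pending.payload.length + next.payload.length > maxBytes)
        return false;
    return true;
}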

View File

@@ -1,13 +1,9 @@
# plugin
numpy>=1.16.2
# pillow for anything not intel linux
Pillow>=5.4.1; sys_platform != 'linux' or platform_machine != 'x86_64'
pillow-simd; sys_platform == 'linux' and platform_machine == 'x86_64'
imutils>=0.5.0
# opencv-python is not available on armhf
# locked to version because 4.8.0.76 is broken.
# todo: check newer versions.
opencv-python==4.8.0.74
# pillow-simd is available on x64 linux
# pillow-simd confirmed not building with arm64 linux or apple silicon
Pillow>=5.4.1; 'linux' not in sys_platform or platform_machine != 'x86_64'
pillow-simd; 'linux' in sys_platform and platform_machine == 'x86_64'
opencv-python==4.8.0.74; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'

View File

@@ -11,7 +11,7 @@
// local checkout
"scrypted.debugHost": "127.0.0.1",
"scrypted.serverRoot": "/Users/koush/.scrypted",
// "scrypted.debugHost": "koushik-winvm",
// "scrypted.debugHost": "koushik-windows",
// "scrypted.serverRoot": "C:\\Users\\koush\\.scrypted",
"scrypted.pythonRemoteRoot": "${config:scrypted.serverRoot}/volume/plugin.zip",

View File

@@ -1,25 +1,25 @@
{
"name": "@scrypted/openvino",
"version": "0.1.80",
"version": "0.1.54",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/openvino",
"version": "0.1.80",
"version": "0.1.54",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.29",
"version": "0.2.97",
"dev": true,
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
"adm-zip": "^0.4.13",
"axios": "^1.6.5",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",
@@ -65,7 +65,7 @@
"@types/node": "^18.11.18",
"@types/stringify-object": "^4.0.0",
"adm-zip": "^0.4.13",
"axios": "^1.6.5",
"axios": "^0.21.4",
"babel-loader": "^9.1.0",
"babel-plugin-const-enum": "^1.1.0",
"esbuild": "^0.15.9",

Some files were not shown because too many files have changed in this diff