mirror of
https://github.com/koush/scrypted.git
synced 2026-02-08 08:19:56 +00:00
Compare commits
110 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1688fcc126 | ||
|
|
99cae0ba31 | ||
|
|
a7b00b9e91 | ||
|
|
3f2a62c6f2 | ||
|
|
3fc318a370 | ||
|
|
aed8575aa0 | ||
|
|
2e28b50588 | ||
|
|
2e87cc380f | ||
|
|
1fdd2d4b01 | ||
|
|
53b23b2ca8 | ||
|
|
54016a9c78 | ||
|
|
d207a3b824 | ||
|
|
e72a74d008 | ||
|
|
d1b907e45b | ||
|
|
4a4c47ffe2 | ||
|
|
f6baf99935 | ||
|
|
b5cc138e2b | ||
|
|
40738a74cf | ||
|
|
d2b1f104ca | ||
|
|
6cb4f589c0 | ||
|
|
5cf2b26630 | ||
|
|
e7f16af04c | ||
|
|
6287b9deaa | ||
|
|
b9b5fdb712 | ||
|
|
c85af9c8a5 | ||
|
|
069f765507 | ||
|
|
0e587abc79 | ||
|
|
47770c0a8d | ||
|
|
82d1c3afe5 | ||
|
|
1c9b52ce4f | ||
|
|
adcd9fa537 | ||
|
|
91e2c2870b | ||
|
|
1fc892815d | ||
|
|
38ed1acc15 | ||
|
|
3bdc9ab930 | ||
|
|
bfa6346333 | ||
|
|
fcbb308cb8 | ||
|
|
f137edcc8c | ||
|
|
53e6f083b9 | ||
|
|
0f96fdb4bc | ||
|
|
96ea3f3b27 | ||
|
|
a31d6482af | ||
|
|
be16bf7858 | ||
|
|
1dad0126bc | ||
|
|
9292ebbe48 | ||
|
|
0b3a1a1998 | ||
|
|
b5d58b6899 | ||
|
|
215a56f70e | ||
|
|
c593701e72 | ||
|
|
46351f2fd7 | ||
|
|
9bce4acd14 | ||
|
|
cba20ec887 | ||
|
|
7c41516cce | ||
|
|
1f209072ba | ||
|
|
8978bff8a9 | ||
|
|
04c500b855 | ||
|
|
8b4859579c | ||
|
|
90deaf1161 | ||
|
|
de56a8c653 | ||
|
|
a5215ae92b | ||
|
|
73cd40b540 | ||
|
|
93556dd404 | ||
|
|
125b436cb6 | ||
|
|
0a4ea032f5 | ||
|
|
c658cee5c9 | ||
|
|
6589176c8b | ||
|
|
6c4c83f655 | ||
|
|
8d4124adda | ||
|
|
b7cda86df7 | ||
|
|
6622e13e51 | ||
|
|
cbc45da679 | ||
|
|
e7d06c66af | ||
|
|
ea02bc3b6f | ||
|
|
2b43cb7d15 | ||
|
|
f3c0362e18 | ||
|
|
817ae42250 | ||
|
|
8043f83f20 | ||
|
|
d33ab5dbcf | ||
|
|
2b1674bea8 | ||
|
|
f045e59258 | ||
|
|
9125aafc07 | ||
|
|
6f5244ec9f | ||
|
|
f1eb2f988a | ||
|
|
1f659d9a72 | ||
|
|
dd98f12f2a | ||
|
|
2063e3822a | ||
|
|
f7495a7a76 | ||
|
|
fddb9c655f | ||
|
|
297e7a7b4f | ||
|
|
29e080f6b6 | ||
|
|
c72ea24794 | ||
|
|
ada80796de | ||
|
|
1ebcf32998 | ||
|
|
79765ba58e | ||
|
|
ff4665520c | ||
|
|
be5b810335 | ||
|
|
fdc99b7fa6 | ||
|
|
f730d13cbd | ||
|
|
af02753cef | ||
|
|
9334d1c2a4 | ||
|
|
71ecc07e2b | ||
|
|
5310dd5ff6 | ||
|
|
adf1a10659 | ||
|
|
2ecc26c914 | ||
|
|
9a49416831 | ||
|
|
f0eff01898 | ||
|
|
edd071739f | ||
|
|
ab81c568bc | ||
|
|
62470df0af | ||
|
|
19b83eb056 |
50
.github/workflows/docker-HEAD.yml
vendored
50
.github/workflows/docker-HEAD.yml
vendored
@@ -1,50 +0,0 @@
|
||||
name: Publish Scrypted (git HEAD)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Push Docker image to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
node: ["16-bullseye"]
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Login to Github Container Registry
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image (scrypted)
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
build-args: BASE=${{ matrix.node }}
|
||||
context: .
|
||||
file: docker/Dockerfile.HEAD
|
||||
platforms: linux/amd64,linux/arm64,linux/armhf
|
||||
push: true
|
||||
tags: |
|
||||
koush/scrypted:HEAD
|
||||
ghcr.io/koush/scrypted:HEAD
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
45
.github/workflows/docker-common.yml
vendored
45
.github/workflows/docker-common.yml
vendored
@@ -2,56 +2,69 @@ name: Publish Scrypted Common
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
release:
|
||||
types: [published]
|
||||
schedule:
|
||||
# publish the common base once a month.
|
||||
- cron: '30 8 2 * *'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Push Docker image to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: self-hosted
|
||||
strategy:
|
||||
matrix:
|
||||
NODE_VERSION: ["18"]
|
||||
BASE: ["bullseye", "bookworm"]
|
||||
BASE: ["jammy"]
|
||||
FLAVOR: ["full", "lite", "thin"]
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v2
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Set up SSH
|
||||
uses: MrSquaare/ssh-setup-action@v2
|
||||
with:
|
||||
host: 192.168.2.124
|
||||
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
|
||||
|
||||
- name: Set up SSH
|
||||
uses: MrSquaare/ssh-setup-action@v2
|
||||
with:
|
||||
host: 192.168.2.119
|
||||
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
platforms: linux/amd64
|
||||
append: |
|
||||
- endpoint: ssh://koush@192.168.2.124
|
||||
platforms: linux/arm64
|
||||
- endpoint: ssh://koush@192.168.2.119
|
||||
platforms: linux/armhf
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Login to Github Container Registry
|
||||
uses: docker/login-action@v1
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image (scrypted-common)
|
||||
uses: docker/build-push-action@v2
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
build-args: |
|
||||
NODE_VERSION=${{ matrix.NODE_VERSION }}
|
||||
BASE=${{ matrix.BASE }}
|
||||
context: install/docker/
|
||||
file: install/docker/Dockerfile.${{ matrix.FLAVOR }}
|
||||
platforms: linux/amd64,linux/arm64,linux/armhf
|
||||
platforms: linux/amd64,linux/armhf,linux/arm64
|
||||
push: true
|
||||
tags: |
|
||||
koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}
|
||||
# ${{ matrix.NODE_VERSION == '16-bullseye' && 'koush/scrypted-common:latest' || '' }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
49
.github/workflows/docker.yml
vendored
49
.github/workflows/docker.yml
vendored
@@ -15,10 +15,10 @@ on:
|
||||
jobs:
|
||||
build:
|
||||
name: Push Docker image to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: self-hosted
|
||||
strategy:
|
||||
matrix:
|
||||
BASE: ["18-bullseye-full", "18-bullseye-lite", "18-bullseye-thin"]
|
||||
BASE: ["18-jammy-full", "18-jammy-lite", "18-jammy-thin"]
|
||||
SUPERVISOR: ["", ".s6"]
|
||||
steps:
|
||||
- name: Check out the repo
|
||||
@@ -38,8 +38,27 @@ jobs:
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Set up SSH
|
||||
uses: MrSquaare/ssh-setup-action@v2
|
||||
with:
|
||||
host: 192.168.2.124
|
||||
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
|
||||
|
||||
- name: Set up SSH
|
||||
uses: MrSquaare/ssh-setup-action@v2
|
||||
with:
|
||||
host: 192.168.2.119
|
||||
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
with:
|
||||
platforms: linux/amd64
|
||||
append: |
|
||||
- endpoint: ssh://koush@192.168.2.124
|
||||
platforms: linux/arm64
|
||||
- endpoint: ssh://koush@192.168.2.119
|
||||
platforms: linux/armhf
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v2
|
||||
@@ -55,7 +74,7 @@ jobs:
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v3
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
build-args: |
|
||||
BASE=${{ matrix.BASE }}
|
||||
@@ -66,19 +85,19 @@ jobs:
|
||||
push: true
|
||||
tags: |
|
||||
${{ format('koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
|
||||
${{ matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '.s6' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '' && 'koush/scrypted:full' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '' && 'koush/scrypted:lite' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '' && 'koush/scrypted:thin' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:lite-s6' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:thin-s6' || '' }}
|
||||
${{ matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '.s6' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '' && 'koush/scrypted:full' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '' && 'koush/scrypted:lite' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '' && 'koush/scrypted:thin' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:lite-s6' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:thin-s6' || '' }}
|
||||
|
||||
${{ format('ghcr.io/koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
|
||||
${{ matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '.s6' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-full' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:full' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:lite' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:thin' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-lite' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:lite-s6' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-bullseye-thin' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:thin-s6' || '' }}
|
||||
${{ matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '.s6' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-full' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:full' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:lite' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:thin' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:lite-s6' || '' }}
|
||||
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '18-jammy-thin' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:thin-s6' || '' }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
@@ -4,6 +4,7 @@ import { EventEmitter } from 'events';
|
||||
import { Server } from 'net';
|
||||
import { Duplex } from 'stream';
|
||||
import { cloneDeep } from './clone-deep';
|
||||
import { Deferred } from "./deferred";
|
||||
import { listenZeroSingleClient } from './listen-cluster';
|
||||
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from './media-helpers';
|
||||
import { createRtspParser } from "./rtsp-server";
|
||||
@@ -228,6 +229,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
|
||||
ffmpegLogInitialOutput(console, cp, undefined, options?.storage);
|
||||
cp.on('exit', () => kill(new Error('ffmpeg exited')));
|
||||
|
||||
const deferredStart = new Deferred<void>();
|
||||
// now parse the created pipes
|
||||
const start = () => {
|
||||
for (const p of startParsers) {
|
||||
@@ -246,6 +248,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
|
||||
const { resetActivityTimer } = setupActivityTimer(container, kill, events, options?.timeout);
|
||||
|
||||
for await (const chunk of parser.parse(pipe as any, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
|
||||
await deferredStart.promise;
|
||||
events.emit(container, chunk);
|
||||
resetActivityTimer();
|
||||
}
|
||||
@@ -257,7 +260,6 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
|
||||
});
|
||||
};
|
||||
|
||||
await parseVideoCodec(cp);
|
||||
const rtsp = (options.parsers as any).rtsp as ReturnType<typeof createRtspParser>;
|
||||
rtsp.sdp.then(sdp => {
|
||||
const parsed = parseSdp(sdp);
|
||||
@@ -268,9 +270,12 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
|
||||
});
|
||||
|
||||
const sdp = rtsp.sdp.then(sdpString => [Buffer.from(sdpString)]);
|
||||
start();
|
||||
|
||||
return {
|
||||
start,
|
||||
start() {
|
||||
deferredStart.resolve();
|
||||
},
|
||||
sdp,
|
||||
get inputAudioCodec() {
|
||||
return inputAudioCodec;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Home Assistant Addon Configuration
|
||||
name: Scrypted
|
||||
version: "18-bullseye-full.s6-v0.13.2"
|
||||
version: "18-bullseye-full.s6-v0.23.0"
|
||||
slug: scrypted
|
||||
description: Scrypted is a high performance home video integration and automation platform
|
||||
url: "https://github.com/koush/scrypted"
|
||||
@@ -27,6 +27,7 @@ environment:
|
||||
SCRYPTED_NVR_VOLUME: "/data/scrypted_nvr"
|
||||
SCRYPTED_ADMIN_ADDRESS: "172.30.32.2"
|
||||
SCRYPTED_ADMIN_USERNAME: "homeassistant"
|
||||
SCRYPTED_INSTALL_ENVIRONMENT: "ha"
|
||||
backup_exclude:
|
||||
- '/server/**'
|
||||
- '/data/scrypted_nvr/**'
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
ARG BASE="18-bullseye-full"
|
||||
ARG BASE="18-jammy-full"
|
||||
FROM koush/scrypted-common:${BASE}
|
||||
|
||||
WORKDIR /
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
ARG BASE="16-bullseye"
|
||||
ARG BASE="16-jammy"
|
||||
FROM koush/scrypted-common:${BASE}
|
||||
|
||||
WORKDIR /
|
||||
|
||||
@@ -6,63 +6,66 @@
|
||||
# This common file will be used by both Docker and the linux
|
||||
# install script.
|
||||
################################################################
|
||||
ARG BASE="bullseye"
|
||||
FROM debian:${BASE} as header
|
||||
ARG BASE="jammy"
|
||||
FROM ubuntu:${BASE} as header
|
||||
|
||||
RUN apt-get update && apt-get -y install curl wget
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# switch to nvm?
|
||||
ARG NODE_VERSION=18
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y nodejs
|
||||
|
||||
# Coral Edge TPU
|
||||
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
|
||||
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
|
||||
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
|
||||
RUN apt-get -y update
|
||||
RUN apt-get -y install libedgetpu1-std
|
||||
|
||||
# intel opencl gpu for openvino
|
||||
RUN if [ "$(uname -m)" = "x86_64" ]; \
|
||||
then \
|
||||
apt-get -y install \
|
||||
intel-opencl-icd; \
|
||||
fi
|
||||
|
||||
RUN apt-get -y install software-properties-common apt-utils
|
||||
RUN apt-get -y update
|
||||
RUN apt-get -y upgrade
|
||||
|
||||
# base development stuff
|
||||
RUN apt-get -y install \
|
||||
# base tools and development stuff
|
||||
RUN apt-get update && apt-get -y install \
|
||||
curl software-properties-common apt-utils \
|
||||
build-essential \
|
||||
cmake \
|
||||
gcc \
|
||||
libcairo2-dev \
|
||||
libgirepository1.0-dev \
|
||||
libvips \
|
||||
pkg-config
|
||||
pkg-config && \
|
||||
apt-get -y update && \
|
||||
apt-get -y upgrade
|
||||
|
||||
ARG NODE_VERSION=18
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
|
||||
RUN apt-get update && apt-get install -y nodejs
|
||||
|
||||
# python native
|
||||
RUN apt-get -y install \
|
||||
python3 \
|
||||
python3-dev \
|
||||
python3-pip \
|
||||
python3-setuptools \
|
||||
python3-wheel
|
||||
|
||||
# Coral Edge TPU
|
||||
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
|
||||
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
|
||||
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
|
||||
RUN apt-get -y update && apt-get -y install libedgetpu1-std
|
||||
|
||||
# these are necessary for pillow-simd, additional on disk size is small
|
||||
# but could consider removing this.
|
||||
RUN apt-get -y install \
|
||||
libjpeg-dev zlib1g-dev
|
||||
|
||||
# plugins support fallback to pillow, but vips is faster.
|
||||
RUN apt-get -y install \
|
||||
libvips
|
||||
|
||||
# gstreamer native https://gstreamer.freedesktop.org/documentation/installing/on-linux.html?gi-language=c#install-gstreamer-on-ubuntu-or-debian
|
||||
RUN apt-get -y install \
|
||||
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav gstreamer1.0-alsa \
|
||||
gstreamer1.0-vaapi
|
||||
|
||||
# python native
|
||||
# python3 gstreamer bindings
|
||||
RUN apt-get -y install \
|
||||
python3 \
|
||||
python3-dev \
|
||||
python3-gst-1.0 \
|
||||
python3-pip \
|
||||
python3-setuptools \
|
||||
python3-wheel
|
||||
python3-gst-1.0
|
||||
|
||||
# python 3.9 from ppa.
|
||||
# 3.9 is the version with prebuilt support for tensorflow lite
|
||||
RUN add-apt-repository ppa:deadsnakes/ppa && \
|
||||
apt-get -y install \
|
||||
python3.9 \
|
||||
python3.9-dev \
|
||||
python3.9-distutils
|
||||
|
||||
# armv7l does not have wheels for any of these
|
||||
# and compile times would forever, if it works at all.
|
||||
@@ -70,24 +73,29 @@ RUN apt-get -y install \
|
||||
# which causes weird behavior in python which looks at the arch version
|
||||
# which still reports 64bit, even if running in 32bit docker.
|
||||
# this scenario is not supported and will be reported at runtime.
|
||||
RUN if [ "$(uname -m)" != "x86_64" ]; \
|
||||
then \
|
||||
apt-get -y install \
|
||||
python3-matplotlib \
|
||||
python3-numpy \
|
||||
python3-opencv \
|
||||
python3-pil \
|
||||
python3-skimage; \
|
||||
fi
|
||||
# this bit is not necessary on amd64, but leaving it for consistency.
|
||||
RUN apt-get -y install \
|
||||
python3-matplotlib \
|
||||
python3-numpy \
|
||||
python3-opencv \
|
||||
python3-pil \
|
||||
python3-skimage
|
||||
|
||||
# python pip
|
||||
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
|
||||
RUN python3 -m pip install --upgrade pip
|
||||
|
||||
# pyvips is broken on x86 due to mismatch ffi
|
||||
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
|
||||
|
||||
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
|
||||
RUN python3 -m pip install --upgrade pip
|
||||
RUN python3 -m pip install --force-reinstall --no-binary :all: cffi
|
||||
RUN python3 -m pip install debugpy typing_extensions psutil
|
||||
|
||||
RUN python3.9 -m pip install --upgrade pip
|
||||
RUN python3.9 -m pip install --force-reinstall --no-binary :all: cffi
|
||||
RUN python3.9 -m pip install debugpy typing_extensions psutil
|
||||
|
||||
################################################################
|
||||
# End section generated from template/Dockerfile.full.header
|
||||
################################################################
|
||||
@@ -96,14 +104,26 @@ RUN python3 -m pip install debugpy typing_extensions psutil
|
||||
################################################################
|
||||
FROM header as base
|
||||
|
||||
ENV SCRYPTED_DOCKER_SERVE="true"
|
||||
# intel opencl gpu for openvino
|
||||
RUN bash -c "if [ \"$(uname -m)\" == \"x86_64\" ]; \
|
||||
then \
|
||||
apt-get update && apt-get install -y gpg-agent && \
|
||||
rm -f /usr/share/keyrings/intel-graphics.gpg && \
|
||||
curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --output /usr/share/keyrings/intel-graphics.gpg && \
|
||||
echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \
|
||||
apt-get -y update && \
|
||||
apt-get -y install intel-opencl-icd intel-media-va-driver-non-free && \
|
||||
apt-get -y dist-upgrade; \
|
||||
fi"
|
||||
|
||||
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
|
||||
ENV SCRYPTED_CAN_RESTART="true"
|
||||
ENV SCRYPTED_VOLUME="/server/volume"
|
||||
ENV SCRYPTED_INSTALL_PATH="/server"
|
||||
|
||||
# changing this forces pip and npm to perform reinstalls.
|
||||
# if this base image changes, this version must be updated.
|
||||
ENV SCRYPTED_BASE_VERSION=20230329
|
||||
ENV SCRYPTED_BASE_VERSION=20230608
|
||||
ENV SCRYPTED_DOCKER_FLAVOR=full
|
||||
|
||||
################################################################
|
||||
|
||||
@@ -1,27 +1,23 @@
|
||||
ARG BASE="bullseye"
|
||||
FROM debian:${BASE} as header
|
||||
ARG BASE="jammy"
|
||||
FROM ubuntu:${BASE} as header
|
||||
|
||||
RUN apt-get update && apt-get -y install curl wget
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# switch to nvm?
|
||||
ARG NODE_VERSION=18
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y nodejs
|
||||
|
||||
RUN apt-get -y update
|
||||
RUN apt-get -y upgrade
|
||||
RUN apt-get -y install software-properties-common apt-utils
|
||||
RUN apt-get -y update
|
||||
|
||||
# base development stuff
|
||||
RUN apt-get -y install \
|
||||
# base tools and development stuff
|
||||
RUN apt-get update && apt-get -y install \
|
||||
curl software-properties-common apt-utils \
|
||||
build-essential \
|
||||
cmake \
|
||||
gcc \
|
||||
libcairo2-dev \
|
||||
libgirepository1.0-dev \
|
||||
pkg-config
|
||||
pkg-config && \
|
||||
apt-get -y update && \
|
||||
apt-get -y upgrade
|
||||
|
||||
ARG NODE_VERSION=18
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
|
||||
RUN apt-get update && apt-get install -y nodejs
|
||||
|
||||
# python native
|
||||
RUN apt-get -y install \
|
||||
@@ -36,12 +32,12 @@ RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
|
||||
RUN python3 -m pip install --upgrade pip
|
||||
RUN python3 -m pip install debugpy typing_extensions psutil
|
||||
|
||||
ENV SCRYPTED_DOCKER_SERVE="true"
|
||||
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
|
||||
ENV SCRYPTED_CAN_RESTART="true"
|
||||
ENV SCRYPTED_VOLUME="/server/volume"
|
||||
ENV SCRYPTED_INSTALL_PATH="/server"
|
||||
|
||||
# changing this forces pip and npm to perform reinstalls.
|
||||
# if this base image changes, this version must be updated.
|
||||
ENV SCRYPTED_BASE_VERSION=20230329
|
||||
ENV SCRYPTED_BASE_VERSION=20230608
|
||||
ENV SCRYPTED_DOCKER_FLAVOR=lite
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM koush/18-bullseye-full.s6
|
||||
FROM koush/18-jammy-full.s6
|
||||
|
||||
WORKDIR /
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
ARG BASE="18-bullseye-full"
|
||||
ARG BASE="18-jammy-full"
|
||||
FROM koush/scrypted-common:${BASE}
|
||||
|
||||
# avahi advertiser support
|
||||
RUN apt-get -y install \
|
||||
RUN apt-get update && apt-get -y install \
|
||||
libnss-mdns \
|
||||
avahi-discover \
|
||||
libavahi-compat-libdnssd-dev \
|
||||
|
||||
@@ -1,25 +1,22 @@
|
||||
ARG BASE="bullseye"
|
||||
FROM debian:${BASE} as header
|
||||
ARG BASE="jammy"
|
||||
FROM ubuntu:${BASE} as header
|
||||
|
||||
RUN apt-get update && apt-get -y install curl wget
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN apt-get -y update && \
|
||||
apt-get -y upgrade && \
|
||||
apt-get -y install curl software-properties-common apt-utils
|
||||
|
||||
# switch to nvm?
|
||||
ARG NODE_VERSION=18
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y nodejs
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash - && apt-get update && apt-get install -y nodejs
|
||||
|
||||
RUN apt-get -y update
|
||||
RUN apt-get -y upgrade
|
||||
RUN apt-get -y install software-properties-common apt-utils
|
||||
RUN apt-get -y update
|
||||
|
||||
ENV SCRYPTED_DOCKER_SERVE="true"
|
||||
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
|
||||
ENV SCRYPTED_CAN_RESTART="true"
|
||||
ENV SCRYPTED_VOLUME="/server/volume"
|
||||
ENV SCRYPTED_INSTALL_PATH="/server"
|
||||
|
||||
# changing this forces pip and npm to perform reinstalls.
|
||||
# if this base image changes, this version must be updated.
|
||||
ENV SCRYPTED_BASE_VERSION=20230329
|
||||
ENV SCRYPTED_BASE_VERSION=20230608
|
||||
ENV SCRYPTED_DOCKER_FLAVOR=thin
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
./docker-build.sh
|
||||
|
||||
docker build -t koush/scrypted:18-bullseye-full.nvidia -f Dockerfile.nvidia
|
||||
docker build -t koush/scrypted:18-jammy-full.nvidia -f Dockerfile.nvidia
|
||||
|
||||
@@ -3,7 +3,8 @@
|
||||
set -x
|
||||
|
||||
NODE_VERSION=18
|
||||
IMAGE_BASE=bookworm
|
||||
SCRYPTED_INSTALL_VERSION=beta
|
||||
IMAGE_BASE=jammy
|
||||
FLAVOR=full
|
||||
BASE=$NODE_VERSION-$IMAGE_BASE-$FLAVOR
|
||||
echo $BASE
|
||||
@@ -14,4 +15,4 @@ docker build -t koush/scrypted-common:$BASE -f Dockerfile.$FLAVOR \
|
||||
--build-arg NODE_VERSION=$NODE_VERSION --build-arg BASE=$IMAGE_BASE . && \
|
||||
\
|
||||
docker build -t koush/scrypted:$SUPERVISOR_BASE -f Dockerfile$SUPERVISOR \
|
||||
--build-arg BASE=$BASE .
|
||||
--build-arg BASE=$BASE --build-arg SCRYPTED_INSTALL_VERSION=$SCRYPTED_INSTALL_VERSION .
|
||||
|
||||
@@ -3,14 +3,26 @@
|
||||
################################################################
|
||||
FROM header as base
|
||||
|
||||
ENV SCRYPTED_DOCKER_SERVE="true"
|
||||
# intel opencl gpu for openvino
|
||||
RUN bash -c "if [ \"$(uname -m)\" == \"x86_64\" ]; \
|
||||
then \
|
||||
apt-get update && apt-get install -y gpg-agent && \
|
||||
rm -f /usr/share/keyrings/intel-graphics.gpg && \
|
||||
curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --output /usr/share/keyrings/intel-graphics.gpg && \
|
||||
echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list && \
|
||||
apt-get -y update && \
|
||||
apt-get -y install intel-opencl-icd intel-media-va-driver-non-free && \
|
||||
apt-get -y dist-upgrade; \
|
||||
fi"
|
||||
|
||||
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
|
||||
ENV SCRYPTED_CAN_RESTART="true"
|
||||
ENV SCRYPTED_VOLUME="/server/volume"
|
||||
ENV SCRYPTED_INSTALL_PATH="/server"
|
||||
|
||||
# changing this forces pip and npm to perform reinstalls.
|
||||
# if this base image changes, this version must be updated.
|
||||
ENV SCRYPTED_BASE_VERSION=20230329
|
||||
ENV SCRYPTED_BASE_VERSION=20230608
|
||||
ENV SCRYPTED_DOCKER_FLAVOR=full
|
||||
|
||||
################################################################
|
||||
|
||||
@@ -3,63 +3,66 @@
|
||||
# This common file will be used by both Docker and the linux
|
||||
# install script.
|
||||
################################################################
|
||||
ARG BASE="bullseye"
|
||||
FROM debian:${BASE} as header
|
||||
ARG BASE="jammy"
|
||||
FROM ubuntu:${BASE} as header
|
||||
|
||||
RUN apt-get update && apt-get -y install curl wget
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# switch to nvm?
|
||||
ARG NODE_VERSION=18
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y nodejs
|
||||
|
||||
# Coral Edge TPU
|
||||
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
|
||||
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
|
||||
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
|
||||
RUN apt-get -y update
|
||||
RUN apt-get -y install libedgetpu1-std
|
||||
|
||||
# intel opencl gpu for openvino
|
||||
RUN if [ "$(uname -m)" = "x86_64" ]; \
|
||||
then \
|
||||
apt-get -y install \
|
||||
intel-opencl-icd; \
|
||||
fi
|
||||
|
||||
RUN apt-get -y install software-properties-common apt-utils
|
||||
RUN apt-get -y update
|
||||
RUN apt-get -y upgrade
|
||||
|
||||
# base development stuff
|
||||
RUN apt-get -y install \
|
||||
# base tools and development stuff
|
||||
RUN apt-get update && apt-get -y install \
|
||||
curl software-properties-common apt-utils \
|
||||
build-essential \
|
||||
cmake \
|
||||
gcc \
|
||||
libcairo2-dev \
|
||||
libgirepository1.0-dev \
|
||||
libvips \
|
||||
pkg-config
|
||||
pkg-config && \
|
||||
apt-get -y update && \
|
||||
apt-get -y upgrade
|
||||
|
||||
ARG NODE_VERSION=18
|
||||
RUN curl -fsSL https://deb.nodesource.com/setup_${NODE_VERSION}.x | bash -
|
||||
RUN apt-get update && apt-get install -y nodejs
|
||||
|
||||
# python native
|
||||
RUN apt-get -y install \
|
||||
python3 \
|
||||
python3-dev \
|
||||
python3-pip \
|
||||
python3-setuptools \
|
||||
python3-wheel
|
||||
|
||||
# Coral Edge TPU
|
||||
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
|
||||
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
|
||||
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
|
||||
RUN apt-get -y update && apt-get -y install libedgetpu1-std
|
||||
|
||||
# these are necessary for pillow-simd, additional on disk size is small
|
||||
# but could consider removing this.
|
||||
RUN apt-get -y install \
|
||||
libjpeg-dev zlib1g-dev
|
||||
|
||||
# plugins support fallback to pillow, but vips is faster.
|
||||
RUN apt-get -y install \
|
||||
libvips
|
||||
|
||||
# gstreamer native https://gstreamer.freedesktop.org/documentation/installing/on-linux.html?gi-language=c#install-gstreamer-on-ubuntu-or-debian
|
||||
RUN apt-get -y install \
|
||||
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav gstreamer1.0-alsa \
|
||||
gstreamer1.0-vaapi
|
||||
|
||||
# python native
|
||||
# python3 gstreamer bindings
|
||||
RUN apt-get -y install \
|
||||
python3 \
|
||||
python3-dev \
|
||||
python3-gst-1.0 \
|
||||
python3-pip \
|
||||
python3-setuptools \
|
||||
python3-wheel
|
||||
python3-gst-1.0
|
||||
|
||||
# python 3.9 from ppa.
|
||||
# 3.9 is the version with prebuilt support for tensorflow lite
|
||||
RUN add-apt-repository ppa:deadsnakes/ppa && \
|
||||
apt-get -y install \
|
||||
python3.9 \
|
||||
python3.9-dev \
|
||||
python3.9-distutils
|
||||
|
||||
# armv7l does not have wheels for any of these
|
||||
# and compile times would forever, if it works at all.
|
||||
@@ -67,24 +70,29 @@ RUN apt-get -y install \
|
||||
# which causes weird behavior in python which looks at the arch version
|
||||
# which still reports 64bit, even if running in 32bit docker.
|
||||
# this scenario is not supported and will be reported at runtime.
|
||||
RUN if [ "$(uname -m)" != "x86_64" ]; \
|
||||
then \
|
||||
apt-get -y install \
|
||||
python3-matplotlib \
|
||||
python3-numpy \
|
||||
python3-opencv \
|
||||
python3-pil \
|
||||
python3-skimage; \
|
||||
fi
|
||||
# this bit is not necessary on amd64, but leaving it for consistency.
|
||||
RUN apt-get -y install \
|
||||
python3-matplotlib \
|
||||
python3-numpy \
|
||||
python3-opencv \
|
||||
python3-pil \
|
||||
python3-skimage
|
||||
|
||||
# python pip
|
||||
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
|
||||
RUN python3 -m pip install --upgrade pip
|
||||
|
||||
# pyvips is broken on x86 due to mismatch ffi
|
||||
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
|
||||
|
||||
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
|
||||
RUN python3 -m pip install --upgrade pip
|
||||
RUN python3 -m pip install --force-reinstall --no-binary :all: cffi
|
||||
RUN python3 -m pip install debugpy typing_extensions psutil
|
||||
|
||||
RUN python3.9 -m pip install --upgrade pip
|
||||
RUN python3.9 -m pip install --force-reinstall --no-binary :all: cffi
|
||||
RUN python3.9 -m pip install debugpy typing_extensions psutil
|
||||
|
||||
################################################################
|
||||
# End section generated from template/Dockerfile.full.header
|
||||
################################################################
|
||||
|
||||
@@ -45,7 +45,7 @@ ARG() {
|
||||
}
|
||||
|
||||
ENV() {
|
||||
echo "ignoring ENV $1"
|
||||
export $@
|
||||
}
|
||||
|
||||
source <(curl -s https://raw.githubusercontent.com/koush/scrypted/main/install/docker/template/Dockerfile.full.header)
|
||||
|
||||
@@ -161,6 +161,7 @@ export async function checkScryptedClientLogin(options?: ScryptedConnectionOptio
|
||||
const directAddress = response.headers['x-scrypted-direct-address'];
|
||||
|
||||
return {
|
||||
hostname: response.data.hostname as string,
|
||||
redirect: response.data.redirect as string,
|
||||
username: response.data.username as string,
|
||||
expiration: response.data.expiration as number,
|
||||
|
||||
@@ -1,13 +1,25 @@
|
||||
# Arlo Plugin for Scrypted
|
||||
|
||||
The Arlo Plugin connects Scrypted to Arlo cloud, allowing you to access all of your Arlo cameras in Scrypted.
|
||||
The Arlo Plugin connects Scrypted to Arlo Cloud, allowing you to access all of your Arlo cameras in Scrypted.
|
||||
|
||||
It is highly recommended to create a dedicated Arlo account for use with this plugin and share your cameras from your main account, as Arlo only permits one connection to their servers per account. Using a separate account allows you to use the Arlo app or website simultaneously with this plugin.
|
||||
It is highly recommended to create a dedicated Arlo account for use with this plugin and share your cameras from your main account, as Arlo only permits one active login to their servers per account. Using a separate account allows you to use the Arlo app or website simultaneously with this plugin, otherwise logging in from one place will log you out from all other devices.
|
||||
|
||||
The account you use for this plugin must have either SMS or email set as the default 2FA option. Once you enter your username and password on the plugin settings page, you should receive a 2FA code through your default 2FA option. Enter that code into the provided box, and your cameras will appear in Scrypted. Or, see below for configuring IMAP to auto-login with 2FA.
|
||||
|
||||
If you experience any trouble logging in, clear the username and password boxes, reload the plugin, and try again.
|
||||
|
||||
## General Setup Notes
|
||||
|
||||
* Ensure that your Arlo account's default 2FA option is set to either SMS or email.
|
||||
* Motion events notifications should be turned on in the Arlo app. If you are receiving motion push notifications, Scrypted will also receive motion events.
|
||||
* Disable smart detection and any cloud/local recording in the Arlo app. Arlo Cloud only permits one active stream per camera, so any smart detection or recording features may prevent downstream plugins (e.g. Homekit) from successfully pulling the video feed after a motion event.
|
||||
* It is highly recommended to enable the Rebroadcast plugin to allow multiple downstream plugins to pull the video feed within Scrypted.
|
||||
* If there is no audio on your camera, switch to the `FFmpeg (TCP)` parser under the `Cloud RTSP` settings.
|
||||
* Prebuffering should only be enabled if the camera is wired to a persistent power source, such as a wall outlet. Prebuffering will only work if your camera does not have a battery or `Plugged In to External Power` is selected.
|
||||
* The plugin supports pulling RTSP or DASH streams from Arlo Cloud. It is recommended to use RTSP for the lowest latency streams. DASH is inconsistent in reliability, and may return finicky codecs that require additional FFmpeg output arguments, e.g. `-vcodec h264`.
|
||||
|
||||
Note that streaming cameras uses extra Internet bandwidth, since video and audio packets will need to travel from the camera through your network, out to Arlo Cloud, and then back to your network and into Scrypted.
|
||||
|
||||
## IMAP 2FA
|
||||
|
||||
The Arlo Plugin supports using the IMAP protocol to check an email mailbox for Arlo 2FA codes. This requires you to specify an email 2FA option as the default in your Arlo account settings.
|
||||
|
||||
4
plugins/arlo/package-lock.json
generated
4
plugins/arlo/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/arlo",
|
||||
"version": "0.7.21",
|
||||
"version": "0.7.29",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/arlo",
|
||||
"version": "0.7.21",
|
||||
"version": "0.7.29",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/arlo",
|
||||
"version": "0.7.21",
|
||||
"version": "0.7.29",
|
||||
"description": "Arlo Plugin for Scrypted",
|
||||
"keywords": [
|
||||
"scrypted",
|
||||
|
||||
@@ -41,6 +41,7 @@ import math
|
||||
import random
|
||||
import time
|
||||
import uuid
|
||||
from urllib.parse import urlparse, parse_qs
|
||||
|
||||
stream_class = MQTTStream
|
||||
|
||||
@@ -81,8 +82,11 @@ USER_AGENTS = {
|
||||
class Arlo(object):
|
||||
BASE_URL = 'my.arlo.com'
|
||||
AUTH_URL = 'ocapi-app.arlo.com'
|
||||
BACKUP_AUTH_HOSTS = list(scrypted_arlo_go.BACKUP_AUTH_HOSTS())
|
||||
TRANSID_PREFIX = 'web'
|
||||
|
||||
random.shuffle(BACKUP_AUTH_HOSTS)
|
||||
|
||||
def __init__(self, username, password):
|
||||
self.username = username
|
||||
self.password = password
|
||||
@@ -166,13 +170,11 @@ class Arlo(object):
|
||||
# in case cloudflare rejects our auth request...
|
||||
logger.warning(f"Using fallback authentication host due to: {e}")
|
||||
|
||||
backup_hosts = list(scrypted_arlo_go.BACKUP_AUTH_HOSTS())
|
||||
random.shuffle(backup_hosts)
|
||||
|
||||
auth_host = pick_host([
|
||||
base64.b64decode(h.encode("utf-8")).decode("utf-8")
|
||||
for h in backup_hosts
|
||||
for h in self.BACKUP_AUTH_HOSTS
|
||||
], self.AUTH_URL, "/api/auth")
|
||||
logger.debug(f"Selected backup authentication host {auth_host}")
|
||||
|
||||
self.request = Request(mode="ip")
|
||||
|
||||
@@ -200,10 +202,15 @@ class Arlo(object):
|
||||
raw=True
|
||||
)
|
||||
factor_id = next(
|
||||
i for i in factors_body['data']['items']
|
||||
if (i['factorType'] == 'EMAIL' or i['factorType'] == 'SMS')
|
||||
and i['factorRole'] == "PRIMARY"
|
||||
)['factorId']
|
||||
iter([
|
||||
i for i in factors_body['data']['items']
|
||||
if (i['factorType'] == 'EMAIL' or i['factorType'] == 'SMS')
|
||||
and i['factorRole'] == "PRIMARY"
|
||||
]),
|
||||
{}
|
||||
).get('factorId')
|
||||
if not factor_id:
|
||||
raise Exception("Could not find valid 2FA method - is the primary 2FA set to either Email or SMS?")
|
||||
|
||||
# Start factor auth
|
||||
start_auth_body = self.request.post(
|
||||
@@ -227,6 +234,9 @@ class Arlo(object):
|
||||
raw=True
|
||||
)
|
||||
|
||||
if finish_auth_body.get('data', {}).get('token') is None:
|
||||
raise Exception("Could not complete 2FA, maybe invalid token? If the error persists, please try reloading the plugin and logging in again.")
|
||||
|
||||
self.request = Request()
|
||||
|
||||
# Update Authorization code with new code
|
||||
@@ -282,14 +292,16 @@ class Arlo(object):
|
||||
cameras[camera['deviceId']] = camera
|
||||
|
||||
# filter out cameras without basestation, where they are their own basestations
|
||||
# for now, keep doorbells and sirens in the list so they get pings
|
||||
# this is so battery-powered devices do not drain due to pings
|
||||
# for wired devices, keep doorbells, sirens, and arloq in the list so they get pings
|
||||
proper_basestations = {}
|
||||
for basestation in basestations.values():
|
||||
if basestation['deviceId'] == basestation.get('parentId') and basestation['deviceType'] not in ['doorbell', 'siren']:
|
||||
if basestation['deviceId'] == basestation.get('parentId') and \
|
||||
basestation['deviceType'] not in ['doorbell', 'siren', 'arloq', 'arloqs']:
|
||||
continue
|
||||
proper_basestations[basestation['deviceId']] = basestation
|
||||
|
||||
logger.info(f"Will send heartbeat to the following basestations: {list(proper_basestations.keys())}")
|
||||
logger.info(f"Will send heartbeat to the following devices: {list(proper_basestations.keys())}")
|
||||
|
||||
# start heartbeat loop with only basestations
|
||||
asyncio.get_event_loop().create_task(heartbeat(self, list(proper_basestations.values())))
|
||||
@@ -629,7 +641,7 @@ class Arlo(object):
|
||||
If you pass in a valid device type, as a string or a list, this method will return an array of just those devices that match that type. An example would be ['basestation', 'camera']
|
||||
To filter provisioned or unprovisioned devices pass in a True/False value for filter_provisioned. By default both types are returned.
|
||||
"""
|
||||
devices = self.request.get(f'https://{self.BASE_URL}/hmsweb/v2/users/devices')
|
||||
devices = self._getDevicesImpl()
|
||||
if device_type:
|
||||
devices = [ device for device in devices if device.get('deviceType') in device_type]
|
||||
|
||||
@@ -641,20 +653,31 @@ class Arlo(object):
|
||||
|
||||
return devices
|
||||
|
||||
async def StartStream(self, basestation, camera):
|
||||
@cached(cache=TTLCache(maxsize=1, ttl=60))
|
||||
def _getDevicesImpl(self):
|
||||
devices = self.request.get(f'https://{self.BASE_URL}/hmsweb/v2/users/devices')
|
||||
return devices
|
||||
|
||||
async def StartStream(self, basestation, camera, mode="rtsp"):
|
||||
"""
|
||||
This function returns the url of the rtsp video stream.
|
||||
This stream needs to be called within 30 seconds or else it becomes invalid.
|
||||
It can be streamed with: ffmpeg -re -i 'rtsps://<url>' -acodec copy -vcodec copy test.mp4
|
||||
The request to /users/devices/startStream returns: { url:rtsp://<url>:443/vzmodulelive?egressToken=b<xx>&userAgent=iOS&cameraId=<camid>}
|
||||
|
||||
If mode is set to "dash", returns the url to the mpd file for DASH streaming.
|
||||
"""
|
||||
resource = f"cameras/{camera.get('deviceId')}"
|
||||
|
||||
if mode not in ["rtsp", "dash"]:
|
||||
raise ValueError("mode must be 'rtsp' or 'dash'")
|
||||
|
||||
# nonlocal variable hack for Python 2.x.
|
||||
class nl:
|
||||
stream_url_dict = None
|
||||
|
||||
def trigger(self):
|
||||
ua = USER_AGENTS['arlo'] if mode == "rtsp" else USER_AGENTS["firefox"]
|
||||
nl.stream_url_dict = self.request.post(
|
||||
f'https://{self.BASE_URL}/hmsweb/users/devices/startStream',
|
||||
params={
|
||||
@@ -670,14 +693,17 @@ class Arlo(object):
|
||||
"cameraId": camera.get('deviceId')
|
||||
}
|
||||
},
|
||||
headers={"xcloudId":camera.get('xCloudId')}
|
||||
headers={"xcloudId":camera.get('xCloudId'), 'User-Agent': ua}
|
||||
)
|
||||
|
||||
def callback(self, event):
|
||||
#return nl.stream_url_dict['url'].replace("rtsp://", "rtsps://")
|
||||
properties = event.get("properties", {})
|
||||
if properties.get("activityState") == "userStreamActive":
|
||||
return nl.stream_url_dict['url'].replace("rtsp://", "rtsps://")
|
||||
if mode == "rtsp":
|
||||
return nl.stream_url_dict['url'].replace("rtsp://", "rtsps://")
|
||||
else:
|
||||
return nl.stream_url_dict['url'].replace(":80", "")
|
||||
return None
|
||||
|
||||
return await self.TriggerAndHandleEvent(
|
||||
@@ -688,6 +714,27 @@ class Arlo(object):
|
||||
callback,
|
||||
)
|
||||
|
||||
def GetMPDHeaders(self, url: str) -> dict:
|
||||
parsed = urlparse(url)
|
||||
query = parse_qs(parsed.query)
|
||||
|
||||
headers = {
|
||||
"Accept": "*/*",
|
||||
"Accept-Encoding": "gzip, deflate, br",
|
||||
"Accept-Language": "en-US,en;q=0.9",
|
||||
"Connection": "keep-alive",
|
||||
"DNT": "1",
|
||||
"Egress-Token": query['egressToken'][0],
|
||||
"Origin": "https://my.arlo.com",
|
||||
"Referer": "https://my.arlo.com/",
|
||||
"User-Agent": USER_AGENTS["firefox"],
|
||||
}
|
||||
return headers
|
||||
|
||||
def GetSIPInfo(self):
|
||||
resp = self.request.get(f'https://{self.BASE_URL}/hmsweb/users/devices/sipInfo')
|
||||
return resp
|
||||
|
||||
def StartPushToTalk(self, basestation, camera):
|
||||
url = f'https://{self.BASE_URL}/hmsweb/users/devices/{self.user_id}_{camera.get("deviceId")}/pushtotalk'
|
||||
resp = self.request.get(url)
|
||||
|
||||
@@ -2,8 +2,7 @@ import ssl
|
||||
from socket import setdefaulttimeout
|
||||
import requests
|
||||
from requests_toolbelt.adapters import host_header_ssl
|
||||
from cryptography import x509
|
||||
from cryptography.x509.oid import ExtensionOID
|
||||
import scrypted_arlo_go
|
||||
|
||||
from .logging import logger
|
||||
|
||||
@@ -18,12 +17,10 @@ def pick_host(hosts, hostname_to_match, endpoint_to_test):
|
||||
for host in hosts:
|
||||
try:
|
||||
c = ssl.get_server_certificate((host, 443))
|
||||
c = x509.load_pem_x509_certificate(c.encode("utf-8"))
|
||||
if hostname_to_match in c.subject.rfc4514_string() or \
|
||||
hostname_to_match in c.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME).value.get_values_for_type(x509.DNSName):
|
||||
r = session.post(f"https://{host}{endpoint_to_test}", headers={"Host": hostname_to_match})
|
||||
r.raise_for_status()
|
||||
return host
|
||||
scrypted_arlo_go.VerifyCertHostname(c, hostname_to_match)
|
||||
r = session.post(f"https://{host}{endpoint_to_test}", headers={"Host": hostname_to_match})
|
||||
r.raise_for_status()
|
||||
return host
|
||||
except Exception as e:
|
||||
logger.warning(f"{host} is invalid: {e}")
|
||||
raise Exception("no valid hosts found!")
|
||||
@@ -5,16 +5,16 @@ import aiohttp
|
||||
from async_timeout import timeout as async_timeout
|
||||
from datetime import datetime, timedelta
|
||||
import json
|
||||
import threading
|
||||
import time
|
||||
from typing import List, TYPE_CHECKING
|
||||
|
||||
import scrypted_arlo_go
|
||||
|
||||
import scrypted_sdk
|
||||
from scrypted_sdk.types import Setting, Settings, SettingValue, Device, Camera, VideoCamera, VideoClips, VideoClip, VideoClipOptions, MotionSensor, AudioSensor, Battery, Charger, ChargeState, DeviceProvider, MediaObject, ResponsePictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
|
||||
from scrypted_sdk.types import Setting, Settings, SettingValue, Device, Camera, VideoCamera, RequestMediaStreamOptions, VideoClips, VideoClip, VideoClipOptions, MotionSensor, AudioSensor, Battery, Charger, ChargeState, DeviceProvider, MediaObject, ResponsePictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
|
||||
|
||||
from .debug import EXPERIMENTAL
|
||||
from .arlo.arlo_async import USER_AGENTS
|
||||
from .experimental import EXPERIMENTAL
|
||||
from .base import ArloDeviceBase
|
||||
from .spotlight import ArloSpotlight, ArloFloodlight
|
||||
from .vss import ArloSirenVirtualSecuritySystem
|
||||
@@ -76,6 +76,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
|
||||
timeout: int = 30
|
||||
intercom_session = None
|
||||
goSM = None
|
||||
light: ArloSpotlight = None
|
||||
vss: ArloSirenVirtualSecuritySystem = None
|
||||
picture_lock: asyncio.Lock = None
|
||||
@@ -152,14 +153,8 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
ScryptedInterface.Settings.value,
|
||||
])
|
||||
|
||||
if EXPERIMENTAL:
|
||||
if self.two_way_audio:
|
||||
results.discard(ScryptedInterface.RTCSignalingChannel.value)
|
||||
results.add(ScryptedInterface.Intercom.value)
|
||||
|
||||
if self.webrtc_emulation:
|
||||
results.add(ScryptedInterface.RTCSignalingChannel.value)
|
||||
results.discard(ScryptedInterface.Intercom.value)
|
||||
if EXPERIMENTAL or not self.uses_sip_push_to_talk:
|
||||
results.add(ScryptedInterface.Intercom.value)
|
||||
|
||||
if self.has_battery:
|
||||
results.add(ScryptedInterface.Battery.value)
|
||||
@@ -174,11 +169,6 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
if self.has_cloud_recording:
|
||||
results.add(ScryptedInterface.VideoClips.value)
|
||||
|
||||
if EXPERIMENTAL:
|
||||
if not self._can_push_to_talk():
|
||||
results.discard(ScryptedInterface.RTCSignalingChannel.value)
|
||||
results.discard(ScryptedInterface.Intercom.value)
|
||||
|
||||
return list(results)
|
||||
|
||||
def get_device_type(self) -> str:
|
||||
@@ -220,23 +210,6 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
] + vss.get_builtin_child_device_manifests())
|
||||
return results
|
||||
|
||||
@property
|
||||
def webrtc_emulation(self) -> bool:
|
||||
if self.storage:
|
||||
return True if self.storage.getItem("webrtc_emulation") else False
|
||||
else:
|
||||
return False
|
||||
|
||||
@property
|
||||
def two_way_audio(self) -> bool:
|
||||
if self.storage:
|
||||
val = self.storage.getItem("two_way_audio")
|
||||
if val is None:
|
||||
val = True
|
||||
return val
|
||||
else:
|
||||
return True
|
||||
|
||||
@property
|
||||
def wired_to_power(self) -> bool:
|
||||
if self.storage:
|
||||
@@ -283,6 +256,10 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
def has_battery(self) -> bool:
|
||||
return not any([self.arlo_device["modelId"].lower().startswith(model) for model in ArloCamera.MODELS_WITHOUT_BATTERY])
|
||||
|
||||
@property
|
||||
def uses_sip_push_to_talk(self) -> bool:
|
||||
return self.arlo_device["deviceId"] == self.arlo_device["parentId"]
|
||||
|
||||
async def getSettings(self) -> List[Setting]:
|
||||
result = []
|
||||
if self.has_battery:
|
||||
@@ -323,26 +300,6 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
"type": "number",
|
||||
}
|
||||
)
|
||||
if self._can_push_to_talk() and EXPERIMENTAL:
|
||||
result.extend([
|
||||
{
|
||||
"group": "General",
|
||||
"key": "two_way_audio",
|
||||
"title": "(Experimental) Enable native two-way audio",
|
||||
"value": self.two_way_audio,
|
||||
"description": "Enables two-way audio for this device. Not yet completely functional on all audio senders.",
|
||||
"type": "boolean",
|
||||
},
|
||||
{
|
||||
"group": "General",
|
||||
"key": "webrtc_emulation",
|
||||
"title": "(Highly Experimental) Emulate WebRTC Camera",
|
||||
"value": self.webrtc_emulation,
|
||||
"description": "Configures the plugin to offer this device as a WebRTC camera, merging video/audio stream with two-way audio. "
|
||||
"If enabled, takes precedence over native two-way audio. May use increased system resources.",
|
||||
"type": "boolean",
|
||||
},
|
||||
])
|
||||
return result
|
||||
|
||||
@async_print_exception_guard
|
||||
@@ -351,7 +308,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)
|
||||
return
|
||||
|
||||
if key in ["webrtc_emulation", "two_way_audio", "wired_to_power"]:
|
||||
if key in ["wired_to_power"]:
|
||||
self.storage.setItem(key, value == "true" or value == True)
|
||||
await self.provider.discover_devices()
|
||||
elif key in ["eco_mode"]:
|
||||
@@ -408,8 +365,8 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
|
||||
return await scrypted_sdk.mediaManager.createMediaObject(self.last_picture, "image/jpeg")
|
||||
|
||||
async def getVideoStreamOptions(self) -> List[ResponseMediaStreamOptions]:
|
||||
return [
|
||||
async def getVideoStreamOptions(self, id: str = None) -> List[ResponseMediaStreamOptions]:
|
||||
options = [
|
||||
{
|
||||
"id": 'default',
|
||||
"name": 'Cloud RTSP',
|
||||
@@ -423,72 +380,138 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
"source": 'cloud',
|
||||
"tool": 'scrypted',
|
||||
"userConfigurable": False,
|
||||
},
|
||||
{
|
||||
"id": 'dash',
|
||||
"name": 'Cloud DASH',
|
||||
"container": 'dash',
|
||||
"video": {
|
||||
"codec": 'unknown',
|
||||
},
|
||||
"audio": None if self.arlo_device.get("modelId") == "VMC3030" else {
|
||||
"codec": 'unknown',
|
||||
},
|
||||
"source": 'cloud',
|
||||
"tool": 'ffmpeg',
|
||||
"userConfigurable": False,
|
||||
}
|
||||
]
|
||||
|
||||
async def _getVideoStreamURL(self) -> str:
|
||||
self.logger.info("Requesting stream")
|
||||
rtsp_url = await asyncio.wait_for(self.provider.arlo.StartStream(self.arlo_basestation, self.arlo_device), timeout=self.timeout)
|
||||
self.logger.debug(f"Got stream URL at {rtsp_url}")
|
||||
return rtsp_url
|
||||
if id is None:
|
||||
return options
|
||||
|
||||
async def getVideoStream(self, options: dict = None) -> MediaObject:
|
||||
return next(iter([o for o in options if o['id'] == id]))
|
||||
|
||||
async def _getVideoStreamURL(self, container: str) -> str:
|
||||
self.logger.info(f"Requesting {container} stream")
|
||||
url = await asyncio.wait_for(self.provider.arlo.StartStream(self.arlo_basestation, self.arlo_device, mode=container), timeout=self.timeout)
|
||||
self.logger.debug(f"Got {container} stream URL at {url}")
|
||||
return url
|
||||
|
||||
@async_print_exception_guard
|
||||
async def getVideoStream(self, options: RequestMediaStreamOptions = None) -> MediaObject:
|
||||
self.logger.debug("Entered getVideoStream")
|
||||
rtsp_url = await self._getVideoStreamURL()
|
||||
|
||||
mso = (await self.getVideoStreamOptions())[0]
|
||||
mso = await self.getVideoStreamOptions(id=options["id"])
|
||||
mso['refreshAt'] = round(time.time() * 1000) + 30 * 60 * 1000
|
||||
container = mso["container"]
|
||||
|
||||
url = await self._getVideoStreamURL(container)
|
||||
additional_ffmpeg_args = []
|
||||
|
||||
if container == "dash":
|
||||
headers = self.provider.arlo.GetMPDHeaders(url)
|
||||
ffmpeg_headers = '\r\n'.join([
|
||||
f'{k}: {v}'
|
||||
for k, v in headers.items()
|
||||
])
|
||||
additional_ffmpeg_args = ['-headers', ffmpeg_headers+'\r\n']
|
||||
|
||||
ffmpeg_input = {
|
||||
'url': rtsp_url,
|
||||
'container': 'rtsp',
|
||||
'url': url,
|
||||
'container': container,
|
||||
'mediaStreamOptions': mso,
|
||||
'inputArguments': [
|
||||
'-f', 'rtsp',
|
||||
'-i', rtsp_url,
|
||||
'-f', container,
|
||||
*additional_ffmpeg_args,
|
||||
'-i', url,
|
||||
]
|
||||
}
|
||||
return await scrypted_sdk.mediaManager.createFFmpegMediaObject(ffmpeg_input)
|
||||
|
||||
@async_print_exception_guard
|
||||
async def startRTCSignalingSession(self, scrypted_session):
|
||||
plugin_session = ArloCameraRTCSignalingSession(self)
|
||||
await plugin_session.initialize()
|
||||
|
||||
scrypted_setup = {
|
||||
"type": "offer",
|
||||
"audio": {
|
||||
"direction": "sendrecv" if self._can_push_to_talk() else "recvonly",
|
||||
},
|
||||
"video": {
|
||||
"direction": "recvonly",
|
||||
}
|
||||
}
|
||||
plugin_setup = {}
|
||||
|
||||
scrypted_offer = await scrypted_session.createLocalDescription("offer", scrypted_setup, sendIceCandidate=plugin_session.addIceCandidate)
|
||||
self.logger.info(f"Scrypted offer sdp:\n{scrypted_offer['sdp']}")
|
||||
await plugin_session.setRemoteDescription(scrypted_offer, plugin_setup)
|
||||
plugin_answer = await plugin_session.createLocalDescription("answer", plugin_setup, scrypted_session.sendIceCandidate)
|
||||
self.logger.info(f"Scrypted answer sdp:\n{plugin_answer['sdp']}")
|
||||
await scrypted_session.setRemoteDescription(plugin_answer, scrypted_setup)
|
||||
|
||||
return ArloCameraRTCSessionControl(plugin_session)
|
||||
|
||||
async def startIntercom(self, media) -> None:
|
||||
self.logger.info("Starting intercom")
|
||||
self.intercom_session = ArloCameraRTCSignalingSession(self)
|
||||
await self.intercom_session.initialize_push_to_talk(media)
|
||||
|
||||
if self.uses_sip_push_to_talk:
|
||||
sip_info = self.provider.arlo.GetSIPInfo()
|
||||
sip_call_info = sip_info["sipCallInfo"]
|
||||
|
||||
ice_servers = [{"url": "stun:stun.l.google.com:19302"}]
|
||||
self.logger.debug(f"Will use ice servers: {[ice['url'] for ice in ice_servers]}")
|
||||
|
||||
ice_servers = scrypted_arlo_go.Slice_webrtc_ICEServer([
|
||||
scrypted_arlo_go.NewWebRTCICEServer(
|
||||
scrypted_arlo_go.go.Slice_string([ice['url']]),
|
||||
ice.get('username', ''),
|
||||
ice.get('credential', '')
|
||||
)
|
||||
for ice in ice_servers
|
||||
])
|
||||
sip_cfg = scrypted_arlo_go.SIPInfo(
|
||||
DeviceID=self.nativeId,
|
||||
CallerURI=f"sip:{sip_call_info['id']}@{sip_call_info['domain']}:{sip_call_info['port']}",
|
||||
CalleeURI=sip_call_info['calleeUri'],
|
||||
Password=sip_call_info['password'],
|
||||
UserAgent="SIP.js/0.20.1",
|
||||
WebsocketURI="wss://livestream-z2-prod.arlo.com:7443",
|
||||
WebsocketOrigin="https://my.arlo.com",
|
||||
WebsocketHeaders=scrypted_arlo_go.HeadersMap({"User-Agent": USER_AGENTS["arlo"]}),
|
||||
)
|
||||
|
||||
self.goSM = scrypted_arlo_go.NewSIPWebRTCManager("Arlo SIP "+self.nativeId, ice_servers, sip_cfg)
|
||||
|
||||
ffmpeg_params = json.loads(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput.value))
|
||||
self.logger.debug(f"Received ffmpeg params: {ffmpeg_params}")
|
||||
audio_port = self.goSM.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
|
||||
|
||||
ffmpeg_path = await scrypted_sdk.mediaManager.getFFmpegPath()
|
||||
ffmpeg_args = [
|
||||
"-y",
|
||||
"-hide_banner",
|
||||
"-loglevel", "error",
|
||||
"-analyzeduration", "0",
|
||||
"-fflags", "-nobuffer",
|
||||
"-probesize", "500000",
|
||||
*ffmpeg_params["inputArguments"],
|
||||
"-vn",
|
||||
"-acodec", "libopus",
|
||||
"-f", "rtp",
|
||||
"-flush_packets", "1",
|
||||
f"rtp://localhost:{audio_port}?pkt_size={scrypted_arlo_go.UDP_PACKET_SIZE()}",
|
||||
]
|
||||
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with '{' '.join(ffmpeg_args)}'")
|
||||
|
||||
self.intercom_ffmpeg_subprocess = HeartbeatChildProcess("Arlo Subprocess "+self.logger_name, ffmpeg_path, *ffmpeg_args)
|
||||
self.intercom_ffmpeg_subprocess.start()
|
||||
|
||||
self.goSM.Start()
|
||||
else:
|
||||
# we need to do signaling through arlo cloud apis
|
||||
self.intercom_session = ArloCameraIntercomSession(self)
|
||||
await self.intercom_session.initialize_push_to_talk(media)
|
||||
|
||||
self.logger.info("Intercom ready")
|
||||
|
||||
@async_print_exception_guard
|
||||
async def stopIntercom(self) -> None:
|
||||
self.logger.info("Stopping intercom")
|
||||
if self.intercom_session is not None:
|
||||
await self.intercom_session.shutdown()
|
||||
self.intercom_session = None
|
||||
|
||||
def _can_push_to_talk(self) -> bool:
|
||||
# Right now, only implement push to talk for basestation cameras
|
||||
return self.arlo_device["deviceId"] != self.arlo_device["parentId"]
|
||||
if self.goSM is not None:
|
||||
self.goSM.Close()
|
||||
self.goSM = None
|
||||
|
||||
async def getVideoClip(self, videoId: str) -> MediaObject:
|
||||
self.logger.info(f"Getting video clip {videoId}")
|
||||
@@ -580,7 +603,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, DeviceProvider,
|
||||
return self.vss
|
||||
|
||||
|
||||
class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
|
||||
class ArloCameraIntercomSession(BackgroundTaskMixin):
|
||||
def __init__(self, camera):
|
||||
super().__init__()
|
||||
self.camera = camera
|
||||
@@ -589,10 +612,8 @@ class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
|
||||
self.arlo_device = camera.arlo_device
|
||||
self.arlo_basestation = camera.arlo_basestation
|
||||
|
||||
self.ffmpeg_subprocess = None
|
||||
self.intercom_ffmpeg_subprocess = None
|
||||
|
||||
self.scrypted_pc = None
|
||||
self.arlo_pc = None
|
||||
self.arlo_sdp_answered = False
|
||||
|
||||
@@ -639,24 +660,26 @@ class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
|
||||
self.provider.arlo.SubscribeToCandidateAnswers(self.arlo_basestation, self.arlo_device, callback)
|
||||
)
|
||||
|
||||
async def initialize(self):
|
||||
self.logger.info("Initializing video stream for RTC")
|
||||
rtsp_url = await self.camera._getVideoStreamURL()
|
||||
@async_print_exception_guard
|
||||
async def initialize_push_to_talk(self, media):
|
||||
self.logger.info("Initializing push to talk")
|
||||
|
||||
cfg = scrypted_arlo_go.WebRTCConfiguration(
|
||||
ICEServers=scrypted_arlo_go.Slice_webrtc_ICEServer([
|
||||
scrypted_arlo_go.NewWebRTCICEServer(
|
||||
scrypted_arlo_go.go.Slice_string(["turn:turn0.clockworkmod.com", "turn:n0.clockworkmod.com", "turn:n1.clockworkmod.com"]),
|
||||
"foo",
|
||||
"bar"
|
||||
)
|
||||
])
|
||||
)
|
||||
cfg = scrypted_arlo_go.WebRTCConfiguration()
|
||||
self.scrypted_pc = scrypted_arlo_go.NewWebRTCManager("Arlo "+self.camera.logger_name, cfg)
|
||||
session_id, ice_servers = self.provider.arlo.StartPushToTalk(self.arlo_basestation, self.arlo_device)
|
||||
self.logger.debug(f"Received ice servers: {[ice['url'] for ice in ice_servers]}")
|
||||
|
||||
audio_port = self.scrypted_pc.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
|
||||
video_port = self.scrypted_pc.InitializeVideoRTPListener(scrypted_arlo_go.WebRTCMimeTypeH264)
|
||||
ice_servers = scrypted_arlo_go.Slice_webrtc_ICEServer([
|
||||
scrypted_arlo_go.NewWebRTCICEServer(
|
||||
scrypted_arlo_go.go.Slice_string([ice['url']]),
|
||||
ice.get('username', ''),
|
||||
ice.get('credential', '')
|
||||
)
|
||||
for ice in ice_servers
|
||||
])
|
||||
self.arlo_pc = scrypted_arlo_go.NewWebRTCManager("Arlo WebRTC "+self.camera.logger_name, ice_servers)
|
||||
|
||||
ffmpeg_params = json.loads(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput.value))
|
||||
self.logger.debug(f"Received ffmpeg params: {ffmpeg_params}")
|
||||
audio_port = self.arlo_pc.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
|
||||
|
||||
ffmpeg_path = await scrypted_sdk.mediaManager.getFFmpegPath()
|
||||
ffmpeg_args = [
|
||||
@@ -665,200 +688,48 @@ class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
|
||||
"-loglevel", "error",
|
||||
"-analyzeduration", "0",
|
||||
"-fflags", "-nobuffer",
|
||||
"-max_probe_packets", "2",
|
||||
"-vcodec", "h264",
|
||||
"-acodec", "aac",
|
||||
"-i", rtsp_url,
|
||||
"-an",
|
||||
"-vcodec", "copy",
|
||||
"-f", "rtp",
|
||||
"-flush_packets", "1",
|
||||
f"rtp://localhost:{video_port}",
|
||||
"-probesize", "500000",
|
||||
*ffmpeg_params["inputArguments"],
|
||||
"-vn",
|
||||
"-acodec", "libopus",
|
||||
"-f", "rtp",
|
||||
"-flush_packets", "1",
|
||||
f"rtp://localhost:{audio_port}?pkt_size={scrypted_arlo_go.UDP_PACKET_SIZE()}",
|
||||
]
|
||||
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with {ffmpeg_args}")
|
||||
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with '{' '.join(ffmpeg_args)}'")
|
||||
|
||||
self.ffmpeg_subprocess = HeartbeatChildProcess("Arlo "+self.camera.logger_name, ffmpeg_path, *ffmpeg_args)
|
||||
self.ffmpeg_subprocess.start()
|
||||
self.intercom_ffmpeg_subprocess = HeartbeatChildProcess("Arlo Subprocess "+self.camera.logger_name, ffmpeg_path, *ffmpeg_args)
|
||||
self.intercom_ffmpeg_subprocess.start()
|
||||
|
||||
if self.camera._can_push_to_talk():
|
||||
self.create_task(self.initialize_push_to_talk())
|
||||
self.sdp_answered = False
|
||||
|
||||
async def initialize_push_to_talk(self, media=None):
|
||||
try:
|
||||
self.logger.info("Initializing push to talk")
|
||||
offer = self.arlo_pc.CreateOffer()
|
||||
offer_sdp = scrypted_arlo_go.WebRTCSessionDescriptionSDP(offer)
|
||||
self.logger.info(f"Arlo offer sdp:\n{offer_sdp}")
|
||||
|
||||
session_id, ice_servers = self.provider.arlo.StartPushToTalk(self.arlo_basestation, self.arlo_device)
|
||||
self.logger.debug(f"Received ice servers: {[ice['url'] for ice in ice_servers]}")
|
||||
self.arlo_pc.SetLocalDescription(offer)
|
||||
|
||||
cfg = scrypted_arlo_go.WebRTCConfiguration(
|
||||
ICEServers=scrypted_arlo_go.Slice_webrtc_ICEServer([
|
||||
scrypted_arlo_go.NewWebRTCICEServer(
|
||||
scrypted_arlo_go.go.Slice_string([ice['url']]),
|
||||
ice.get('username', ''),
|
||||
ice.get('credential', '')
|
||||
)
|
||||
for ice in ice_servers
|
||||
])
|
||||
)
|
||||
self.arlo_pc = scrypted_arlo_go.NewWebRTCManager("Arlo "+self.camera.logger_name, cfg)
|
||||
self.provider.arlo.NotifyPushToTalkSDP(
|
||||
self.arlo_basestation, self.arlo_device,
|
||||
session_id, offer_sdp
|
||||
)
|
||||
|
||||
if media is not None:
|
||||
ffmpeg_params = json.loads(await scrypted_sdk.mediaManager.convertMediaObjectToBuffer(media, ScryptedMimeTypes.FFmpegInput.value))
|
||||
self.logger.debug(f"Received ffmpeg params: {ffmpeg_params}")
|
||||
audio_port = self.arlo_pc.InitializeAudioRTPListener(scrypted_arlo_go.WebRTCMimeTypeOpus)
|
||||
|
||||
ffmpeg_path = await scrypted_sdk.mediaManager.getFFmpegPath()
|
||||
ffmpeg_args = [
|
||||
"-y",
|
||||
"-hide_banner",
|
||||
"-loglevel", "error",
|
||||
"-analyzeduration", "0",
|
||||
"-fflags", "-nobuffer",
|
||||
"-probesize", "500000",
|
||||
*ffmpeg_params["inputArguments"],
|
||||
"-vn",
|
||||
"-acodec", "libopus",
|
||||
"-f", "rtp",
|
||||
"-flush_packets", "1",
|
||||
f"rtp://localhost:{audio_port}?pkt_size={scrypted_arlo_go.UDP_PACKET_SIZE()}",
|
||||
]
|
||||
self.logger.debug(f"Starting ffmpeg at {ffmpeg_path} with {ffmpeg_args}")
|
||||
|
||||
self.intercom_ffmpeg_subprocess = HeartbeatChildProcess("Arlo "+self.camera.logger_name, ffmpeg_path, *ffmpeg_args)
|
||||
self.intercom_ffmpeg_subprocess.start()
|
||||
else:
|
||||
self.logger.debug("Starting audio track forwarder")
|
||||
self.scrypted_pc.ForwardAudioTo(self.arlo_pc)
|
||||
self.logger.debug("Started audio track forwarder")
|
||||
|
||||
self.sdp_answered = False
|
||||
|
||||
offer = self.arlo_pc.CreateOffer()
|
||||
offer_sdp = scrypted_arlo_go.WebRTCSessionDescriptionSDP(offer)
|
||||
self.logger.info(f"Arlo offer sdp:\n{offer_sdp}")
|
||||
|
||||
self.arlo_pc.SetLocalDescription(offer)
|
||||
|
||||
self.provider.arlo.NotifyPushToTalkSDP(
|
||||
candidates = self.arlo_pc.WaitAndGetICECandidates()
|
||||
self.logger.debug(f"Gathered {len(candidates)} candidates")
|
||||
for candidate in candidates:
|
||||
candidate = scrypted_arlo_go.WebRTCICECandidateInit(
|
||||
scrypted_arlo_go.WebRTCICECandidate(handle=candidate).ToJSON()
|
||||
).Candidate
|
||||
self.logger.debug(f"Sending candidate to Arlo: {candidate}")
|
||||
self.provider.arlo.NotifyPushToTalkCandidate(
|
||||
self.arlo_basestation, self.arlo_device,
|
||||
session_id, offer_sdp
|
||||
session_id, candidate,
|
||||
)
|
||||
|
||||
def forward_candidates():
|
||||
try:
|
||||
candidates = self.arlo_pc.WaitAndGetICECandidates()
|
||||
self.logger.debug(f"Gathered {len(candidates)} candidates")
|
||||
for candidate in candidates:
|
||||
candidate = scrypted_arlo_go.WebRTCICECandidateInit(
|
||||
scrypted_arlo_go.WebRTCICECandidate(handle=candidate).ToJSON()
|
||||
).Candidate
|
||||
self.logger.debug(f"Sending candidate to Arlo: {candidate}")
|
||||
self.provider.arlo.NotifyPushToTalkCandidate(
|
||||
self.arlo_basestation, self.arlo_device,
|
||||
session_id, candidate,
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.error(e)
|
||||
t = threading.Thread(target=forward_candidates)
|
||||
t.start()
|
||||
except Exception as e:
|
||||
self.logger.error(e)
|
||||
|
||||
async def createLocalDescription(self, type, setup, sendIceCandidate=None):
|
||||
if type == "offer":
|
||||
raise Exception("can only create answers in ArloCameraRTCSignalingSession.createLocalDescription")
|
||||
|
||||
answer = self.scrypted_pc.CreateAnswer()
|
||||
answer_sdp = scrypted_arlo_go.WebRTCSessionDescriptionSDP(answer)
|
||||
|
||||
self.scrypted_pc.SetLocalDescription(answer)
|
||||
|
||||
if sendIceCandidate is not None:
|
||||
loop = asyncio.get_event_loop()
|
||||
def forward_candidates():
|
||||
try:
|
||||
candidates = self.scrypted_pc.WaitAndGetICECandidates()
|
||||
self.logger.debug(f"Gathered {len(candidates)} candidates")
|
||||
for candidate in candidates:
|
||||
candidate = scrypted_arlo_go.WebRTCICECandidateInit(
|
||||
scrypted_arlo_go.WebRTCICECandidate(handle=candidate).ToJSON()
|
||||
).Candidate
|
||||
self.logger.debug(f"Sending candidate to scrypted: {candidate}")
|
||||
loop.call_soon_threadsafe(
|
||||
self.create_task,
|
||||
sendIceCandidate({
|
||||
"candidate": candidate,
|
||||
"sdpMid": "0",
|
||||
"sdpMLineIndex": 0,
|
||||
})
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.error(e)
|
||||
t = threading.Thread(target=forward_candidates)
|
||||
t.start()
|
||||
|
||||
return {
|
||||
"sdp": answer_sdp,
|
||||
"type": "answer"
|
||||
}
|
||||
|
||||
async def setRemoteDescription(self, description, setup):
|
||||
if description["type"] != "offer":
|
||||
raise Exception("can only accept offers in ArloCameraRTCSignalingSession.createLocalDescription")
|
||||
|
||||
sdp = scrypted_arlo_go.WebRTCSessionDescription(scrypted_arlo_go.NewWebRTCSDPType("offer"), description["sdp"])
|
||||
self.scrypted_pc.SetRemoteDescription(sdp)
|
||||
|
||||
async def addIceCandidate(self, candidate):
|
||||
candidate = scrypted_arlo_go.WebRTCICECandidateInit(candidate["candidate"], "0", 0)
|
||||
self.scrypted_pc.AddICECandidate(candidate)
|
||||
|
||||
async def getOptions(self):
|
||||
pass
|
||||
|
||||
async def unmute_relay(self):
|
||||
return
|
||||
await self.arlo_pc.unmute_relay(self.arlo_relay_track)
|
||||
|
||||
async def mute_relay(self):
|
||||
return
|
||||
await self.arlo_pc.mute_relay(self.arlo_relay_track)
|
||||
|
||||
async def shutdown(self):
|
||||
if self.ffmpeg_subprocess is not None:
|
||||
self.ffmpeg_subprocess.stop()
|
||||
self.ffmpeg_subprocess = None
|
||||
if self.intercom_ffmpeg_subprocess is not None:
|
||||
self.intercom_ffmpeg_subprocess.stop()
|
||||
self.intercom_ffmpeg_subprocess = None
|
||||
if self.scrypted_pc is not None:
|
||||
self.scrypted_pc.Close()
|
||||
self.scrypted_pc = None
|
||||
if self.arlo_pc is not None:
|
||||
self.arlo_pc.Close()
|
||||
self.arlo_pc = None
|
||||
|
||||
|
||||
class ArloCameraRTCSessionControl:
|
||||
def __init__(self, arlo_session):
|
||||
self.arlo_session = arlo_session
|
||||
self.logger = arlo_session.logger
|
||||
|
||||
async def setPlayback(self, options):
|
||||
self.logger.debug(f"setPlayback options {options}")
|
||||
audio = options.get("audio")
|
||||
if audio is None:
|
||||
return
|
||||
if audio:
|
||||
await self.arlo_session.unmute_relay()
|
||||
else:
|
||||
await self.arlo_session.mute_relay()
|
||||
|
||||
async def endSession(self):
|
||||
self.logger.info("Ending RTC session")
|
||||
await self.arlo_session.shutdown()
|
||||
self.arlo_pc = None
|
||||
@@ -87,6 +87,9 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
|
||||
|
||||
@property
|
||||
def arlo_transport(self) -> str:
|
||||
return "SSE"
|
||||
# This code is here for posterity; however, it appears that as of 06/01/2023
# Arlo has disabled the MQTT backend
|
||||
transport = self.storage.getItem("arlo_transport")
|
||||
if transport is None or transport not in ArloProvider.arlo_transport_choices:
|
||||
transport = "SSE"
|
||||
@@ -149,13 +152,15 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
|
||||
def arlo(self) -> Arlo:
|
||||
if self._arlo is not None:
|
||||
if self._arlo_mfa_complete_auth is not None:
|
||||
if self._arlo_mfa_code == "":
|
||||
if not self._arlo_mfa_code:
|
||||
return None
|
||||
|
||||
self.logger.info("Completing Arlo MFA...")
|
||||
self._arlo_mfa_complete_auth(self._arlo_mfa_code)
|
||||
self._arlo_mfa_complete_auth = None
|
||||
self._arlo_mfa_code = None
|
||||
try:
|
||||
self._arlo_mfa_complete_auth(self._arlo_mfa_code)
|
||||
finally:
|
||||
self._arlo_mfa_complete_auth = None
|
||||
self._arlo_mfa_code = None
|
||||
self.logger.info("Arlo MFA done")
|
||||
|
||||
self.storage.setItem("arlo_auth_headers", json.dumps(dict(self._arlo.request.session.headers.items())))
|
||||
@@ -175,7 +180,6 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
|
||||
if headers:
|
||||
self._arlo.UseExistingAuth(self.arlo_user_id, json.loads(headers))
|
||||
self.logger.info(f"Initialized Arlo client, reusing stored auth headers")
|
||||
|
||||
self.create_task(self.do_arlo_setup())
|
||||
return self._arlo
|
||||
else:
|
||||
@@ -185,6 +189,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
|
||||
except Exception as e:
|
||||
traceback.print_exc()
|
||||
self._arlo = None
|
||||
self._arlo_mfa_complete_auth = None
|
||||
self._arlo_mfa_code = None
|
||||
return None
|
||||
|
||||
@@ -455,9 +460,9 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
|
||||
"group": "General",
|
||||
"key": "arlo_transport",
|
||||
"title": "Underlying Transport Protocol",
|
||||
"description": "Select the underlying transport protocol used to connect to Arlo Cloud.",
|
||||
"description": "Arlo Cloud currently only supports the SSE protocol.",
|
||||
"value": self.arlo_transport,
|
||||
"choices": self.arlo_transport_choices,
|
||||
"readonly": True,
|
||||
},
|
||||
{
|
||||
"group": "General",
|
||||
@@ -627,7 +632,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
|
||||
device = await self.getDevice_impl(nativeId)
|
||||
scrypted_interfaces = device.get_applicable_interfaces()
|
||||
manifest = device.get_device_manifest()
|
||||
self.logger.debug(f"Interfaces for {nativeId} ({camera['modelId']}): {scrypted_interfaces}")
|
||||
self.logger.debug(f"Interfaces for {nativeId} ({camera['modelId']} parent {camera['parentId']}): {scrypted_interfaces}")
|
||||
|
||||
if camera["deviceId"] == camera["parentId"]:
|
||||
provider_to_device_map.setdefault(None, []).append(manifest)
|
||||
@@ -647,6 +652,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, ScryptedDeviceL
|
||||
|
||||
if len(cameras) != len(camera_devices):
|
||||
self.logger.info(f"Discovered {len(cameras)} cameras, but only {len(camera_devices)} are usable")
|
||||
self.logger.info(f"Are all cameras shared with admin permissions?")
|
||||
else:
|
||||
self.logger.info(f"Discovered {len(cameras)} cameras")
|
||||
|
||||
|
||||
@@ -3,9 +3,8 @@ sseclient==0.0.22
|
||||
aiohttp==3.8.4
requests==2.28.2
cachetools==5.3.0
scrypted-arlo-go==0.0.2
scrypted-arlo-go==0.1.3
cloudscraper==1.2.71
cryptography==38.0.4
async-timeout==4.0.2
--extra-index-url=https://www.piwheels.org/simple/
|
||||
--extra-index-url=https://bjia56.github.io/scrypted-arlo-go/
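For reference, the bumped dependency can be resolved manually from the extra index the requirements file now points at; this is only an optional sanity check, not a step the plugin performs:

```
$ pip install --extra-index-url=https://bjia56.github.io/scrypted-arlo-go/ scrypted-arlo-go==0.1.3
```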
|
||||
|
||||
@@ -2,7 +2,9 @@
|
||||
|
||||
The C300X Plugin for Scrypted allows viewing your C300X intercom with incoming video/audio.

WARNING: You will need access to the device, see https://github.com/fquinto/bticinoClasse300x
WARNING: You will need access to the device, see https://github.com/fquinto/bticinoClasse300x.

You also need the **[c300x-controller](https://github.com/slyoldfox/c300x-controller)** and node (v17.9.1) running on your device, which will expose an API for the intercom; a quick reachability check is sketched below.
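As a quick reachability check (a suggestion, not part of the original README), you can hit the controller's validation endpoint, the same endpoint the plugin calls later in this changeset; replace the placeholder IP with your intercom's address:

```
$ curl 'http://192.168.0.XX:8080/validate-setup?raw=true'
```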
|
||||
|
||||
## Development instructions
|
||||
|
||||
@@ -17,12 +19,37 @@ $ npm run scrypted-deploy 127.0.0.1

After flashing a custom firmware you must at least:

* Install [node](https://nodejs.org/download/release/latest-v17.x/node-v17.9.1-linux-armv7l.tar.gz) on your device and run the c300x-controller on it
* Install [/lib/libatomic.so.1](http://ftp.de.debian.org/debian/pool/main/g/gcc-10-cross/libatomic1-armhf-cross_10.2.1-6cross1_all.deb) in **/lib**
* Allow access to the SIP server on port 5060
* Allow your IP to authenticate with the SIP server
* Add a SIP user for scrypted
|
||||
|
||||
To do this, use the guide below.

## Installing node and c300x-controller

```
$ cd /home/bticino/cfg/extra/
$ mkdir node
$ cd node
$ wget https://nodejs.org/download/release/latest-v17.x/node-v17.9.1-linux-armv7l.tar.gz
$ tar xvfz node-v17.9.1-linux-armv7l.tar.gz
```

Node will require libatomic.so.1, which isn't shipped with the device; get the .deb file from http://ftp.de.debian.org/debian/pool/main/g/gcc-10-cross/libatomic1-armhf-cross_10.2.1-6cross1_all.deb

```
$ ar x libatomic1-armhf-cross_10.2.1-6cross1_all.deb
```
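The `ar x` step only unpacks the .deb into its member archives; `libatomic.so.1` still has to be extracted from the package payload before it can be copied over. A minimal sketch, assuming GNU tar on your workstation and an xz-compressed payload (the archive name and internal path may differ):

```
$ tar tf data.tar.xz | grep libatomic
$ tar xf data.tar.xz --wildcards '*libatomic.so.1*'
```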
|
||||
|
||||
scp the `libatomic.so.1` to `/lib` and check that node works:

```
root@C3X-00-00-00-00-00--2222222:~# /home/bticino/cfg/extra/node/bin/node -v
v17.9.1
```
|
||||
|
||||
## Make flexisip listen on a reachable IP and add users to it

To be able to talk to our own SIP server, we need to make the SIP server on the C300X
|
||||
@@ -93,7 +120,7 @@ hashed-passwords=true
|
||||
reject-wrong-client-certificates=true
````

Now we will add a `user agent` (user) that will be used by `baresip` to register itself with `flexisip`
Now we will add a `user agent` (user) that will be used by `scrypted` to register itself with `flexisip`
|
||||
|
||||
Edit the `/etc/flexisip/users/users.db.txt` file and create a new line by copy/pasting the c300x user.
|
||||
|
||||
@@ -101,7 +128,7 @@ For example:
|
||||
|
||||
````
|
||||
c300x@1234567.bs.iotleg.com md5:ffffffffffffffffffffffffffffffff ;
|
||||
baresip@1234567.bs.iotleg.com md5:ffffffffffffffffffffffffffffffff ;
|
||||
scrypted@1234567.bs.iotleg.com md5:ffffffffffffffffffffffffffffffff ;
|
||||
````
|
||||
|
||||
Leave the md5 as the same value - I use `fffff....` just for this example.
|
||||
@@ -110,7 +137,7 @@ Edit the `/etc/flexisip/users/route.conf` file and add a new line to it, it spec
|
||||
Change the IP address to the place where you will run `baresip` (same as `trusted-hosts` above)
|
||||
|
||||
````
|
||||
<sip:baresip@1234567.bs.iotleg.com> <sip:192.168.0.XX>
|
||||
<sip:scrypted@1234567.bs.iotleg.com> <sip:192.168.0.XX>
|
||||
````
|
||||
|
||||
Edit the `/etc/flexisip/users/route_int.conf` file.
|
||||
@@ -121,7 +148,7 @@ You can look at it as a group of users that is called when you call `alluser@123
|
||||
|
||||
Add your username at the end (make sure you stay on the same line, NOT a new line!)
|
||||
````
|
||||
<sip:alluser@1234567.bs.iotleg.com> ..., <sip:baresip@1234567.bs.iotleg.com>
|
||||
<sip:alluser@1234567.bs.iotleg.com> ..., <sip:scrypted@1234567.bs.iotleg.com>
|
||||
````
|
||||
|
||||
Reboot and verify flexisip is listening on the new IP address.
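One way to verify (a suggestion, not from the original guide) is to probe the SIP port from the machine that will run scrypted; replace `<intercom-ip>` with the address flexisip now listens on:

```
$ nc -vz -w 5 <intercom-ip> 5060
```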
|
||||
|
||||
4 plugins/bticino/package-lock.json (generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/bticino",
|
||||
"version": "0.0.7",
|
||||
"version": "0.0.9",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/bticino",
|
||||
"version": "0.0.7",
|
||||
"version": "0.0.9",
|
||||
"dependencies": {
|
||||
"@slyoldfox/sip": "^0.0.6-1",
|
||||
"sdp": "^3.0.3",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/bticino",
|
||||
"version": "0.0.7",
|
||||
"version": "0.0.9",
|
||||
"scripts": {
|
||||
"scrypted-setup-project": "scrypted-setup-project",
|
||||
"prescrypted-setup-project": "scrypted-package-json",
|
||||
|
||||
@@ -2,7 +2,7 @@ import { closeQuiet, createBindZero, listenZeroSingleClient } from '@scrypted/co
|
||||
import { sleep } from '@scrypted/common/src/sleep';
|
||||
import { RtspServer } from '@scrypted/common/src/rtsp-server';
|
||||
import { addTrackControls } from '@scrypted/common/src/sdp-utils';
|
||||
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, PictureOptions, ResponseMediaStreamOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
|
||||
import sdk, { BinarySensor, Camera, DeviceProvider, FFmpegInput, HttpRequest, HttpRequestHandler, HttpResponse, Intercom, MediaObject, MediaStreamUrl, PictureOptions, Reboot, ResponseMediaStreamOptions, ScryptedDevice, ScryptedDeviceBase, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera, VideoClip, VideoClipOptions, VideoClips } from '@scrypted/sdk';
|
||||
import { SipCallSession } from '../../sip/src/sip-call-session';
|
||||
import { RtpDescription } from '../../sip/src/rtp-utils';
|
||||
import { VoicemailHandler } from './bticino-voicemailHandler';
|
||||
@@ -19,11 +19,12 @@ import { InviteHandler } from './bticino-inviteHandler';
|
||||
import { SipRequest } from '../../sip/src/sip-manager';
|
||||
|
||||
import { get } from 'http'
|
||||
import { ControllerApi } from './c300x-controller-api';
|
||||
|
||||
const STREAM_TIMEOUT = 65000;
|
||||
const { mediaManager } = sdk;
|
||||
|
||||
export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvider, Intercom, Camera, VideoCamera, Settings, BinarySensor, HttpRequestHandler, VideoClips {
|
||||
export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvider, Intercom, Camera, VideoCamera, Settings, BinarySensor, HttpRequestHandler, VideoClips, Reboot {
|
||||
|
||||
private session: SipCallSession
|
||||
private remoteRtpDescription: RtpDescription
|
||||
@@ -35,8 +36,9 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
|
||||
public requestHandlers: CompositeSipMessageHandler = new CompositeSipMessageHandler()
|
||||
public incomingCallRequest : SipRequest
|
||||
private settingsStorage: BticinoStorageSettings = new BticinoStorageSettings( this )
|
||||
public voicemailHandler : VoicemailHandler = new VoicemailHandler(this)
|
||||
private voicemailHandler : VoicemailHandler = new VoicemailHandler(this)
|
||||
private inviteHandler : InviteHandler = new InviteHandler(this)
|
||||
private controllerApi : ControllerApi = new ControllerApi(this)
|
||||
//TODO: randomize this
|
||||
private keyAndSalt : string = "/qE7OPGKp9hVGALG2KcvKWyFEZfSSvm7bYVDjT8X"
|
||||
//private decodedSrtpOptions : SrtpOptions = decodeSrtpOptions( this.keyAndSalt )
|
||||
@@ -55,14 +57,24 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
|
||||
})();
|
||||
}
|
||||
|
||||
reboot(): Promise<void> {
|
||||
return new Promise<void>( (resolve,reject ) => {
|
||||
let c300x = SipHelper.getIntercomIp(this)
|
||||
|
||||
get(`http://${c300x}:8080/reboot?now`, (res) => {
|
||||
console.log("Reboot API result: " + res.statusCode)
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
getVideoClips(options?: VideoClipOptions): Promise<VideoClip[]> {
|
||||
return new Promise<VideoClip[]>( (resolve,reject ) => {
|
||||
let c300x = SipHelper.getIntercomIp(this)
|
||||
if( !c300x ) return []
|
||||
get(`http://${c300x}:8080/videoclips?raw=true&startTime=${options.startTime/1000}&endTime=${options.endTime/1000}`, (res) => {
|
||||
let rawData = '';
|
||||
res.on('data', (chunk) => { rawData += chunk; });
|
||||
res.on('end', () => {
|
||||
let rawData = '';
|
||||
res.on('data', (chunk) => { rawData += chunk; });
|
||||
res.on('end', () => {
|
||||
try {
|
||||
const parsedData : [] = JSON.parse(rawData);
|
||||
let videoClips : VideoClip[] = []
|
||||
@@ -93,7 +105,7 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
|
||||
return mediaManager.createMediaObjectFromUrl(url);
|
||||
}
|
||||
getVideoClipThumbnail(thumbnailId: string): Promise<MediaObject> {
|
||||
let c300x = SipHelper.sipOptions(this)
|
||||
let c300x = SipHelper.getIntercomIp(this)
|
||||
const url = `http://${c300x}:8080/voicemail?msg=${thumbnailId}/aswm.jpg&raw=true`;
|
||||
return mediaManager.createMediaObjectFromUrl(url);
|
||||
}
|
||||
@@ -224,8 +236,6 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
|
||||
}
|
||||
|
||||
this.stopSession();
|
||||
|
||||
|
||||
const { clientPromise: playbackPromise, port: playbackPort, url: clientUrl } = await listenZeroSingleClient()
|
||||
|
||||
const playbackUrl = clientUrl
|
||||
@@ -234,6 +244,7 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
|
||||
client.setKeepAlive(true, 10000)
|
||||
let sip: SipCallSession
|
||||
try {
|
||||
await this.controllerApi.updateStreamEndpoint()
|
||||
let rtsp: RtspServer;
|
||||
const cleanup = () => {
|
||||
client.destroy();
|
||||
@@ -366,6 +377,9 @@ export class BticinoSipCamera extends ScryptedDeviceBase implements DeviceProvid
|
||||
}
|
||||
|
||||
async releaseDevice(id: string, nativeId: string): Promise<void> {
|
||||
this.voicemailHandler.cancelTimer()
|
||||
this.persistentSipManager.cancelTimer()
|
||||
this.controllerApi.cancelTimer()
|
||||
}
|
||||
|
||||
reset() {
|
||||
|
||||
@@ -6,7 +6,7 @@ export class VoicemailHandler extends SipRequestHandler {
|
||||
|
||||
constructor( private sipCamera : BticinoSipCamera ) {
|
||||
super()
|
||||
setTimeout( () => {
|
||||
this.timeout = setTimeout( () => {
|
||||
// Delay a bit and run in a different thread in case this fails
|
||||
this.checkVoicemail()
|
||||
}, 10000 )
|
||||
@@ -25,7 +25,7 @@ export class VoicemailHandler extends SipRequestHandler {
|
||||
this.timeout = setTimeout( () => this.checkVoicemail() , 5 * 60 * 1000 )
|
||||
}
|
||||
|
||||
cancelVoicemailCheck() {
|
||||
cancelTimer() {
|
||||
if( this.timeout ) {
|
||||
clearTimeout(this.timeout)
|
||||
}
|
||||
|
||||
125 plugins/bticino/src/c300x-controller-api.ts (Normal file)
@@ -0,0 +1,125 @@
|
||||
import * as nodeIp from "ip";
|
||||
import { get } from 'http'
|
||||
import * as net from 'net'
|
||||
import { BticinoSipCamera } from "./bticino-camera";
|
||||
import { SipHelper } from './sip-helper';
|
||||
|
||||
export class ControllerApi {
|
||||
private timeout : NodeJS.Timeout
|
||||
|
||||
constructor( private sipCamera : BticinoSipCamera ) {
|
||||
this.timeout = setTimeout( () => {
|
||||
// Delay a bit and run in a different thread in case this fails
|
||||
this.registerEndpoints( true )
|
||||
}, 5000 )
|
||||
}
|
||||
|
||||
/**
|
||||
* Will validate certain requirements for scrypted to work correctly with the intercom.
|
||||
*/
|
||||
public static validate( ipAddress ) {
|
||||
return this.validateFlexisipSipPort(ipAddress).then( this.validateController )
|
||||
}
|
||||
|
||||
/**
|
||||
* Will validate if the non-secure SIP port was opened after modifying /etc/init.d/flexisipsh
|
||||
*/
|
||||
private static validateFlexisipSipPort( ipAddress : string ) : Promise<string> {
|
||||
let conn = net.createConnection( { host: ipAddress, port: 5060, timeout: 5000 } )
|
||||
return new Promise( (resolve, reject) => {
|
||||
conn.setTimeout(5000);
|
||||
conn.on('connect', () => resolve( ipAddress ));
|
||||
conn.on('timeout', () => reject( new Error("Timeout connecting to port 5060, is this a Bticino intercom? Did you change /etc/init.d/flexisipsh to make it listen on this port?") ) );
|
||||
conn.on('error', () => reject( new Error("Error connecting to port 5060, is this a Bticino intercom? Did you change /etc/init.d/flexisipsh to make it listen on this port?") ) );
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Will validate if the c300x-controller is running on port 8080.
|
||||
* The c300x-controller will return errors if some configuration errors are present on the intercom.
|
||||
*/
|
||||
private static validateController( ipAddress : string ) : Promise<void> {
|
||||
// Will throw an exception if invalid format
|
||||
const c300x = nodeIp.toBuffer( ipAddress )
|
||||
const validatedIp = nodeIp.toString(c300x)
|
||||
|
||||
const url = `http://${validatedIp}:8080/validate-setup?raw=true`
|
||||
|
||||
return new Promise( (resolve, reject) => get(url, (res) => {
|
||||
let body = "";
|
||||
res.on("data", data => { body += data });
|
||||
res.on("end", () => {
|
||||
try {
|
||||
let parsedBody = JSON.parse( body )
|
||||
if( parsedBody["errors"].length > 0 ) {
|
||||
reject( new Error( parsedBody["errors"][0] ) )
|
||||
} else {
|
||||
parsedBody["ipAddress"] = validatedIp
|
||||
resolve( parsedBody )
|
||||
}
|
||||
} catch( e ) {
|
||||
reject( e )
|
||||
}
|
||||
})
|
||||
res.on("error", (e) => { reject(e)})
|
||||
if( res.statusCode != 200 ) {
|
||||
reject( new Error(`Could not validate required c300x-controller. Check ${url}`) )
|
||||
}
|
||||
} ).on("error", (e) => { reject(`Could not connect to the c300x-controller at ${url}`) }) )
|
||||
}
|
||||
|
||||
/**
|
||||
* This verifies if the intercom is customized correctly. It verifies:
|
||||
*
|
||||
* - if a dedicated scrypted sip user is added for this specific camera instance in /etc/flexisip/users/users.db.txt
|
||||
* - if this dedicated scrypted sip user is configured in /etc/flexisip/users/route.conf and /etc/flexisip/users/route_int.conf
|
||||
*/
|
||||
public registerEndpoints( verifyUser : boolean ) {
|
||||
let ipAddress = SipHelper.getIntercomIp(this.sipCamera)
|
||||
let sipFrom = SipHelper.getIdentifier(this.sipCamera)
|
||||
const pressed = Buffer.from(this.sipCamera.doorbellWebhookUrl + 'pressed').toString('base64')
|
||||
const locked = Buffer.from(this.sipCamera.doorbellLockWebhookUrl + 'locked').toString('base64')
|
||||
const unlocked = Buffer.from(this.sipCamera.doorbellLockWebhookUrl + 'unlocked').toString('base64')
|
||||
get(`http://${ipAddress}:8080/register-endpoint?raw=true&identifier=${sipFrom}&pressed=${pressed}&locked=${locked}&unlocked=${unlocked}&verifyUser=${verifyUser}`, (res) => {
|
||||
if( verifyUser ) {
|
||||
let body = "";
|
||||
res.on("data", data => { body += data });
|
||||
res.on("end", () => {
|
||||
try {
|
||||
let parsedBody = JSON.parse( body )
|
||||
if( parsedBody["errors"].length > 0 ) {
|
||||
this.sipCamera.log.a("This camera is not setup correctly, it will not be able to receive the incoming doorbell stream. Check the console for the errors.")
|
||||
parsedBody["errors"].forEach( error => {
|
||||
this.sipCamera.console.error( "ERROR: " + error )
|
||||
});
|
||||
}
|
||||
} catch( e ) {
|
||||
this.sipCamera.console.error("Error parsing body to JSON: " + body )
|
||||
}
|
||||
})
|
||||
}
|
||||
console.log("Endpoint registration status: " + res.statusCode)
|
||||
});
|
||||
|
||||
// The default evict time on the c300x-controller is 5 minutes, so this will certainly be within bounds
|
||||
this.timeout = setTimeout( () => this.registerEndpoints( false ) , 2 * 60 * 1000 )
|
||||
}
|
||||
|
||||
/**
|
||||
* Informs the c300x-controller where to send the stream to
|
||||
*/
|
||||
public updateStreamEndpoint() : Promise<void> {
|
||||
let ipAddress = SipHelper.getIntercomIp(this.sipCamera)
|
||||
let sipFrom = SipHelper.getIdentifier(this.sipCamera)
|
||||
return new Promise( (resolve, reject) => get(`http://${ipAddress}:8080/register-endpoint?raw=true&updateStreamEndpoint=${sipFrom}`, (res) => {
|
||||
if( res.statusCode != 200 ) reject( "ERROR: Could not update streaming endpoint, call returned: " + res.statusCode )
|
||||
else resolve()
|
||||
} ) );
|
||||
}
|
||||
|
||||
public cancelTimer() {
|
||||
if( this.timeout ) {
|
||||
clearTimeout(this.timeout)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
import sdk, { Device, DeviceCreator, DeviceCreatorSettings, DeviceProvider, LockState, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, Setting } from '@scrypted/sdk'
|
||||
import { randomBytes } from 'crypto'
|
||||
import { BticinoSipCamera } from './bticino-camera'
|
||||
import { ControllerApi } from './c300x-controller-api';
|
||||
|
||||
const { systemManager, deviceManager } = sdk
|
||||
|
||||
@@ -14,41 +15,60 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
|
||||
key: 'newCamera',
|
||||
title: 'Add Camera',
|
||||
placeholder: 'Camera name, e.g.: Back Yard Camera, Baby Camera, etc',
|
||||
}
|
||||
},
|
||||
{
|
||||
key: 'ip',
|
||||
title: 'IP Address',
|
||||
placeholder: 'IP Address of the C300X intercom',
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
async createDevice(settings: DeviceCreatorSettings): Promise<string> {
|
||||
const nativeId = randomBytes(4).toString('hex')
|
||||
const name = settings.newCamera?.toString()
|
||||
const camera = await this.updateDevice(nativeId, name)
|
||||
|
||||
const device: Device = {
|
||||
providerNativeId: nativeId,
|
||||
info: {
|
||||
//model: `${camera.model} (${camera.data.kind})`,
|
||||
manufacturer: 'BticinoPlugin',
|
||||
//firmware: camera.data.firmware_version,
|
||||
//serialNumber: camera.data.device_id
|
||||
},
|
||||
nativeId: nativeId + '-lock',
|
||||
name: name + ' Lock',
|
||||
type: ScryptedDeviceType.Lock,
|
||||
interfaces: [ScryptedInterface.Lock, ScryptedInterface.HttpRequestHandler],
|
||||
if( !settings.ip ) {
|
||||
throw new Error('IP address is required!')
|
||||
}
|
||||
|
||||
const ret = await deviceManager.onDevicesChanged({
|
||||
providerNativeId: nativeId,
|
||||
devices: [device],
|
||||
let validate = ControllerApi.validate( settings.ip )
|
||||
|
||||
return validate.then( async (setupData) => {
|
||||
const nativeId = randomBytes(4).toString('hex')
|
||||
const name = settings.newCamera?.toString() === undefined ? "Doorbell" : settings.newCamera?.toString()
|
||||
await this.updateDevice(nativeId, name)
|
||||
|
||||
const device: Device = {
|
||||
providerNativeId: nativeId,
|
||||
info: {
|
||||
//model: `${camera.model} (${camera.data.kind})`,
|
||||
manufacturer: 'BticinoPlugin',
|
||||
//firmware: camera.data.firmware_version,
|
||||
//serialNumber: camera.data.device_id
|
||||
},
|
||||
nativeId: nativeId + '-lock',
|
||||
name: name + ' Lock',
|
||||
type: ScryptedDeviceType.Lock,
|
||||
interfaces: [ScryptedInterface.Lock, ScryptedInterface.HttpRequestHandler],
|
||||
}
|
||||
|
||||
await deviceManager.onDevicesChanged({
|
||||
providerNativeId: nativeId,
|
||||
devices: [device],
|
||||
})
|
||||
|
||||
let sipCamera : BticinoSipCamera = await this.getDevice(nativeId)
|
||||
|
||||
sipCamera.putSetting("sipfrom", "scrypted-" + sipCamera.id + "@127.0.0.1")
|
||||
sipCamera.putSetting("sipto", "c300x@" + setupData["ipAddress"] )
|
||||
sipCamera.putSetting("sipdomain", setupData["domain"])
|
||||
sipCamera.putSetting("sipdebug", true )
|
||||
|
||||
systemManager.getDeviceById<BticinoSipCamera>(sipCamera.id)
|
||||
|
||||
let lock = await sipCamera.getDevice(undefined)
|
||||
lock.lockState = LockState.Locked
|
||||
|
||||
return nativeId
|
||||
})
|
||||
|
||||
let sipCamera : BticinoSipCamera = await this.getDevice(nativeId)
|
||||
let foo : BticinoSipCamera = systemManager.getDeviceById<BticinoSipCamera>(sipCamera.id)
|
||||
|
||||
let lock = await sipCamera.getDevice(undefined)
|
||||
lock.lockState = LockState.Locked
|
||||
|
||||
return nativeId
|
||||
}
|
||||
|
||||
updateDevice(nativeId: string, name: string) {
|
||||
@@ -69,7 +89,8 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
|
||||
ScryptedInterface.BinarySensor,
|
||||
ScryptedDeviceType.DeviceProvider,
|
||||
ScryptedInterface.HttpRequestHandler,
|
||||
ScryptedInterface.VideoClips
|
||||
ScryptedInterface.VideoClips,
|
||||
ScryptedInterface.Reboot
|
||||
],
|
||||
type: ScryptedDeviceType.Doorbell,
|
||||
})
|
||||
@@ -86,7 +107,6 @@ export class BticinoSipPlugin extends ScryptedDeviceBase implements DeviceProvid
|
||||
async releaseDevice(id: string, nativeId: string): Promise<void> {
|
||||
let camera = this.devices.get(nativeId)
|
||||
if( camera ) {
|
||||
camera.voicemailHandler.cancelVoicemailCheck()
|
||||
if( this.devices.delete( nativeId ) ) {
|
||||
this.console.log("Removed device from list: " + id + " / " + nativeId )
|
||||
}
|
||||
|
||||
@@ -14,10 +14,11 @@ export class PersistentSipManager {
|
||||
private sipManager : SipManager
|
||||
private lastRegistration : number = 0
|
||||
private expireInterval : number = 0
|
||||
private timeout : NodeJS.Timeout
|
||||
|
||||
constructor( private camera : BticinoSipCamera ) {
|
||||
// Give it a second and run in a separate thread to avoid failure on creation for the from/to/domain check
|
||||
setTimeout( () => this.enable() , CHECK_INTERVAL )
|
||||
this.timeout = setTimeout( () => this.enable() , CHECK_INTERVAL )
|
||||
}
|
||||
|
||||
async enable() : Promise<SipManager> {
|
||||
@@ -56,7 +57,7 @@ export class PersistentSipManager {
|
||||
this.lastRegistration = now + (60 * 1000) - this.expireInterval
|
||||
throw e
|
||||
} finally {
|
||||
setTimeout( () => this.register(), CHECK_INTERVAL )
|
||||
this.timeout = setTimeout( () => this.register(), CHECK_INTERVAL )
|
||||
}
|
||||
}
|
||||
|
||||
@@ -65,6 +66,12 @@ export class PersistentSipManager {
|
||||
return SipCallSession.createCallSession(this.camera.console, "Bticino", sipOptions, sm )
|
||||
}
|
||||
|
||||
cancelTimer() {
|
||||
if( this.timeout ) {
|
||||
clearTimeout(this.timeout)
|
||||
}
|
||||
}
|
||||
|
||||
reloadSipOptions() {
|
||||
this.sipManager?.setSipOptions( null )
|
||||
}
|
||||
|
||||
@@ -39,6 +39,15 @@ export class SipHelper {
|
||||
}
|
||||
}
|
||||
|
||||
public static getIdentifier( camera : BticinoSipCamera ) : string {
|
||||
let to = camera.storage.getItem('sipfrom')?.trim();
|
||||
const domain = camera.storage.getItem('sipdomain')?.trim()
|
||||
if( to ) {
|
||||
return to.split('@')[0] + '%40' + domain;
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
public static getIntercomIp( camera : BticinoSipCamera ): string {
|
||||
let to = camera.storage.getItem('sipto')?.trim();
|
||||
if( to ) {
|
||||
|
||||
4 plugins/core/package-lock.json (generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/core",
|
||||
"version": "0.1.128",
|
||||
"version": "0.1.129",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/core",
|
||||
"version": "0.1.128",
|
||||
"version": "0.1.129",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@scrypted/common": "file:../../common",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/core",
|
||||
"version": "0.1.128",
|
||||
"version": "0.1.129",
|
||||
"description": "Scrypted Core plugin. Provides the UI, websocket, and engine.io APIs.",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache-2.0",
|
||||
|
||||
@@ -5,6 +5,8 @@
|
||||
<v-card-text>
|
||||
<v-card-title style="justify-content: center;" class="headline text-uppercase">Scrypted
|
||||
</v-card-title>
|
||||
<v-card-subtitle v-if="$store.state.hasLogin === false" style="justify-content: center;" class="text-uppercase">Create Account
|
||||
</v-card-subtitle>
|
||||
<v-container grid-list-md>
|
||||
<v-layout wrap>
|
||||
<v-flex xs12>
|
||||
@@ -34,12 +36,13 @@
|
||||
<v-card-actions>
|
||||
<v-tooltip bottom>
|
||||
<template v-slot:activator="{ on }">
|
||||
<v-btn v-on="on" icon href="https://twitter.com/scryptedapp/">
|
||||
<v-icon small>fab fa-twitter</v-icon>
|
||||
<v-btn v-on="on" icon href="https://discord.gg/DcFzmBHYGq">
|
||||
<v-icon small>fab fa-discord</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
<span>Twitter</span>
|
||||
<span>Discord</span>
|
||||
</v-tooltip>
|
||||
|
||||
<v-tooltip bottom>
|
||||
<template v-slot:activator="{ on }">
|
||||
<v-btn v-on="on" icon href="https://www.reddit.com/r/Scrypted/">
|
||||
@@ -48,6 +51,7 @@
|
||||
</template>
|
||||
<span>Reddit</span>
|
||||
</v-tooltip>
|
||||
|
||||
<v-tooltip bottom>
|
||||
<template v-slot:activator="{ on }">
|
||||
<v-btn v-on="on" icon href="https://github.com/koush/scrypted">
|
||||
@@ -56,14 +60,7 @@
|
||||
</template>
|
||||
<span>Github</span>
|
||||
</v-tooltip>
|
||||
<v-tooltip bottom>
|
||||
<template v-slot:activator="{ on }">
|
||||
<v-btn v-on="on" icon href="https://discord.gg/DcFzmBHYGq">
|
||||
<v-icon small>fab fa-discord</v-icon>
|
||||
</v-btn>
|
||||
</template>
|
||||
<span>Discord</span>
|
||||
</v-tooltip>
|
||||
|
||||
<v-spacer></v-spacer>
|
||||
<v-btn type="submit" text @click.prevent="doLogin">Log In</v-btn>
|
||||
</v-card-actions>
|
||||
|
||||
@@ -50,6 +50,17 @@
|
||||
<v-list-item-title>Discord</v-list-item-title>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
|
||||
<v-list-item link href="https://www.reddit.com/r/Scrypted/" active-class="purple white--text tile">
|
||||
<v-list-item-icon>
|
||||
<v-icon small>fab fa-reddit</v-icon>
|
||||
</v-list-item-icon>
|
||||
|
||||
<v-list-item-content>
|
||||
<v-list-item-title>Reddit</v-list-item-title>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
|
||||
<v-list-item link href="https://github.com/koush/scrypted" active-class="purple white--text tile">
|
||||
<v-list-item-icon>
|
||||
<v-icon small>fab fa-github</v-icon>
|
||||
@@ -59,6 +70,7 @@
|
||||
<v-list-item-title>Github</v-list-item-title>
|
||||
</v-list-item-content>
|
||||
</v-list-item>
|
||||
|
||||
<v-divider></v-divider>
|
||||
<v-list-item active-class="deep-purple accent-4 white--text">
|
||||
<v-list-item-icon>
|
||||
@@ -137,20 +149,31 @@ export default {
|
||||
getComponentViewPath,
|
||||
async checkUpdateAvailable() {
|
||||
await this.$connectingScrypted;
|
||||
const info = await this.$scrypted.systemManager.getComponent("info");
|
||||
const version = await info.getVersion();
|
||||
this.currentVersion = version;
|
||||
const { updateAvailable } = await checkUpdate(
|
||||
"@scrypted/server",
|
||||
version
|
||||
const serviceControl = await this.$scrypted.systemManager.getComponent(
|
||||
"service-control"
|
||||
);
|
||||
this.updateAvailable = updateAvailable;
|
||||
if (updateAvailable) {
|
||||
try {
|
||||
this.updateAvailable = await serviceControl.getUpdateAvailable();
|
||||
}
|
||||
catch (e) {
|
||||
// old scrypted servers don't support this call, or it may be unimplemented,
// in which case fall back and determine what the install type is.
|
||||
const info = await this.$scrypted.systemManager.getComponent("info");
|
||||
const version = await info.getVersion();
|
||||
this.currentVersion = version;
|
||||
const { updateAvailable } = await checkUpdate(
|
||||
"@scrypted/server",
|
||||
version
|
||||
);
|
||||
this.updateAvailable = updateAvailable;
|
||||
}
|
||||
|
||||
if (this.updateAvailable) {
|
||||
const logger = this.$scrypted.deviceManager.getDeviceLogger();
|
||||
const u = new URL(window.location)
|
||||
u.hash = "#/component/settings";
|
||||
logger.clearAlerts();
|
||||
logger.a(`Scrypted Server update available: ${updateAvailable}. ${u}`);
|
||||
logger.a(`Scrypted Server update available: ${this.updateAvailable}. ${u}`);
|
||||
}
|
||||
},
|
||||
filterComponents: function (category) {
|
||||
|
||||
@@ -130,17 +130,28 @@ export default {
|
||||
const info = await this.$scrypted.systemManager.getComponent("info");
|
||||
const version = await info.getVersion();
|
||||
this.currentVersion = version;
|
||||
const { updateAvailable } = await checkUpdate(
|
||||
"@scrypted/server",
|
||||
version
|
||||
|
||||
const serviceControl = await this.$scrypted.systemManager.getComponent(
|
||||
"service-control"
|
||||
);
|
||||
this.updateAvailable = updateAvailable;
|
||||
try {
|
||||
this.updateAvailable = await serviceControl.getUpdateAvailable();
|
||||
}
|
||||
catch (e) {
|
||||
// old scrypted servers don't support this call, or it may be unimplemented,
// in which case fall back and determine what the install type is.
|
||||
const { updateAvailable } = await checkUpdate(
|
||||
"@scrypted/server",
|
||||
version
|
||||
);
|
||||
this.updateAvailable = updateAvailable;
|
||||
}
|
||||
},
|
||||
async loadEnv() {
|
||||
const info = await this.$scrypted.systemManager.getComponent("info");
|
||||
const env = await info.getScryptedEnv();
|
||||
this.showRestart = !!env.SCRYPTED_CAN_RESTART;
|
||||
this.canUpdate = !!env.SCRYPTED_NPM_SERVE || !!env.SCRYPTED_WEBHOOK_UPDATE;
|
||||
this.canUpdate = !!env.SCRYPTED_NPM_SERVE || !!env.SCRYPTED_WEBHOOK_UPDATE || !!env.SCRYPTED_CAN_UPDATE;
|
||||
},
|
||||
async doRestart() {
|
||||
this.restartStatus = "Restarting...";
|
||||
|
||||
2 plugins/homekit/.vscode/settings.json (vendored)
@@ -1,4 +1,4 @@
|
||||
|
||||
{
|
||||
"scrypted.debugHost": "127.0.0.1"
|
||||
"scrypted.debugHost": "koushik-ubuntu"
|
||||
}
|
||||
4 plugins/homekit/package-lock.json (generated)
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/homekit",
|
||||
"version": "1.2.25",
|
||||
"version": "1.2.27",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/homekit",
|
||||
"version": "1.2.25",
|
||||
"version": "1.2.27",
|
||||
"dependencies": {
|
||||
"@koush/werift-src": "file:../../external/werift",
|
||||
"check-disk-space": "^3.3.1",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/homekit",
|
||||
"version": "1.2.25",
|
||||
"version": "1.2.27",
|
||||
"description": "HomeKit Plugin for Scrypted",
|
||||
"scripts": {
|
||||
"scrypted-setup-project": "scrypted-setup-project",
|
||||
|
||||
119 plugins/homekit/src/types/airpurifier.ts (Normal file)
@@ -0,0 +1,119 @@
|
||||
import { ScryptedDevice, ScryptedDeviceType, ScryptedInterface, AirPurifierStatus, AirPurifierMode, AirPurifier, FilterMaintenance } from '@scrypted/sdk';
|
||||
import { addSupportedType, bindCharacteristic, DummyDevice, } from '../common';
|
||||
import { Characteristic, CharacteristicEventTypes, CharacteristicSetCallback, CharacteristicValue, Service } from '../hap';
|
||||
import { makeAccessory } from './common';
|
||||
import type { HomeKitPlugin } from "../main";
|
||||
|
||||
addSupportedType({
|
||||
type: ScryptedDeviceType.AirPurifier,
|
||||
probe(device: DummyDevice): boolean {
|
||||
return device.interfaces.includes(ScryptedInterface.AirPurifier);
|
||||
},
|
||||
getAccessory: async (device: ScryptedDevice & AirPurifier & FilterMaintenance, homekitPlugin: HomeKitPlugin) => {
|
||||
const accessory = makeAccessory(device, homekitPlugin);
|
||||
|
||||
const service = accessory.addService(Service.AirPurifier, device.name);
|
||||
const nightModeService = accessory.addService(Service.Switch, `${device.name} Night Mode`)
|
||||
|
||||
/* On/Off AND mode toggle */
|
||||
bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.Active,
|
||||
() => {
|
||||
switch(device.airPurifierState.status) {
|
||||
case AirPurifierStatus.Active:
|
||||
return Characteristic.Active.ACTIVE;
|
||||
case AirPurifierStatus.ActiveNightMode:
|
||||
return Characteristic.Active.ACTIVE;
|
||||
}
|
||||
return Characteristic.Active.INACTIVE;
|
||||
});
|
||||
|
||||
service.getCharacteristic(Characteristic.Active)
|
||||
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
|
||||
callback();
|
||||
device.setAirPurifierState({
|
||||
status: (value as boolean) ? AirPurifierStatus.Active : AirPurifierStatus.Inactive,
|
||||
})
|
||||
});
|
||||
|
||||
/* Current State */
|
||||
bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.CurrentAirPurifierState,
|
||||
() => {
|
||||
switch (device.airPurifierState.status) {
|
||||
case AirPurifierStatus.Inactive:
|
||||
return Characteristic.CurrentAirPurifierState.INACTIVE;
|
||||
case AirPurifierStatus.Idle:
|
||||
return Characteristic.CurrentAirPurifierState.IDLE;
|
||||
}
|
||||
return Characteristic.CurrentAirPurifierState.PURIFYING_AIR;
|
||||
});
|
||||
|
||||
/* Fan Speed */
|
||||
bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.RotationSpeed,
|
||||
() => device.airPurifierState.speed);
|
||||
|
||||
service.getCharacteristic(Characteristic.RotationSpeed)
|
||||
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
|
||||
callback();
|
||||
device.setAirPurifierState({
|
||||
speed: value,
|
||||
})
|
||||
})
|
||||
|
||||
/* i.e. Mode: Manual/Auto slider */
|
||||
bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.TargetAirPurifierState,
|
||||
() => {
|
||||
if (device.airPurifierState.mode == AirPurifierMode.Automatic)
|
||||
return Characteristic.TargetAirPurifierState.AUTO;
|
||||
return Characteristic.TargetAirPurifierState.MANUAL;
|
||||
});
|
||||
|
||||
service.getCharacteristic(Characteristic.TargetAirPurifierState)
|
||||
.on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
|
||||
                callback();
                device.setAirPurifierState({
                    mode: value === Characteristic.TargetAirPurifierState.AUTO ? AirPurifierMode.Automatic : AirPurifierMode.Manual,
                })
            });

        /* LockPhysicalControls i.e. "Child Lock: Unlocked/Locked" */
        bindCharacteristic(device, ScryptedInterface.AirPurifier, service, Characteristic.LockPhysicalControls,
            () => !!device.airPurifierState.lockPhysicalControls);

        service.getCharacteristic(Characteristic.LockPhysicalControls)
            .on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
                callback();
                device.setAirPurifierState({
                    lockPhysicalControls: (value as boolean),
                })
            })

        /* Night mode switch */
        bindCharacteristic(device, ScryptedInterface.AirPurifier, nightModeService, Characteristic.On,
            () => !!(device.airPurifierState.status === AirPurifierStatus.ActiveNightMode));

        nightModeService.getCharacteristic(Characteristic.On)
            .on(CharacteristicEventTypes.SET, (value: CharacteristicValue, callback: CharacteristicSetCallback) => {
                callback();
                device.setAirPurifierState({
                    status: value ? AirPurifierStatus.ActiveNightMode : AirPurifierStatus.Active,
                })
            })

        /* Optional: Filter Maintenance Service */
        if (device.interfaces.includes(ScryptedInterface.FilterMaintenance)) {
            const filterMaintenanceService = accessory.addService(Service.FilterMaintenance, device.name);

            bindCharacteristic(device, ScryptedInterface.FilterMaintenance, filterMaintenanceService, Characteristic.FilterLifeLevel,
                () => device.filterLifeLevel)

            bindCharacteristic(device, ScryptedInterface.FilterMaintenance, filterMaintenanceService, Characteristic.FilterChangeIndication,
                () => {
                    if (device.filterChangeIndication)
                        return Characteristic.FilterChangeIndication.CHANGE_FILTER;
                    return Characteristic.FilterChangeIndication.FILTER_OK;
                })
        }

        return accessory;
    }
});

@@ -8,12 +8,32 @@ import { probe } from './onoff-base';
addSupportedType({
    type: ScryptedDeviceType.Fan,
    probe(device: DummyDevice) {
        if (device.interfaces.includes(ScryptedInterface.OnOff))
            return true;
        if (!device.interfaces.includes(ScryptedInterface.Fan))
            return false;
        return true;
    },
    getAccessory: async (device: ScryptedDevice & TemperatureSetting & Thermometer & HumiditySensor & OnOff & Fan & HumiditySetting & AirQualitySensor & PM10Sensor & PM25Sensor & VOCSensor & NOXSensor & CO2Sensor, homekitPlugin: HomeKitPlugin) => {
        const accessory = makeAccessory(device, homekitPlugin);

        // simple on/off fan.
        if (!device.interfaces.includes(ScryptedInterface.Fan)) {
            const fanService = accessory.addService(Service.Fan);
            bindCharacteristic(device, ScryptedInterface.OnOff, fanService, Characteristic.On,
                () => !!device.on);

            fanService.getCharacteristic(Characteristic.On).on(CharacteristicEventTypes.SET, (value, callback) => {
                callback();
                if (value)
                    device.turnOn();
                else
                    device.turnOff();
            });

            return accessory;
        }

        const service = addFan(device, accessory);
        service.setPrimaryService();

@@ -15,3 +15,4 @@ import './vacuum';
import './outlet';
import './notifier';
import './windowcovering'
import './airpurifier'
4  plugins/objectdetector/package-lock.json  generated
@@ -1,12 +1,12 @@
{
    "name": "@scrypted/objectdetector",
    "version": "0.0.140",
    "version": "0.0.141",
    "lockfileVersion": 2,
    "requires": true,
    "packages": {
        "": {
            "name": "@scrypted/objectdetector",
            "version": "0.0.140",
            "version": "0.0.141",
            "license": "Apache-2.0",
            "dependencies": {
                "@scrypted/common": "file:../../common",

@@ -1,6 +1,6 @@
{
    "name": "@scrypted/objectdetector",
    "version": "0.0.140",
    "version": "0.0.141",
    "description": "Scrypted Video Analysis Plugin. Installed alongside a detection service like OpenCV or TensorFlow.",
    "author": "Scrypted",
    "license": "Apache-2.0",

@@ -414,15 +414,15 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
        for (const mixin of detectorMixin.currentMixins.values()) {
            if (mixin.id !== this.id)
                continue;
            for (const [key, zi] of Object.entries(mixin.zoneInfos)) {
                const zone = mixin.zones[key];
            for (const [key, zone] of Object.entries(mixin.zones)) {
                const zi = mixin.zoneInfos[key];
                if (!zone?.length || zone?.length < 3)
                    continue;
                const odz: ObjectDetectionZone = {
                    classes: mixin.hasMotionType ? ['motion'] : zi.classes,
                    exclusion: zi.exclusion,
                    classes: mixin.hasMotionType ? ['motion'] : zi?.classes,
                    exclusion: zi?.exclusion,
                    path: zone,
                    type: zi.type,
                    type: zi?.type,
                }
                zones.push(odz);
            }
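The hunk above flips the iteration so the configured zones drive the loop and the per-zone metadata is looked up optionally; a zone with fewer than three vertices cannot form a polygon and is skipped. A minimal sketch of the same idea in Python, with hypothetical zones/zone_infos dicts standing in for the mixin state:

    # Build detection zones from configured polygons; zone info (classes, exclusion, type) is optional.
    def build_zones(zones: dict, zone_infos: dict, has_motion_type: bool) -> list:
        result = []
        for key, path in zones.items():
            zi = zone_infos.get(key) or {}
            if not path or len(path) < 3:
                # fewer than three points is not a valid polygon
                continue
            result.append({
                'classes': ['motion'] if has_motion_type else zi.get('classes'),
                'exclusion': zi.get('exclusion'),
                'path': path,
                'type': zi.get('type'),
            })
        return result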
4  plugins/opencv/package-lock.json  generated
@@ -1,12 +1,12 @@
{
    "name": "@scrypted/opencv",
    "version": "0.0.81",
    "version": "0.0.85",
    "lockfileVersion": 2,
    "requires": true,
    "packages": {
        "": {
            "name": "@scrypted/opencv",
            "version": "0.0.81",
            "version": "0.0.85",
            "devDependencies": {
                "@scrypted/sdk": "file:../../sdk"
            }

@@ -26,8 +26,7 @@
    "runtime": "python",
    "type": "API",
    "interfaces": [
        "ObjectDetection",
        "Settings"
        "ObjectDetection"
    ],
    "pluginDependencies": [
        "@scrypted/objectdetector",
@@ -37,5 +36,5 @@
    "devDependencies": {
        "@scrypted/sdk": "file:../../sdk"
    },
    "version": "0.0.81"
    "version": "0.0.85"
}

@@ -53,7 +53,7 @@ class OpenCVDetectionSession:
        self.lastFrame = 0


defaultThreshold = 25
defaultThreshold = 50
defaultArea = 200
defaultBlur = 5
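For context on how these defaults are typically applied, here is a hedged sketch of the blur/threshold/area style of motion check, assuming OpenCV is available; the function and variable names are illustrative, not the plugin's actual code:

    import cv2

    defaultThreshold = 50   # minimum per-pixel delta to count as change
    defaultArea = 200       # minimum contour area to report motion
    defaultBlur = 5         # blur kernel to suppress sensor noise

    def detect_motion(prev_gray, frame_bgr):
        gray = cv2.cvtColor(frame_bgr, cv2.COLOR_BGR2GRAY)
        gray = cv2.blur(gray, (defaultBlur, defaultBlur))
        if prev_gray is None:
            return gray, False
        delta = cv2.absdiff(prev_gray, gray)
        _, thresh = cv2.threshold(delta, defaultThreshold, 255, cv2.THRESH_BINARY)
        contours, _ = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
        motion = any(cv2.contourArea(c) >= defaultArea for c in contours)
        return gray, motion

Raising the threshold from 25 to 50 makes the per-pixel test less sensitive before the area filter is applied.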
8
plugins/openvino/.vscode/settings.json
vendored
8
plugins/openvino/.vscode/settings.json
vendored
@@ -1,16 +1,16 @@
|
||||
|
||||
{
|
||||
// docker installation
|
||||
// "scrypted.debugHost": "koushik-ubuntu",
|
||||
// "scrypted.serverRoot": "/server",
|
||||
"scrypted.debugHost": "koushik-ubuntu",
|
||||
"scrypted.serverRoot": "/server",
|
||||
|
||||
// pi local installation
|
||||
// "scrypted.debugHost": "192.168.2.119",
|
||||
// "scrypted.serverRoot": "/home/pi/.scrypted",
|
||||
|
||||
// local checkout
|
||||
"scrypted.debugHost": "127.0.0.1",
|
||||
"scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
// "scrypted.debugHost": "127.0.0.1",
|
||||
// "scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
// "scrypted.debugHost": "koushik-windows",
|
||||
// "scrypted.serverRoot": "C:\\Users\\koush\\.scrypted",
|
||||
|
||||
|
||||
4
plugins/openvino/package-lock.json
generated
4
plugins/openvino/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/openvino",
|
||||
"version": "0.1.18",
|
||||
"version": "0.1.22",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/openvino",
|
||||
"version": "0.1.18",
|
||||
"version": "0.1.22",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
|
||||
@@ -33,11 +33,12 @@
    "runtime": "python",
    "type": "API",
    "interfaces": [
        "ObjectDetection"
        "ObjectDetection",
        "Settings"
    ]
    },
    "devDependencies": {
        "@scrypted/sdk": "file:../../sdk"
    },
    "version": "0.1.18"
    "version": "0.1.22"
}
|
||||
@@ -9,6 +9,7 @@ from typing import Any, Tuple
|
||||
import openvino.runtime as ov
|
||||
import scrypted_sdk
|
||||
from PIL import Image
|
||||
from scrypted_sdk.other import SettingValue
|
||||
from scrypted_sdk.types import Setting
|
||||
|
||||
from predict import PredictPlugin, Prediction, Rectangle
|
||||
@@ -39,7 +40,15 @@ class OpenVINOPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.S
        mappingFile = self.downloadFile('https://raw.githubusercontent.com/koush/openvino-models/main/ssd_mobilenet_v1_coco/FP16/ssd_mobilenet_v1_coco.mapping', 'ssd_mobilenet_v1_coco.mapping')
        labelsFile = self.downloadFile('https://raw.githubusercontent.com/koush/openvino-models/main/ssd_mobilenet_v1_coco/FP16/ssd_mobilenet_v1_coco.bin', 'ssd_mobilenet_v1_coco.bin')

        self.compiled_model = self.core.compile_model(xmlFile, "AUTO")
        mode = self.storage.getItem('mode') or 'AUTO'
        try:
            self.compiled_model = self.core.compile_model(xmlFile, mode)
        except:
            import traceback
            traceback.print_exc()
            print("Reverting to AUTO mode.")
            self.storage.removeItem('mode')
            asyncio.run_coroutine_threadsafe(scrypted_sdk.deviceManager.requestRestart(), asyncio.get_event_loop())

        labelsFile = self.downloadFile('https://raw.githubusercontent.com/google-coral/test_data/master/coco_labels.txt', 'coco_labels.txt')
        labels_contents = open(labelsFile, 'r').read()
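The pattern above — compile for the user-selected device, and on failure clear the setting so the next start falls back to AUTO — can be summarized in isolation. A hedged sketch, with a hypothetical storage dict standing in for the plugin's key-value storage:

    import openvino.runtime as ov

    def compile_with_fallback(core: ov.Core, xml_path: str, storage: dict):
        mode = storage.get('mode') or 'AUTO'
        try:
            # AUTO lets OpenVINO pick a device; CPU/GPU pin it explicitly.
            return core.compile_model(xml_path, mode)
        except Exception:
            # Unreliable GPU drivers are a common failure; revert to AUTO so a
            # later restart succeeds without user intervention.
            storage.pop('mode', None)
            return core.compile_model(xml_path, 'AUTO')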
@@ -48,7 +57,25 @@ class OpenVINOPlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.S
        self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=1, thread_name_prefix="openvino", )

    async def getSettings(self) -> list[Setting]:
        return []
        mode = self.storage.getItem('mode') or 'AUTO'
        return [
            {
                'key': 'mode',
                'title': 'Mode',
                'description': 'AUTO, CPU, or GPU mode to use for detections. Requires plugin reload. Use CPU if the system has unreliable GPU drivers.',
                'choices': [
                    'AUTO',
                    'CPU',
                    'GPU',
                ],
                'value': mode,
            }
        ]

    async def putSetting(self, key: str, value: SettingValue):
        self.storage.setItem(key, value)
        await self.onDeviceEvent(scrypted_sdk.ScryptedInterface.Settings.value, None)
        await scrypted_sdk.deviceManager.requestRestart()

    # width, height, channels
    def get_input_details(self) -> Tuple[int, int, int]:
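The setting is round-tripped through plugin storage and only takes effect after the restart the plugin requests for itself. A hedged usage sketch against the interface shown above, with a hypothetical plugin handle:

    async def switch_to_gpu(plugin):
        # Persist the new value; the plugin emits a Settings event and requests a restart.
        await plugin.putSetting('mode', 'GPU')
        # After the restart, getSettings() reports 'GPU' as the current value.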
4
plugins/prebuffer-mixin/package-lock.json
generated
4
plugins/prebuffer-mixin/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/prebuffer-mixin",
|
||||
"version": "0.9.90",
|
||||
"version": "0.9.92",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/prebuffer-mixin",
|
||||
"version": "0.9.90",
|
||||
"version": "0.9.92",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@scrypted/common": "file:../../common",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/prebuffer-mixin",
|
||||
"version": "0.9.90",
|
||||
"version": "0.9.92",
|
||||
"description": "Video Stream Rebroadcast, Prebuffer, and Management Plugin for Scrypted.",
|
||||
"author": "Scrypted",
|
||||
"license": "Apache-2.0",
|
||||
|
||||
@@ -29,14 +29,6 @@ import { TRANSCODE_MIXIN_PROVIDER_NATIVE_ID, TranscodeMixinProvider, getTranscod
|
||||
const { mediaManager, log, systemManager, deviceManager } = sdk;
|
||||
|
||||
const prebufferDurationMs = 10000;
|
||||
const DEFAULT_AUDIO = 'Default';
|
||||
const AAC_AUDIO = 'AAC or No Audio';
|
||||
const AAC_AUDIO_DESCRIPTION = `${AAC_AUDIO} (Copy)`;
|
||||
const COMPATIBLE_AUDIO = 'Compatible Audio'
|
||||
const COMPATIBLE_AUDIO_DESCRIPTION = `${COMPATIBLE_AUDIO} (Copy)`;
|
||||
const TRANSCODE_AUDIO = 'Other Audio';
|
||||
const TRANSCODE_AUDIO_DESCRIPTION = `${TRANSCODE_AUDIO} (Transcode)`;
|
||||
const COMPATIBLE_AUDIO_CODECS = ['aac', 'mp3', 'mp2', 'opus'];
|
||||
const DEFAULT_FFMPEG_INPUT_ARGUMENTS = '-fflags +genpts';
|
||||
|
||||
const SCRYPTED_PARSER_TCP = 'Scrypted (TCP)';
|
||||
@@ -45,12 +37,6 @@ const FFMPEG_PARSER_TCP = 'FFmpeg (TCP)';
|
||||
const FFMPEG_PARSER_UDP = 'FFmpeg (UDP)';
|
||||
const STRING_DEFAULT = 'Default';
|
||||
|
||||
const VALID_AUDIO_CONFIGS = [
|
||||
AAC_AUDIO,
|
||||
COMPATIBLE_AUDIO,
|
||||
TRANSCODE_AUDIO,
|
||||
];
|
||||
|
||||
interface PrebufferStreamChunk extends StreamChunk {
|
||||
time?: number;
|
||||
}
|
||||
@@ -221,27 +207,6 @@ class PrebufferSession {
|
||||
this.parserSessionPromise.then(pso => pso.killed.finally(() => this.parserSessionPromise = undefined));
|
||||
}
|
||||
|
||||
getAudioConfig(): {
|
||||
isUsingDefaultAudioConfig: boolean,
|
||||
aacAudio: boolean,
|
||||
compatibleAudio: boolean,
|
||||
reencodeAudio: boolean,
|
||||
} {
|
||||
let audioConfig = this.storage.getItem(this.audioConfigurationKey) || '';
|
||||
if (!VALID_AUDIO_CONFIGS.find(config => audioConfig.startsWith(config)))
|
||||
audioConfig = '';
|
||||
const aacAudio = audioConfig.indexOf(AAC_AUDIO) !== -1;
|
||||
const compatibleAudio = audioConfig.indexOf(COMPATIBLE_AUDIO) !== -1;
|
||||
// reencode audio will be used if explicitly set.
|
||||
const reencodeAudio = audioConfig.indexOf(TRANSCODE_AUDIO) !== -1;
|
||||
return {
|
||||
isUsingDefaultAudioConfig: !(aacAudio || compatibleAudio || reencodeAudio),
|
||||
aacAudio,
|
||||
compatibleAudio,
|
||||
reencodeAudio,
|
||||
}
|
||||
}
|
||||
|
||||
canUseRtspParser(mediaStreamOptions: MediaStreamOptions) {
|
||||
return mediaStreamOptions?.container?.startsWith('rtsp');
|
||||
}
|
||||
@@ -398,7 +363,7 @@ class PrebufferSession {
|
||||
title: 'Detected Video/Audio Codecs',
|
||||
readonly: true,
|
||||
value: (session?.inputVideoCodec?.toString() || 'unknown') + '/' + (session?.inputAudioCodec?.toString() || 'unknown'),
|
||||
description: 'Configuring your camera to H264 video and Opus, PCM, or AAC audio is recommended.'
|
||||
description: 'Configuring your camera to H264 video, and audio to Opus or PCM-mulaw (G.711ulaw) is recommended.'
|
||||
},
|
||||
{
|
||||
key: 'detectedKeyframe',
|
||||
@@ -467,85 +432,23 @@ class PrebufferSession {
        const audioSoftMuted = mso?.audio === null;
        const advertisedAudioCodec = mso?.audio?.codec;

        const { isUsingDefaultAudioConfig, aacAudio, compatibleAudio, reencodeAudio } = this.getAudioConfig();

        let detectedAudioCodec = this.storage.getItem(this.lastDetectedAudioCodecKey) || undefined;
        if (detectedAudioCodec === 'null')
            detectedAudioCodec = null;

        // the assumed audio codec is the detected codec first and the reported codec otherwise.
        const assumedAudioCodec = detectedAudioCodec === undefined
            ? advertisedAudioCodec?.toLowerCase()
            : detectedAudioCodec?.toLowerCase();

        // after probing the audio codec is complete, alert the user with appropriate instructions.
        // assume the codec is user configurable unless the camera explictly reports otherwise.
        const audioIncompatible = !COMPATIBLE_AUDIO_CODECS.includes(assumedAudioCodec);

        // aac needs to have the adts header stripped for mpegts and mp4.
        // use this filter sparingly as it prevents ffmpeg from starting on a mismatch.
        // however, not using it on an aac stream also prevents ffmpeg from parsing.
        // so only use it when the detected or probe codec reports aac.
        const aacFilters = ['-bsf:a', 'aac_adtstoasc'];
        // compatible audio like mp3, mp2, opus can be muxed without issue.
        const compatibleFilters = [];

        this.audioDisabled = false;
        let acodec: string[];

        const detectedNoAudio = detectedAudioCodec === null;

        // if the camera reports audio is incompatible and the user can't do anything about it
        // enable transcoding by default. however, still allow the user to change the settings
        // in case something changed.
        let mustTranscode = false;

        if (audioSoftMuted) {
            // no audio? explicitly disable it.
            acodec = ['-an'];
            this.audioDisabled = true;
        }
        else if (reencodeAudio || mustTranscode) {
            acodec = [
                '-bsf:a', 'aac_adtstoasc',
                '-acodec', 'aac',
                '-ar', `32k`,
                '-b:a', `32k`,
                '-ac', `1`,
                '-profile:a', 'aac_low',
                '-flags', '+global_header',
            ];
        }
        else if (aacAudio || detectedNoAudio) {
            // NOTE: If there is no audio track, the aac filters will still work fine without complaints
            // from ffmpeg. This is why AAC and No Audio can be grouped into a single setting.
            // This is preferred, because failure and recovery is preferable to
            // permanently muting camera audio due to erroneous detection.
            acodec = [
                '-acodec',
                'copy',
            ];
            acodec.push(...aacFilters);
        }
        else if (compatibleAudio) {
            acodec = [
                '-acodec',
                'copy',
            ];
            acodec.push(...compatibleFilters);
        }
        else {
            acodec = [
                '-acodec',
                'copy',
            ];

            const filters = assumedAudioCodec === 'aac' ? aacFilters : compatibleFilters;

            acodec.push(...filters);
        }

        const vcodec = [
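The branch above reduces to a small decision table: mute, transcode to low-profile AAC, or copy the stream with the ADTS filter applied only when the stream is (or is assumed to be) AAC. A hedged sketch of the same selection, written in Python for brevity; the flags mirror the ffmpeg arguments shown in the diff, and the helper name is illustrative:

    COMPATIBLE_AUDIO_CODECS = ['aac', 'mp3', 'mp2', 'opus']

    def pick_audio_args(soft_muted: bool, reencode: bool, assumed_codec: str) -> list:
        if soft_muted:
            return ['-an']                      # explicitly disable audio
        if reencode:
            return ['-bsf:a', 'aac_adtstoasc',  # strip ADTS headers before re-muxing
                    '-acodec', 'aac',
                    '-ar', '32k', '-b:a', '32k', '-ac', '1',
                    '-profile:a', 'aac_low',
                    '-flags', '+global_header']
        args = ['-acodec', 'copy']
        if assumed_codec == 'aac':
            # only apply the ADTS filter when the stream really is AAC,
            # otherwise ffmpeg refuses to start.
            args += ['-bsf:a', 'aac_adtstoasc']
        return args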
@@ -707,6 +610,8 @@ class PrebufferSession {
|
||||
}, h264Oddities ? 60000 : 10000);
|
||||
}
|
||||
|
||||
await session.sdp;
|
||||
|
||||
// complain to the user about the codec if necessary. upstream may send a audio
|
||||
// stream but report none exists (to request muting).
|
||||
if (!audioSoftMuted && advertisedAudioCodec && session.inputAudioCodec !== undefined
|
||||
@@ -723,12 +628,6 @@ class PrebufferSession {
|
||||
if (!session.inputAudioCodec) {
|
||||
this.console.log('No audio stream detected.');
|
||||
}
|
||||
else if (!COMPATIBLE_AUDIO_CODECS.includes(session.inputAudioCodec?.toLowerCase())) {
|
||||
this.console.log('Detected audio codec is not mp4/mpegts compatible.', session.inputAudioCodec);
|
||||
}
|
||||
else {
|
||||
this.console.log('Detected audio codec is mp4/mpegts compatible.', session.inputAudioCodec);
|
||||
}
|
||||
|
||||
// set/update the detected codec, set it to null if no audio was found.
|
||||
this.storage.setItem(this.lastDetectedAudioCodecKey, session.inputAudioCodec || 'null');
|
||||
@@ -1115,18 +1014,9 @@ class PrebufferSession {

        mediaStreamOptions.prebuffer = requestedPrebuffer;

        const { reencodeAudio } = this.getAudioConfig();

        if (this.audioDisabled) {
            mediaStreamOptions.audio = null;
        }
        else if (reencodeAudio) {
            mediaStreamOptions.audio = {
                codec: 'aac',
                encoder: 'aac',
                profile: 'aac_low',
            }
        }

        if (session.inputVideoResolution?.width && session.inputVideoResolution?.height) {
            // this may be an audio only request.
@@ -1416,7 +1306,7 @@ class PrebufferMixin extends SettingsMixinDeviceBase<VideoCamera> implements Vid
        const cloud = msos?.find(mso => mso.source === 'cloud');
        if (cloud) {
            this.storage.setItem('warnedCloud', 'true');
            log.a(`${this.name} is a cloud camera. Prebuffering maintains a persistent stream and will not enabled by default. You must enable the Prebuffer stream manually.`)
            log.a(`${this.name} is a cloud camera. Prebuffering maintains a persistent stream and will not be enabled by default. You must enable the Prebuffer stream manually.`)
        }
    }
|
||||
|
||||
14
plugins/python-codecs/.vscode/settings.json
vendored
14
plugins/python-codecs/.vscode/settings.json
vendored
@@ -1,8 +1,8 @@
|
||||
|
||||
{
|
||||
// docker installation
|
||||
// "scrypted.debugHost": "koushik-ubuntu",
|
||||
// "scrypted.serverRoot": "/server",
|
||||
"scrypted.debugHost": "koushik-ubuntu",
|
||||
"scrypted.serverRoot": "/server",
|
||||
|
||||
// windows installation
|
||||
// "scrypted.debugHost": "koushik-windows",
|
||||
@@ -13,11 +13,15 @@
|
||||
// "scrypted.serverRoot": "/home/pi/.scrypted",
|
||||
|
||||
// local checkout
|
||||
"scrypted.debugHost": "127.0.0.1",
|
||||
"scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
// "scrypted.debugHost": "127.0.0.1",
|
||||
// "scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
|
||||
"scrypted.pythonRemoteRoot": "${config:scrypted.serverRoot}/volume/plugin.zip",
|
||||
"python.analysis.extraPaths": [
|
||||
"./node_modules/@scrypted/sdk/types/scrypted_python"
|
||||
]
|
||||
],
|
||||
"[python]": {
|
||||
"editor.defaultFormatter": "ms-python.black-formatter"
|
||||
},
|
||||
"python.formatting.provider": "none"
|
||||
}
|
||||
4
plugins/python-codecs/package-lock.json
generated
4
plugins/python-codecs/package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@scrypted/python-codecs",
|
||||
"version": "0.1.57",
|
||||
"version": "0.1.72",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@scrypted/python-codecs",
|
||||
"version": "0.1.57",
|
||||
"version": "0.1.72",
|
||||
"devDependencies": {
|
||||
"@scrypted/sdk": "file:../../sdk"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@scrypted/python-codecs",
|
||||
"version": "0.1.57",
|
||||
"version": "0.1.72",
|
||||
"description": "Python Codecs for Scrypted",
|
||||
"keywords": [
|
||||
"scrypted",
|
||||
|
||||
@@ -12,3 +12,13 @@ def createVideoFrame(image) -> scrypted_sdk.VideoFrame:
|
||||
'timestamp': time.time() * 1000,
|
||||
'flush': flush,
|
||||
}
|
||||
|
||||
async def createImageMediaObject(image: scrypted_sdk.Image):
|
||||
ret = await scrypted_sdk.mediaManager.createMediaObject(image, scrypted_sdk.ScryptedMimeTypes.Image.value, {
|
||||
'format': None,
|
||||
'width': image.width,
|
||||
'height': image.height,
|
||||
'toBuffer': lambda options = None: image.toBuffer(options),
|
||||
'toImage': lambda options = None: image.toImage(options),
|
||||
})
|
||||
return ret
|
||||
|
||||
@@ -12,36 +12,54 @@ try:
|
||||
GObject.threads_init()
|
||||
Gst.init(None)
|
||||
except:
|
||||
pass
|
||||
Gst = None
|
||||
|
||||
async def createPipelineIterator(pipeline: str):
|
||||
async def createPipelineIterator(pipeline: str, gst = None):
|
||||
loop = asyncio.get_running_loop()
|
||||
pipeline = '{pipeline} ! queue leaky=downstream max-size-buffers=0 ! appsink name=appsink emit-signals=true sync=false max-buffers=-1 drop=true'.format(pipeline=pipeline)
|
||||
pipeline = '{pipeline} ! appsink name=appsink emit-signals=true sync=false'.format(pipeline=pipeline)
|
||||
print(pipeline)
|
||||
gst = Gst.parse_launch(pipeline)
|
||||
bus = gst.get_bus()
|
||||
finished = concurrent.futures.Future()
|
||||
|
||||
def on_bus_message(bus, message):
|
||||
t = str(message.type)
|
||||
# print(t)
|
||||
if t == str(Gst.MessageType.EOS):
|
||||
print('EOS: Stream ended.')
|
||||
finish()
|
||||
elif t == str(Gst.MessageType.WARNING):
|
||||
err, debug = message.parse_warning()
|
||||
print('Warning: %s: %s\n' % (err, debug))
|
||||
print('Ending stream due to warning. If this camera is causing errors, switch to the libav decoder.');
|
||||
finish();
|
||||
elif t == str(Gst.MessageType.ERROR):
|
||||
err, debug = message.parse_error()
|
||||
print('Error: %s: %s\n' % (err, debug))
|
||||
finish()
|
||||
newGst = not gst
|
||||
if gst:
|
||||
bin = Gst.parse_bin_from_description(pipeline, False)
|
||||
gst.add(bin)
|
||||
gst = bin
|
||||
|
||||
def stopGst():
|
||||
bus.remove_signal_watch()
|
||||
bus.disconnect(watchId)
|
||||
gst.set_state(Gst.State.NULL)
|
||||
def stopGst():
|
||||
gst.set_state(Gst.State.NULL)
|
||||
|
||||
else:
|
||||
gst = Gst.parse_launch(pipeline)
|
||||
|
||||
def on_bus_message(bus, message):
|
||||
t = str(message.type)
|
||||
# print(t)
|
||||
if t == str(Gst.MessageType.EOS):
|
||||
print('EOS: Stream ended.')
|
||||
finish()
|
||||
elif t == str(Gst.MessageType.WARNING):
|
||||
err, debug = message.parse_warning()
|
||||
print('Warning: %s: %s\n' % (err, debug))
|
||||
print('Ending stream due to warning. If this camera is causing errors, switch to the libav decoder.');
|
||||
finish()
|
||||
elif t == str(Gst.MessageType.ERROR):
|
||||
err, debug = message.parse_error()
|
||||
print('Error: %s: %s\n' % (err, debug))
|
||||
finish()
|
||||
|
||||
bus = gst.get_bus()
|
||||
watchId = bus.connect('message', on_bus_message)
|
||||
bus.add_signal_watch()
|
||||
|
||||
def stopGst():
|
||||
bus.remove_signal_watch()
|
||||
bus.disconnect(watchId)
|
||||
gst.set_state(Gst.State.NULL)
|
||||
|
||||
finished.add_done_callback(lambda _: threading.Thread(target=stopGst, name="StopGst").start())
|
||||
|
||||
hasFinished = False
|
||||
def finish():
|
||||
nonlocal hasFinished
|
||||
hasFinished = True
|
||||
@@ -50,12 +68,6 @@ async def createPipelineIterator(pipeline: str):
|
||||
if not finished.done():
|
||||
finished.set_result(None)
|
||||
|
||||
watchId = bus.connect('message', on_bus_message)
|
||||
bus.add_signal_watch()
|
||||
|
||||
finished = concurrent.futures.Future()
|
||||
finished.add_done_callback(lambda _: threading.Thread(target=stopGst, name="StopGst").start())
|
||||
hasFinished = False
|
||||
|
||||
appsink = gst.get_by_name('appsink')
|
||||
yieldQueue = Queue()
|
||||
@@ -76,10 +88,10 @@ async def createPipelineIterator(pipeline: str):
|
||||
finish()
|
||||
|
||||
|
||||
def on_new_sample(sink, preroll):
|
||||
def on_new_sample(sink):
|
||||
nonlocal hasFinished
|
||||
|
||||
sample = sink.emit('pull-preroll' if preroll else 'pull-sample')
|
||||
sample = sink.emit('pull-sample')
|
||||
|
||||
if hasFinished:
|
||||
return Gst.FlowReturn.OK
|
||||
@@ -91,18 +103,40 @@ async def createPipelineIterator(pipeline: str):
|
||||
pass
|
||||
return Gst.FlowReturn.OK
|
||||
|
||||
appsink.connect('new-preroll', on_new_sample, True)
|
||||
appsink.connect('new-sample', on_new_sample, False)
|
||||
appsink.connect('new-sample', on_new_sample)
|
||||
|
||||
gst.set_state(Gst.State.PLAYING)
|
||||
return gst, gen
|
||||
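Stripped of Scrypted's bin-reuse and asyncio plumbing, the pipeline iterator above is a standard GStreamer appsink consumer. A minimal, hedged sketch of that pattern as a standalone script; the pipeline string is illustrative:

    import gi
    gi.require_version('Gst', '1.0')
    from gi.repository import Gst

    Gst.init(None)

    def run(pipeline_description: str):
        # append an appsink so samples can be pulled from Python
        pipeline = Gst.parse_launch(
            pipeline_description + ' ! appsink name=appsink emit-signals=true sync=false')
        appsink = pipeline.get_by_name('appsink')

        def on_new_sample(sink):
            sample = sink.emit('pull-sample')   # Gst.Sample
            print('got sample', sample.get_caps().to_string())
            return Gst.FlowReturn.OK

        appsink.connect('new-sample', on_new_sample)
        pipeline.set_state(Gst.State.PLAYING)
        bus = pipeline.get_bus()
        # block until EOS or error, then tear down
        bus.timed_pop_filtered(Gst.CLOCK_TIME_NONE,
                               Gst.MessageType.EOS | Gst.MessageType.ERROR)
        pipeline.set_state(Gst.State.NULL)

    if __name__ == '__main__':
        run('videotestsrc num-buffers=30 ! videoconvert ! video/x-raw,format=RGB')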
|
||||
def mainThread():
|
||||
async def asyncMain():
|
||||
gst, gen = createPipelineIterator('rtspsrc location=rtsp://localhost:59668/18cc179a814fd5b3 ! rtph264depay ! h264parse ! vtdec_hw ! videoconvert ! video/x-raw')
|
||||
gst, gen = await createPipelineIterator('rtspsrc location=rtsp://localhost:63876/674e895e04ddfd15 ! rtph264depay ! h264parse ! vtdec_hw ! video/x-raw(memory:GLMemory)')
|
||||
i = 0
|
||||
first = True
|
||||
async for sample in gen():
|
||||
print('sample')
|
||||
import time
|
||||
print(time.time())
|
||||
if first:
|
||||
first = False
|
||||
|
||||
for i in range(1, 10):
|
||||
caps = sample.get_caps()
|
||||
p = "appsrc name=appsrc emit-signals=True is-live=True \
|
||||
caps={caps} ! videocrop left=0 top=0 right=10 bottom=10 ! gldownload".format(caps = caps.to_string().replace(' ', ''))
|
||||
# p = "appsrc name=appsrc emit-signals=True is-live=True \
|
||||
# caps={caps} ! gldownload !\
|
||||
# videoconvert ! videoscale name=videoscale ! video/x-raw,format=RGB,width=640,height=480".format(caps = caps.to_string().replace(' ', ''))
|
||||
gst2, gen2 = await createPipelineIterator(p)
|
||||
appsrc = gst2.get_by_name('appsrc')
|
||||
vs = gst2.get_by_name('videoscale')
|
||||
g2 = gen2()
|
||||
|
||||
buffer = sample.get_buffer()
|
||||
appsrc.emit("push-buffer", buffer)
|
||||
s2 = await g2.__anext__()
|
||||
print(time.time())
|
||||
await g2.aclose()
|
||||
|
||||
i = i + 1
|
||||
if i == 10:
|
||||
break
|
||||
@@ -112,6 +146,8 @@ def mainThread():
|
||||
loop.run_forever()
|
||||
|
||||
if __name__ == "__main__":
|
||||
test = 334
|
||||
foo = f"{test}"
|
||||
threading.Thread(target = mainThread).start()
|
||||
mainLoop = GLib.MainLoop()
|
||||
mainLoop.run()
|
||||
|
||||
@@ -1,139 +1,396 @@
|
||||
from gst_generator import createPipelineIterator
|
||||
from util import optional_chain
|
||||
import scrypted_sdk
|
||||
import asyncio
|
||||
import platform
|
||||
from asyncio import Future
|
||||
from typing import Any
|
||||
from urllib.parse import urlparse
|
||||
import vipsimage
|
||||
|
||||
import scrypted_sdk
|
||||
|
||||
import pilimage
|
||||
import platform
|
||||
from generator_common import createVideoFrame
|
||||
import vipsimage
|
||||
from generator_common import createImageMediaObject, createVideoFrame
|
||||
from gst_generator import Gst, createPipelineIterator
|
||||
from gstreamer_postprocess import (GstreamerFormatPostProcess,
|
||||
GstreamerPostProcess, OpenGLPostProcess,
|
||||
VaapiPostProcess, getBands)
|
||||
from util import optional_chain
|
||||
|
||||
Gst = None
|
||||
try:
|
||||
import gi
|
||||
gi.require_version('Gst', '1.0')
|
||||
gi.require_version('GstBase', '1.0')
|
||||
|
||||
from gi.repository import Gst
|
||||
except:
|
||||
pass
|
||||
class GstSession:
|
||||
def __init__(self, gst) -> None:
|
||||
self.gst = gst
|
||||
self.reuse = []
|
||||
|
||||
async def generateVideoFramesGstreamer(mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None, h264Decoder: str = None) -> scrypted_sdk.VideoFrame:
|
||||
ffmpegInput: scrypted_sdk.FFmpegInput = await scrypted_sdk.mediaManager.convertMediaObjectToJSON(mediaObject, scrypted_sdk.ScryptedMimeTypes.FFmpegInput.value)
|
||||
container = ffmpegInput.get('container', None)
|
||||
videosrc = ffmpegInput.get('url')
|
||||
videoCodec = optional_chain(ffmpegInput, 'mediaStreamOptions', 'video', 'codec')
|
||||
|
||||
if videosrc.startswith('tcp://'):
|
||||
parsed_url = urlparse(videosrc)
|
||||
videosrc = 'tcpclientsrc port=%s host=%s' % (
|
||||
parsed_url.port, parsed_url.hostname)
|
||||
if container == 'mpegts':
|
||||
videosrc += ' ! tsdemux'
|
||||
elif container == 'sdp':
|
||||
videosrc += ' ! sdpdemux'
|
||||
class GstImage(scrypted_sdk.Image):
|
||||
def __init__(self, gst: GstSession, sample, postProcessPipeline: str):
|
||||
super().__init__()
|
||||
caps = sample.get_caps()
|
||||
self.width = caps.get_structure(0).get_value("width")
|
||||
self.height = caps.get_structure(0).get_value("height")
|
||||
self.gst = gst
|
||||
self.sample = sample
|
||||
self.postProcessPipeline = postProcessPipeline
|
||||
self.cached: Future[scrypted_sdk.Image] = None
|
||||
|
||||
async def close(self):
|
||||
self.sample = None
|
||||
|
||||
async def toImage(self, options: scrypted_sdk.ImageOptions = None):
|
||||
options = options or {}
|
||||
# this is preferable currently because all detectors use rgb inputs
|
||||
# as opposed to yuv or rgba.
|
||||
# consider respecting the incoming format if provided?
|
||||
options["format"] = "rgb"
|
||||
|
||||
gstsample = await toGstSample(
|
||||
self.gst, self.sample, options, self.postProcessPipeline
|
||||
)
|
||||
caps = gstsample.get_caps()
|
||||
height = caps.get_structure(0).get_value("height")
|
||||
width = caps.get_structure(0).get_value("width")
|
||||
capsBands = getBands(caps)
|
||||
|
||||
gst_buffer = gstsample.get_buffer()
|
||||
result, info = gst_buffer.map(Gst.MapFlags.READ)
|
||||
if not result:
|
||||
raise Exception("unable to map gst buffer")
|
||||
|
||||
try:
|
||||
if vipsimage.pyvips:
|
||||
vips = vipsimage.new_from_memory(
|
||||
bytes(info.data), width, height, capsBands
|
||||
)
|
||||
image = vipsimage.VipsImage(vips)
|
||||
else:
|
||||
pil = pilimage.new_from_memory(
|
||||
bytes(info.data), width, height, capsBands
|
||||
)
|
||||
image = pilimage.PILImage(pil)
|
||||
|
||||
return await createImageMediaObject(image)
|
||||
finally:
|
||||
gst_buffer.unmap(info)
|
||||
|
||||
async def toBuffer(self, options: scrypted_sdk.ImageOptions = None):
|
||||
format = options and options.get("format")
|
||||
if format == "rgb":
|
||||
bands = 3
|
||||
elif format == "rgba":
|
||||
bands = 4
|
||||
elif format == "gray":
|
||||
bands = 1
|
||||
elif format == "jpg":
|
||||
bands = 0
|
||||
else:
|
||||
raise Exception('unknown container %s' % container)
|
||||
elif videosrc.startswith('rtsp'):
|
||||
videosrc = 'rtspsrc buffer-mode=0 location=%s protocols=tcp latency=0' % videosrc
|
||||
if videoCodec == 'h264':
|
||||
videosrc += ' ! rtph264depay ! h264parse'
|
||||
raise Exception(f"invalid output format {format}")
|
||||
|
||||
videocaps = 'video/x-raw'
|
||||
# if options and options.get('resize'):
|
||||
# videocaps = 'videoscale ! video/x-raw,width={width},height={height}'.format(width=options['resize']['width'], height=options['resize']['height'])
|
||||
gstsample = await toGstSample(
|
||||
self.gst, self.sample, options, self.postProcessPipeline
|
||||
)
|
||||
caps = gstsample.get_caps()
|
||||
height = caps.get_structure(0).get_value("height")
|
||||
width = caps.get_structure(0).get_value("width")
|
||||
# toGstSample may return the I420/NV12 image if there
|
||||
# is no transformation necessary. ie, a low res stream being used
|
||||
# for motion detection.
|
||||
if format == 'gray' and self.sample == gstsample:
|
||||
capsBands = 1
|
||||
else:
|
||||
capsBands = getBands(caps)
|
||||
|
||||
format = options and options.get('format')
|
||||
# I420 is a cheap way to get gray out of an h264 stream without color conversion.
|
||||
if format == 'gray':
|
||||
format = 'I420'
|
||||
bands = 1
|
||||
gst_buffer = gstsample.get_buffer()
|
||||
result, info = gst_buffer.map(Gst.MapFlags.READ)
|
||||
if not result:
|
||||
raise Exception("unable to map gst buffer")
|
||||
|
||||
try:
|
||||
stridePadding = (width * capsBands) % 4
|
||||
if stridePadding:
|
||||
stridePadding = 4 - stridePadding
|
||||
|
||||
if stridePadding:
|
||||
if capsBands != 1:
|
||||
raise Exception(
|
||||
f"found stride in conversion. this should not be possible. {caps.to_string()}"
|
||||
)
|
||||
width += stridePadding
|
||||
else:
|
||||
if format == "gray" and capsBands == 1:
|
||||
buffer = bytes(info.data)
|
||||
return buffer[0 : width * height]
|
||||
|
||||
if bands == capsBands:
|
||||
buffer = bytes(info.data)
|
||||
return buffer
|
||||
|
||||
if vipsimage.pyvips:
|
||||
vips = vipsimage.new_from_memory(info.data, width, height, capsBands)
|
||||
image = vipsimage.VipsImage(vips)
|
||||
else:
|
||||
pil = pilimage.new_from_memory(info.data, width, height, capsBands)
|
||||
image = pilimage.PILImage(pil)
|
||||
|
||||
# if bands == 1:
|
||||
# pil = pilimage.new_from_memory(info.data, width, height, capsBands)
|
||||
# pil.convert('RGB').save('/server/volume/test.jpg')
|
||||
|
||||
crop = None
|
||||
if stridePadding:
|
||||
crop = {
|
||||
"left": 0,
|
||||
"top": 0,
|
||||
"width": width - stridePadding,
|
||||
"height": height,
|
||||
}
|
||||
|
||||
reformat = None
|
||||
if bands and bands != capsBands:
|
||||
reformat = format
|
||||
|
||||
colored = None
|
||||
if reformat or crop:
|
||||
colored = image
|
||||
image = await image.toImageInternal(
|
||||
{
|
||||
"crop": crop,
|
||||
"format": reformat,
|
||||
}
|
||||
)
|
||||
try:
|
||||
return await image.toBuffer(
|
||||
{
|
||||
"format": format,
|
||||
}
|
||||
)
|
||||
finally:
|
||||
await image.close()
|
||||
if colored:
|
||||
await colored.close()
|
||||
finally:
|
||||
gst_buffer.unmap(info)
|
||||
|
||||
|
||||
async def createResamplerPipeline(
|
||||
sample,
|
||||
gst: GstSession,
|
||||
options: scrypted_sdk.ImageOptions,
|
||||
postProcessPipeline: str,
|
||||
):
|
||||
if not sample:
|
||||
raise Exception("Video Frame has been invalidated")
|
||||
|
||||
resize = None
|
||||
if options:
|
||||
resize = options.get("resize")
|
||||
if resize:
|
||||
resize = (resize.get("width"), resize.get("height"))
|
||||
|
||||
for check in gst.reuse:
|
||||
if check.resize == resize:
|
||||
gst.reuse.remove(check)
|
||||
return check
|
||||
|
||||
if postProcessPipeline == "VAAPI":
|
||||
pp = VaapiPostProcess()
|
||||
elif postProcessPipeline == "OpenGL (GPU memory)":
|
||||
pp = OpenGLPostProcess()
|
||||
elif postProcessPipeline == "OpenGL (system memory)":
|
||||
pp = OpenGLPostProcess()
|
||||
elif postProcessPipeline == None:
|
||||
pp = GstreamerFormatPostProcess()
|
||||
else:
|
||||
format = 'RGB'
|
||||
bands = 3
|
||||
|
||||
videocaps += ',format={format}'.format(format=format)
|
||||
# trap the pipeline before it gets here. videocrop
|
||||
# in the pipeline seems to spam the stdout??
|
||||
# use the legacy vips/pil post process.
|
||||
pp = GstreamerPostProcess()
|
||||
|
||||
caps = sample.get_caps()
|
||||
|
||||
srcCaps = caps.to_string().replace(" ", "")
|
||||
pipeline = f"appsrc name=appsrc format=time emit-signals=True is-live=True caps={srcCaps}"
|
||||
await pp.create(gst.gst, pipeline)
|
||||
pp.resize = resize
|
||||
|
||||
return pp
|
||||
|
||||
|
||||
async def toGstSample(
|
||||
gst: GstSession,
|
||||
sample,
|
||||
options: scrypted_sdk.ImageOptions,
|
||||
postProcessPipeline: str,
|
||||
) -> GstImage:
|
||||
if not sample:
|
||||
raise Exception("Video Frame has been invalidated")
|
||||
if not options:
|
||||
return sample
|
||||
|
||||
crop = options.get("crop")
|
||||
resize = options.get("resize")
|
||||
format = options.get("format")
|
||||
|
||||
caps = sample.get_caps()
|
||||
sampleWidth = caps.get_structure(0).get_value("width")
|
||||
sampleHeight = caps.get_structure(0).get_value("height")
|
||||
capsFormat = caps.get_structure(0).get_value("format")
|
||||
|
||||
# normalize format, eliminating it if possible
|
||||
if format == "jpg":
|
||||
# get into a format suitable to be be handled by vips/pil
|
||||
if capsFormat == "RGB" or capsFormat == "RGBA":
|
||||
sinkFormat = None
|
||||
else:
|
||||
sinkFormat = "RGBA"
|
||||
elif format == "rgb":
|
||||
if capsFormat == "RGB":
|
||||
sinkFormat = None
|
||||
else:
|
||||
sinkFormat = "RGB"
|
||||
elif format == "rgba":
|
||||
if capsFormat == "RGBA":
|
||||
sinkFormat = None
|
||||
else:
|
||||
sinkFormat = "RGBA"
|
||||
elif format == "gray":
|
||||
# are there others? does the output format depend on GPU?
|
||||
# have only ever seen NV12
|
||||
if capsFormat == "NV12" or capsFormat == "I420" or capsFormat == "GRAY8":
|
||||
sinkFormat = None
|
||||
else:
|
||||
sinkFormat = "GRAY8"
|
||||
elif format:
|
||||
raise Exception(f"invalid output format {format}")
|
||||
|
||||
if not crop and not resize and not sinkFormat:
|
||||
return sample
|
||||
|
||||
pp = await createResamplerPipeline(sample, gst, options, postProcessPipeline)
|
||||
try:
|
||||
pp.update(caps, (sampleWidth, sampleHeight), options)
|
||||
|
||||
appsrc = pp.gst.get_by_name("appsrc")
|
||||
srcCaps = caps.to_string().replace(" ", "")
|
||||
appsrc.set_property("caps", caps.from_string(srcCaps))
|
||||
|
||||
appsrc.emit("push-sample", sample)
|
||||
|
||||
newSample = await pp.g.__anext__()
|
||||
|
||||
gst.reuse.append(pp)
|
||||
except:
|
||||
await pp.g.aclose()
|
||||
raise
|
||||
|
||||
return newSample
|
||||
|
||||
|
||||
async def createGstMediaObject(image: GstImage):
|
||||
ret = await scrypted_sdk.mediaManager.createMediaObject(
|
||||
image,
|
||||
scrypted_sdk.ScryptedMimeTypes.Image.value,
|
||||
{
|
||||
"format": None,
|
||||
"width": image.width,
|
||||
"height": image.height,
|
||||
"toBuffer": lambda options=None: image.toBuffer(options),
|
||||
"toImage": lambda options=None: image.toImage(options),
|
||||
},
|
||||
)
|
||||
return ret
|
||||
|
||||
|
||||
async def generateVideoFramesGstreamer(
|
||||
mediaObject: scrypted_sdk.MediaObject,
|
||||
options: scrypted_sdk.VideoFrameGeneratorOptions = None,
|
||||
filter: Any = None,
|
||||
h264Decoder: str = None,
|
||||
postProcessPipeline: str = None,
|
||||
) -> scrypted_sdk.VideoFrame:
|
||||
ffmpegInput: scrypted_sdk.FFmpegInput = (
|
||||
await scrypted_sdk.mediaManager.convertMediaObjectToJSON(
|
||||
mediaObject, scrypted_sdk.ScryptedMimeTypes.FFmpegInput.value
|
||||
)
|
||||
)
|
||||
container = ffmpegInput.get("container", None)
|
||||
pipeline = ffmpegInput.get("url")
|
||||
videoCodec = optional_chain(ffmpegInput, "mediaStreamOptions", "video", "codec")
|
||||
|
||||
if pipeline.startswith("tcp://"):
|
||||
parsed_url = urlparse(pipeline)
|
||||
pipeline = "tcpclientsrc port=%s host=%s" % (
|
||||
parsed_url.port,
|
||||
parsed_url.hostname,
|
||||
)
|
||||
if container == "mpegts":
|
||||
pipeline += " ! tsdemux"
|
||||
elif container == "sdp":
|
||||
pipeline += " ! sdpdemux"
|
||||
else:
|
||||
raise Exception("unknown container %s" % container)
|
||||
elif pipeline.startswith("rtsp"):
|
||||
pipeline = (
|
||||
"rtspsrc buffer-mode=0 location=%s protocols=tcp latency=0" % pipeline
|
||||
)
|
||||
if videoCodec == "h264":
|
||||
pipeline += " ! rtph264depay ! h264parse"
|
||||
|
||||
decoder = None
|
||||
|
||||
def setDecoderClearDefault(value: str):
|
||||
nonlocal decoder
|
||||
decoder = value
|
||||
if decoder == 'Default':
|
||||
if decoder == "Default":
|
||||
decoder = None
|
||||
|
||||
setDecoderClearDefault(None)
|
||||
|
||||
if videoCodec == 'h264':
|
||||
if videoCodec == "h264":
|
||||
setDecoderClearDefault(h264Decoder)
|
||||
|
||||
if not decoder:
|
||||
# hw acceleration is "safe" to use on mac, but not
|
||||
# on other hosts where it may crash.
|
||||
# defaults must be safe.
|
||||
if platform.system() == 'Darwin':
|
||||
decoder = 'vtdec_hw'
|
||||
if platform.system() == "Darwin":
|
||||
decoder = "vtdec_hw"
|
||||
else:
|
||||
decoder = 'avdec_h264'
|
||||
decoder = "avdec_h264"
|
||||
else:
|
||||
# decodebin may pick a hardware accelerated decoder, which isn't ideal
|
||||
# so use a known software decoder for h264 and decodebin for anything else.
|
||||
decoder = 'decodebin'
|
||||
decoder = "decodebin"
|
||||
|
||||
fps = options and options.get('fps', None)
|
||||
videorate = ''
|
||||
fps = options and options.get("fps", None)
|
||||
videorate = ""
|
||||
if fps:
|
||||
videorate = 'videorate ! '
|
||||
videocaps += ',framerate={fps}/1'.format(fps=fps)
|
||||
videorate = f"! videorate max-rate={fps}"
|
||||
|
||||
if decoder.find("{videocaps}") == -1:
|
||||
videosrc += ' ! {decoder} ! queue leaky=downstream max-size-buffers=0 ! videoconvert ! {videorate} {videocaps}'.format(decoder=decoder, videocaps=videocaps, videorate=videorate)
|
||||
if postProcessPipeline == "VAAPI":
|
||||
pipeline += (
|
||||
f" ! {decoder} {videorate} ! queue leaky=downstream max-size-buffers=0"
|
||||
)
|
||||
elif postProcessPipeline == "OpenGL (GPU memory)":
|
||||
pipeline += f" ! {decoder} {videorate} ! queue leaky=downstream max-size-buffers=0 ! glupload"
|
||||
elif postProcessPipeline == "OpenGL (system memory)":
|
||||
pipeline += f" ! {decoder} {videorate} ! queue leaky=downstream max-size-buffers=0 ! video/x-raw ! glupload"
|
||||
else:
|
||||
if format == 'RGB':
|
||||
format = 'RGBA'
|
||||
bands = 4
|
||||
videocaps += 'A'
|
||||
d = decoder.replace('{videocaps}', '{videorate}{videocaps}'.format(videocaps=videocaps, videorate=videorate))
|
||||
videosrc += ' ! {decoder}'.format(decoder=d)
|
||||
pipeline += f" ! {decoder} ! video/x-raw {videorate} ! queue leaky=downstream max-size-buffers=0"
|
||||
# disable the gstreamer post process because videocrop spams the log
|
||||
postProcessPipeline = "Default"
|
||||
# postProcessPipeline = None
|
||||
|
||||
gst, gen = await createPipelineIterator(videosrc)
|
||||
|
||||
vipsImage: vipsimage.VipsImage = None
|
||||
pilImage: pilimage.PILImage = None
|
||||
print(pipeline)
|
||||
mo: scrypted_sdk.MediaObject = None
|
||||
|
||||
gst, gen = await createPipelineIterator(pipeline)
|
||||
gstImage: GstImage = None
|
||||
session = GstSession(gst)
|
||||
async for gstsample in gen():
|
||||
caps = gstsample.get_caps()
|
||||
height = caps.get_structure(0).get_value('height')
|
||||
width = caps.get_structure(0).get_value('width')
|
||||
gst_buffer = gstsample.get_buffer()
|
||||
result, info = gst_buffer.map(Gst.MapFlags.READ)
|
||||
if not result:
|
||||
continue
|
||||
|
||||
if not mo:
|
||||
gstImage = GstImage(session, gstsample, postProcessPipeline)
|
||||
mo = await createImageMediaObject(gstImage)
|
||||
gstImage.sample = gstsample
|
||||
try:
|
||||
if vipsimage.pyvips:
|
||||
vips = vipsimage.new_from_memory(info.data, width, height, bands)
|
||||
|
||||
if not mo:
|
||||
vipsImage = vipsimage.VipsImage(vips)
|
||||
mo = await vipsimage.createVipsMediaObject(vipsImage)
|
||||
|
||||
vipsImage.vipsImage = vips
|
||||
try:
|
||||
yield createVideoFrame(mo)
|
||||
finally:
|
||||
await vipsImage.close()
|
||||
else:
|
||||
pil = pilimage.new_from_memory(info.data, width, height, bands)
|
||||
|
||||
if not mo:
|
||||
pilImage = pilimage.PILImage(pil)
|
||||
mo = await pilimage.createPILMediaObject(pilImage)
|
||||
|
||||
pilImage.pilImage = pil
|
||||
try:
|
||||
yield createVideoFrame(mo)
|
||||
finally:
|
||||
await pilImage.close()
|
||||
yield createVideoFrame(mo)
|
||||
finally:
|
||||
gst_buffer.unmap(info)
|
||||
await gstImage.close()
|
||||
|
||||
278
plugins/python-codecs/src/gstreamer_postprocess.py
Normal file
278
plugins/python-codecs/src/gstreamer_postprocess.py
Normal file
@@ -0,0 +1,278 @@
|
||||
import scrypted_sdk
|
||||
from typing import Tuple
|
||||
from gst_generator import createPipelineIterator
|
||||
|
||||
def getCapsFormat(caps):
|
||||
return caps.get_structure(0).get_value('format')
|
||||
|
||||
def getBands(caps):
|
||||
capsFormat = getCapsFormat(caps)
|
||||
|
||||
if capsFormat == 'RGB':
|
||||
return 3
|
||||
elif capsFormat == 'RGBA':
|
||||
return 4
|
||||
elif capsFormat == 'GRAY8':
|
||||
return 1
|
||||
|
||||
raise Exception(f'unknown pixel format, please report this bug to @koush on Discord {capsFormat}')
|
||||
|
||||
def toCapsFormat(options: scrypted_sdk.ImageOptions):
|
||||
format = options.get('format')
|
||||
|
||||
if format == 'jpg':
|
||||
return 'RGB'
|
||||
elif format == 'rgb':
|
||||
return 'RGB'
|
||||
elif format == 'rgba':
|
||||
return 'RGBA'
|
||||
elif format == 'gray':
|
||||
return 'GRAY8'
|
||||
elif format:
|
||||
raise Exception(f'invalid output format {format}')
|
||||
else:
|
||||
return None
|
||||
|
||||
class GstreamerFormatPostProcess():
|
||||
def __init__(self) -> None:
|
||||
self.postprocess = ' ! videoconvert ! capsfilter name=capsfilter'
|
||||
self.resize = None
|
||||
|
||||
async def create(self, gst, pipeline: str):
|
||||
gst, gen = await createPipelineIterator(pipeline + self.postprocess, gst)
|
||||
g = gen()
|
||||
self.gst = gst
|
||||
self.g = g
|
||||
self.capsfilter = self.gst.get_by_name('capsfilter')
|
||||
|
||||
def update(self, caps, sampleSize: Tuple[int, int], options: scrypted_sdk.ImageOptions):
|
||||
sinkCaps = "video/x-raw"
|
||||
if format:
|
||||
sinkCaps += f",format={format}"
|
||||
self.capsfilter.set_property('caps', caps.from_string(sinkCaps))
|
||||
|
||||
class GstreamerPostProcess():
|
||||
def __init__(self) -> None:
|
||||
self.postprocess = ' ! videocrop name=videocrop ! videoconvert ! videoscale ! capsfilter name=scaleCapsFilter'
|
||||
self.resize = None
|
||||
|
||||
async def create(self, gst, pipeline: str):
|
||||
gst, gen = await createPipelineIterator(pipeline + self.postprocess, gst)
|
||||
g = gen()
|
||||
self.gst = gst
|
||||
self.g = g
|
||||
self.videocrop = self.gst.get_by_name('videocrop')
|
||||
self.scaleCapsFilter = self.gst.get_by_name('scaleCapsFilter')
|
||||
|
||||
def update(self, caps, sampleSize: Tuple[int, int], options: scrypted_sdk.ImageOptions):
|
||||
sampleWidth, sampleHeight = sampleSize
|
||||
|
||||
crop = options.get('crop')
|
||||
resize = options.get('resize')
|
||||
|
||||
if crop:
|
||||
left = int(crop['left'])
|
||||
top = int(crop['top'])
|
||||
width = int(crop['width'])
|
||||
height = int(crop['height'])
|
||||
# right and bottom crop values are pixel distance from the corresponding edge,
|
||||
# not a bounding box
|
||||
right = sampleWidth - (left + width)
|
||||
bottom = sampleHeight - (top + height)
|
||||
else:
|
||||
left = 0
|
||||
top = 0
|
||||
right = 0
|
||||
bottom = 0
|
||||
|
||||
videocrop = self.videocrop
|
||||
videocrop.set_property('left', left)
|
||||
videocrop.set_property('top', top)
|
||||
videocrop.set_property('right', right)
|
||||
videocrop.set_property('bottom', bottom)
|
||||
|
||||
scaleCaps = "video/x-raw,pixel-aspect-ratio=(fraction)1/1"
|
||||
if resize:
|
||||
width = resize.get('width')
|
||||
if width:
|
||||
xscale = resize['width'] / sampleWidth
|
||||
height = sampleHeight * xscale
|
||||
|
||||
height = resize.get('height')
|
||||
if height:
|
||||
yscale = resize['height'] / sampleHeight
|
||||
if not width:
|
||||
width = sampleWidth * yscale
|
||||
|
||||
width = int(width)
|
||||
height = int(height)
|
||||
|
||||
# pipeline += " ! videoscale"
|
||||
scaleCaps += f",width={width},height={height}"
|
||||
|
||||
# gstreamer aligns stride to a 4 byte boundary.
|
||||
# this makes it painful to get data out with RGB, NV12, or I420.
|
||||
format = toCapsFormat(options)
|
||||
if format != 'RGBA':
|
||||
if not format:
|
||||
format = 'RGBA'
|
||||
elif format == 'RGB':
|
||||
format = 'RGBA'
|
||||
elif format == 'GRAY8':
|
||||
pass
|
||||
else:
|
||||
raise Exception('unexpected target format returned from toCapsFormat')
|
||||
|
||||
scaleCaps += f",format={format}"
|
||||
|
||||
self.scaleCapsFilter.set_property('caps', caps.from_string(scaleCaps))
|
||||
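The crop and scale bookkeeping above is easy to get wrong: videocrop wants pixel distances from each edge rather than a bounding box, and the scale caps derive the missing dimension so the aspect ratio is preserved. A hedged sketch of the same idea with hypothetical numbers:

    def videocrop_edges(sample_w, sample_h, crop):
        # videocrop takes distances from each edge, not a bounding box.
        left, top = int(crop['left']), int(crop['top'])
        right = sample_w - (left + int(crop['width']))
        bottom = sample_h - (top + int(crop['height']))
        return left, top, right, bottom

    def scaled_size(sample_w, sample_h, resize):
        # derive whichever dimension is missing so the aspect ratio is preserved
        width, height = resize.get('width'), resize.get('height')
        if width and not height:
            height = sample_h * (width / sample_w)
        elif height and not width:
            width = sample_w * (height / sample_h)
        return int(width), int(height)

    # e.g. a 1920x1080 frame cropped to a 640x360 box at (100, 50):
    # videocrop_edges(1920, 1080, {'left': 100, 'top': 50, 'width': 640, 'height': 360})
    # -> (100, 50, 1180, 670)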
|
||||
class VaapiPostProcess():
|
||||
def __init__(self) -> None:
|
||||
self.postprocess = ' ! vaapipostproc name=vaapipostproc ! capsfilter name=capsFilter'
|
||||
self.resize = None
|
||||
|
||||
async def create(self, gst, pipeline: str):
|
||||
gst, gen = await createPipelineIterator(pipeline + self.postprocess, gst)
|
||||
g = gen()
|
||||
self.gst = gst
|
||||
self.g = g
|
||||
self.vaapipostproc = self.gst.get_by_name('vaapipostproc')
|
||||
self.capsFilter = self.gst.get_by_name('capsFilter')
|
||||
|
||||
def update(self, caps, sampleSize: Tuple[int, int], options: scrypted_sdk.ImageOptions):
|
||||
sampleWidth, sampleHeight = sampleSize
|
||||
|
||||
crop = options.get('crop')
|
||||
resize = options.get('resize')
|
||||
|
||||
vaapipostproc = self.vaapipostproc
|
||||
|
||||
if resize:
|
||||
width = resize.get('width')
|
||||
if width:
|
||||
xscale = resize['width'] / sampleWidth
|
||||
height = sampleHeight * xscale
|
||||
|
||||
height = resize.get('height')
|
||||
if height:
|
||||
yscale = resize['height'] / sampleHeight
|
||||
if not width:
|
||||
width = sampleWidth * yscale
|
||||
|
||||
width = int(width)
|
||||
height = int(height)
|
||||
|
||||
outputWidth = width
|
||||
outputHeight = height
|
||||
else:
|
||||
outputWidth = 0
|
||||
outputHeight = 0
|
||||
|
||||
# vaapipostproc.set_property('width', outputWidth)
|
||||
# vaapipostproc.set_property('height', outputHeight)
|
||||
|
||||
# TODO: gray fast path?
|
||||
# not sure vaapi supports non-rgba across all hardware...
|
||||
# GST_VIDEO_FORMAT_RGBA (11) – rgb with alpha channel last
|
||||
# GST_VIDEO_FORMAT_GRAY8 (25) – 8-bit grayscale
|
||||
|
||||
format = toCapsFormat(options)
|
||||
if format != 'GRAY8' and format != 'RGBA':
|
||||
format = 'RGBA'
|
||||
# should RGBA be forced? not sure all devices can handle gray8?
|
||||
format = 'RGBA'
|
||||
|
||||
vaapipostproc.set_property('format', 11)
|
||||
self.capsFilter.set_property('caps', caps.from_string(f"video/x-raw,format={format},width={outputWidth},height={outputHeight}"))
|
||||
|
||||
if crop:
|
||||
left = int(crop['left'])
|
||||
top = int(crop['top'])
|
||||
width = int(crop['width'])
|
||||
height = int(crop['height'])
|
||||
# right and bottom crop values are pixel distance from the corresponding edge,
|
||||
# not a bounding box
|
||||
right = sampleWidth - (left + width)
|
||||
bottom = sampleHeight - (top + height)
|
||||
else:
|
||||
left = 0
|
||||
top = 0
|
||||
right = 300
|
||||
bottom = 300
|
||||
|
||||
vaapipostproc.set_property('crop-left', left)
|
||||
vaapipostproc.set_property('crop-top', top)
|
||||
vaapipostproc.set_property('crop-right', right)
|
||||
vaapipostproc.set_property('crop-bottom', bottom)
|
||||
|
||||
class OpenGLPostProcess():
|
||||
def __init__(self) -> None:
|
||||
self.postprocess = ' ! glcolorconvert ! gltransformation name=gltransformation ! glcolorscale ! capsfilter name=glCapsFilter caps="video/x-raw(memory:GLMemory),format=RGBA" ! gldownload'
|
||||
self.resize = None
|
||||
|
||||
async def create(self, gst, pipeline: str):
|
||||
gst, gen = await createPipelineIterator(pipeline + self.postprocess, gst)
|
||||
g = gen()
|
||||
self.gst = gst
|
||||
self.g = g
|
||||
# positions/scales the input into target texture
|
||||
self.gltransformation = self.gst.get_by_name('gltransformation')
|
||||
# sets the target texture size
|
||||
self.glCapsFilter = self.gst.get_by_name('glCapsFilter')
|
||||
|
||||
def update(self, caps, sampleSize: Tuple[int, int], options: scrypted_sdk.ImageOptions):
|
||||
sampleWidth, sampleHeight = sampleSize
|
||||
|
||||
crop = options.get('crop')
|
||||
resize = options.get('resize')
|
||||
|
||||
glCaps = "video/x-raw(memory:GLMemory),format=RGBA"
|
||||
if resize:
|
||||
width = resize.get('width')
|
||||
if width:
|
||||
xscale = resize['width'] / sampleWidth
|
||||
height = sampleHeight * xscale
|
||||
|
||||
height = resize.get('height')
|
||||
if height:
|
||||
yscale = resize['height'] / sampleHeight
|
||||
if not width:
|
||||
width = sampleWidth * yscale
|
||||
|
||||
width = int(width)
|
||||
height = int(height)
|
||||
|
||||
glCaps += f",width={width},height={height}"
|
||||
|
||||
self.glCapsFilter.set_property('caps', caps.from_string(glCaps))
|
||||
|
||||
if crop:
|
||||
left = int(crop['left'])
|
||||
top = int(crop['top'])
|
||||
width = int(crop['width'])
|
||||
height = int(crop['height'])
|
||||
|
||||
scaleX = sampleWidth / width
|
||||
scaleY = sampleHeight / height
|
||||
|
||||
# the default scale origin is the center.
|
||||
newCenterX = left + width / 2
|
||||
newCenterY = top + height / 2
|
||||
curCenterX = sampleWidth / 2
|
||||
curCenterY = sampleHeight / 2
|
||||
diffX = curCenterX - newCenterX
|
||||
diffY = curCenterY - newCenterY
|
||||
translationX = diffX / width
|
||||
translationY = diffY / height
|
||||
else:
|
||||
scaleX = 1
|
||||
scaleY = 1
|
||||
translationX = 0
|
||||
translationY = 0
|
||||
|
||||
gltransformation = self.gltransformation
|
||||
gltransformation.set_property('scale-x', scaleX)
|
||||
gltransformation.set_property('scale-y', scaleY)
|
||||
gltransformation.set_property('translation-x', translationX)
|
||||
gltransformation.set_property('translation-y', translationY)
|
||||
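gltransformation scales about the texture center, so a crop box has to be expressed as a scale plus a translation of the new center relative to the old one, normalized to the crop size. A hedged sketch of that mapping with illustrative numbers:

    def crop_to_gl_transform(sample_w, sample_h, crop):
        w, h = crop['width'], crop['height']
        scale_x = sample_w / w
        scale_y = sample_h / h
        # move the crop box center onto the texture center, normalized to the crop size
        new_cx = crop['left'] + w / 2
        new_cy = crop['top'] + h / 2
        translation_x = (sample_w / 2 - new_cx) / w
        translation_y = (sample_h / 2 - new_cy) / h
        return scale_x, scale_y, translation_x, translation_y

    # 1920x1080 frame, 640x360 crop at (100, 50):
    # scale = (3.0, 3.0), translation = ((960-420)/640, (540-230)/360) = (0.84375, ~0.861)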
@@ -3,7 +3,7 @@ import scrypted_sdk
|
||||
from typing import Any
|
||||
import vipsimage
|
||||
import pilimage
|
||||
from generator_common import createVideoFrame
|
||||
from generator_common import createVideoFrame, createImageMediaObject
|
||||
|
||||
av = None
|
||||
try:
|
||||
@@ -54,7 +54,7 @@ async def generateVideoFramesLibav(mediaObject: scrypted_sdk.MediaObject, option
|
||||
|
||||
if not mo:
|
||||
vipsImage = vipsimage.VipsImage(vips)
|
||||
mo = await vipsimage.createVipsMediaObject(vipsImage)
|
||||
mo = await createImageMediaObject(vipsImage)
|
||||
|
||||
vipsImage.vipsImage = vips
|
||||
try:
|
||||
@@ -75,7 +75,7 @@ async def generateVideoFramesLibav(mediaObject: scrypted_sdk.MediaObject, option
|
||||
|
||||
if not mo:
|
||||
pilImage = pilimage.PILImage(pil)
|
||||
mo = await pilimage.createPILMediaObject(pilImage)
|
||||
mo = await createImageMediaObject(pilImage)
|
||||
|
||||
pilImage.pilImage = pil
|
||||
try:
|
||||
|
||||
@@ -31,7 +31,7 @@ class GstreamerGenerator(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.VideoFram
|
||||
async def generateVideoFrames(self, mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None) -> scrypted_sdk.VideoFrame:
|
||||
worker = scrypted_sdk.fork()
|
||||
forked: CodecFork = await worker.result
|
||||
return await forked.generateVideoFramesGstreamer(mediaObject, options, filter, self.storage.getItem('h264Decoder'))
|
||||
return await forked.generateVideoFramesGstreamer(mediaObject, options, filter, self.storage.getItem('h264Decoder'), self.storage.getItem('postProcessPipeline'))
|
||||
|
||||
async def getSettings(self) -> List[Setting]:
|
||||
return [
|
||||
@@ -46,9 +46,20 @@ class GstreamerGenerator(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.VideoFram
|
||||
'vtdec_hw',
|
||||
'nvh264dec',
|
||||
'vaapih264dec',
|
||||
'vaapih264dec ! vaapipostproc ! {videocaps}',
|
||||
],
|
||||
'combobox': True,
|
||||
},
|
||||
{
|
||||
'key': 'postProcessPipeline',
|
||||
'title': 'Post Process Pipeline',
|
||||
'description': 'The Gstreamer pipeline to use to resize and scale frames.',
|
||||
'value': self.storage.getItem('postProcessPipeline') or 'Default',
|
||||
'choices': [
|
||||
'Default',
|
||||
'OpenGL (GPU memory)',
|
||||
'OpenGL (system memory)',
|
||||
'VAAPI',
|
||||
],
|
||||
}
|
||||
]
|
||||
|
||||
@@ -138,10 +149,10 @@ def multiprocess_exit():
|
||||
os._exit(os.EX_OK)
|
||||
|
||||
class CodecFork:
|
||||
async def generateVideoFramesGstreamer(self, mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions = None, filter: Any = None, h264Decoder: str = None) -> scrypted_sdk.VideoFrame:
|
||||
async def generateVideoFramesGstreamer(self, mediaObject: scrypted_sdk.MediaObject, options: scrypted_sdk.VideoFrameGeneratorOptions, filter: Any, h264Decoder: str, postProcessPipeline: str) -> scrypted_sdk.VideoFrame:
|
||||
start = time.time()
|
||||
try:
|
||||
async for data in gstreamer.generateVideoFramesGstreamer(mediaObject, options, filter, h264Decoder):
|
||||
async for data in gstreamer.generateVideoFramesGstreamer(mediaObject, options, filter, h264Decoder, postProcessPipeline):
|
||||
yield data
|
||||
except Exception as e:
|
||||
traceback.print_exc()
|
||||
|
||||
@@ -2,6 +2,7 @@ import scrypted_sdk
from typing import Any
from thread import to_thread
import io
from generator_common import createImageMediaObject

try:
from PIL import Image
@@ -23,7 +24,7 @@ class PILImage(scrypted_sdk.Image):
pil.close()

async def toBuffer(self, options: scrypted_sdk.ImageOptions = None) -> bytearray:
pilImage: PILImage = await self.toPILImage(options)
pilImage: PILImage = await self.toImageInternal(options)

if not options or not options.get('format', None):
def format():
@@ -53,20 +54,27 @@ class PILImage(scrypted_sdk.Image):

def save():
bytesArray = io.BytesIO()
pilImage.pilImage.save(bytesArray, format='JPEG')
if pilImage.pilImage.mode == 'RGBA':
rgb = pilImage.pilImage.convert('RGB')
try:
rgb.save(bytesArray, format='JPEG')
finally:
rgb.close()
else:
pilImage.pilImage.save(bytesArray, format='JPEG')
# pilImage.pilImage.save(bytesArray, format=options['format'])
return bytesArray.getvalue()

return await to_thread(lambda: save())

async def toPILImage(self, options: scrypted_sdk.ImageOptions = None):
async def toImageInternal(self, options: scrypted_sdk.ImageOptions = None):
return await to_thread(lambda: toPILImage(self, options))

async def toImage(self, options: scrypted_sdk.ImageOptions = None) -> Any:
if options and options.get('format', None):
raise Exception('format can only be used with toBuffer')
newPILImage = await self.toPILImage(options)
return await createPILMediaObject(newPILImage)
newPILImage = await self.toImageInternal(options)
return await createImageMediaObject(newPILImage)

def toPILImage(pilImageWrapper: PILImage, options: scrypted_sdk.ImageOptions = None) -> PILImage:
pilImage = pilImageWrapper.pilImage
@@ -94,16 +102,6 @@ def toPILImage(pilImageWrapper: PILImage, options: scrypted_sdk.ImageOptions = N

return PILImage(pilImage)

async def createPILMediaObject(image: PILImage):
ret = await scrypted_sdk.mediaManager.createMediaObject(image, scrypted_sdk.ScryptedMimeTypes.Image.value, {
'format': None,
'width': image.width,
'height': image.height,
'toBuffer': lambda options = None: image.toBuffer(options),
'toImage': lambda options = None: image.toImage(options),
})
return ret

class ImageReader(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.BufferConverter):
def __init__(self, nativeId: str):
super().__init__(nativeId)
@@ -114,7 +112,7 @@ class ImageReader(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.BufferConverter)
async def convert(self, data: Any, fromMimeType: str, toMimeType: str, options: scrypted_sdk.MediaObjectOptions = None) -> Any:
pil = Image.open(io.BytesIO(data))
pil.load()
return await createPILMediaObject(PILImage(pil))
return await createImageMediaObject(PILImage(pil))

class ImageWriter(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.BufferConverter):
def __init__(self, nativeId: str):
@@ -1,5 +1,6 @@
import scrypted_sdk
import asyncio
from generator_common import createImageMediaObject

from typing import Any
try:
import pyvips
@@ -23,7 +24,7 @@ class VipsImage(scrypted_sdk.Image):
vips.invalidate()

async def toBuffer(self, options: scrypted_sdk.ImageOptions = None) -> bytearray:
vipsImage: VipsImage = await self.toVipsImage(options)
vipsImage: VipsImage = await self.toImageInternal(options)

if not options or not options.get('format', None):
def format():
@@ -61,14 +62,14 @@ class VipsImage(scrypted_sdk.Image):

return await to_thread(lambda: vipsImage.vipsImage.write_to_buffer('.' + options['format']))

async def toVipsImage(self, options: scrypted_sdk.ImageOptions = None):
async def toImageInternal(self, options: scrypted_sdk.ImageOptions = None):
return await to_thread(lambda: toVipsImage(self, options))

async def toImage(self, options: scrypted_sdk.ImageOptions = None) -> Any:
if options and options.get('format', None):
raise Exception('format can only be used with toBuffer')
newVipsImage = await self.toVipsImage(options)
return await createVipsMediaObject(newVipsImage)
newVipsImage = await self.toImageInternal(options)
return await createImageMediaObject(newVipsImage)

def toVipsImage(vipsImageWrapper: VipsImage, options: scrypted_sdk.ImageOptions = None) -> VipsImage:
vipsImage = vipsImageWrapper.vipsImage
@@ -99,16 +100,6 @@ def toVipsImage(vipsImageWrapper: VipsImage, options: scrypted_sdk.ImageOptions

return VipsImage(vipsImage)

async def createVipsMediaObject(image: VipsImage):
ret = await scrypted_sdk.mediaManager.createMediaObject(image, scrypted_sdk.ScryptedMimeTypes.Image.value, {
'format': None,
'width': image.width,
'height': image.height,
'toBuffer': lambda options = None: image.toBuffer(options),
'toImage': lambda options = None: image.toImage(options),
})
return ret

class ImageReader(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.BufferConverter):
def __init__(self, nativeId: str):
super().__init__(nativeId)
@@ -118,7 +109,7 @@ class ImageReader(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.BufferConverter)

async def convert(self, data: Any, fromMimeType: str, toMimeType: str, options: scrypted_sdk.MediaObjectOptions = None) -> Any:
vips = Image.new_from_buffer(data, '')
return await createVipsMediaObject(VipsImage(vips))
return await createImageMediaObject(VipsImage(vips))

class ImageWriter(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.BufferConverter):
def __init__(self, nativeId: str):
@@ -134,3 +125,6 @@ class ImageWriter(scrypted_sdk.ScryptedDeviceBase, scrypted_sdk.BufferConverter)

def new_from_memory(data, width: int, height: int, bands: int):
return Image.new_from_memory(data, width, height, bands, pyvips.BandFormat.UCHAR)

def new_from_buffer(data, width: int, height: int, bands: int):
return Image.new_from_buffer(data, width, height, bands, pyvips.BandFormat.UCHAR)
6 plugins/ring/package-lock.json generated
@@ -1,12 +1,12 @@
{
"name": "@scrypted/ring",
"version": "0.0.109",
"version": "0.0.110",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/ring",
"version": "0.0.109",
"version": "0.0.110",
"dependencies": {
"@koush/ring-client-api": "file:../../external/ring-client-api",
"@scrypted/common": "file:../../common",
@@ -49,7 +49,7 @@
},
"../../sdk": {
"name": "@scrypted/sdk",
"version": "0.2.86",
"version": "0.2.101",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -44,5 +44,5 @@
"got": "11.8.6",
"socket.io-client": "^2.5.0"
},
"version": "0.0.109"
"version": "0.0.110"
}
@@ -101,14 +101,29 @@ export class RingCameraDevice extends ScryptedDeviceBase implements DeviceProvid
this.console.log(camera.name, 'onDoorbellPressed', e);
this.triggerBinaryState();
});
let motionTimeout: NodeJS.Timeout;
const resetTimeout = () => {
clearTimeout(motionTimeout);
motionTimeout = setTimeout(() => this.motionDetected = false, 30000);
};
camera.onMotionDetected?.subscribe(async motionDetected => {
if (motionDetected)
if (motionDetected) {
this.console.log(camera.name, 'onMotionDetected');
resetTimeout();
}
else {
clearTimeout(motionTimeout);
}
this.motionDetected = motionDetected;
});
camera.onMotionDetectedPolling?.subscribe(async motionDetected => {
if (motionDetected)
if (motionDetected) {
this.console.log(camera.name, 'onMotionDetected');
resetTimeout();
}
else {
clearTimeout(motionTimeout);
}
this.motionDetected = motionDetected;
});
camera.onBatteryLevel?.subscribe(async () => {
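For context, the Ring hunk above replaces a bare `motionDetected` assignment with a shared 30-second reset timer, so a missed "motion ended" event cannot leave the state stuck on. A minimal standalone sketch of the same debounce pattern (the class, method name, and wiring here are illustrative, not the plugin's API):

```typescript
// Sketch of the debounced motion-reset pattern used in the hunk above.
// The 30s window matches the diff; everything else is an assumption.
class MotionState {
    motionDetected = false;
    private resetTimer?: ReturnType<typeof setTimeout>;

    onMotion(detected: boolean) {
        if (detected) {
            // restart the 30 second window on every motion report.
            clearTimeout(this.resetTimer);
            this.resetTimer = setTimeout(() => this.motionDetected = false, 30000);
        }
        else {
            // an explicit "motion ended" event cancels the pending reset.
            clearTimeout(this.resetTimer);
        }
        this.motionDetected = detected;
    }
}
```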
@@ -27,7 +27,7 @@ export class SipCallSession extends Subscribed {
private sipManager: SipManager
) {
super()
if( !sipManager ) {
if( !this.sipManager ) {
this.sipManager = this.createSipManager( sipOptions )
}
//TODO: make this more clean
@@ -35,7 +35,7 @@ export class SipCallSession extends Subscribed {
this.callEnded(false)
} ))

sipManager.setSipOptions( sipOptions )
this.sipManager.setSipOptions( sipOptions )
}

static async createCallSession(console: Console, cameraName: string, sipOptions: SipOptions, sipManager?: SipManager ) {
|
||||
|
||||
{
|
||||
// docker installation
|
||||
"scrypted.debugHost": "koushik-ubuntu",
|
||||
"scrypted.serverRoot": "/server",
|
||||
// "scrypted.debugHost": "koushik-ubuntu",
|
||||
// "scrypted.serverRoot": "/server",
|
||||
|
||||
// pi local installation
|
||||
// "scrypted.debugHost": "192.168.2.119",
|
||||
// "scrypted.serverRoot": "/home/pi/.scrypted",
|
||||
|
||||
// local checkout
|
||||
// "scrypted.debugHost": "127.0.0.1",
|
||||
// "scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
"scrypted.debugHost": "127.0.0.1",
|
||||
"scrypted.serverRoot": "/Users/koush/.scrypted",
|
||||
// "scrypted.debugHost": "koushik-windows",
|
||||
// "scrypted.serverRoot": "C:\\Users\\koush\\.scrypted",
|
||||
|
||||
|
||||
4 plugins/tensorflow-lite/package-lock.json generated
@@ -1,12 +1,12 @@
{
"name": "@scrypted/tensorflow-lite",
"version": "0.1.15",
"version": "0.1.17",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/tensorflow-lite",
"version": "0.1.15",
"version": "0.1.17",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}
@@ -34,6 +34,12 @@
"@scrypted/objectdetector"
],
"runtime": "python",
"pythonVersion": {
"default": "3.9",
"darwin": {
"arm64": "3.10"
}
},
"type": "API",
"interfaces": [
"Settings",
@@ -43,5 +49,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.1.15"
"version": "0.1.17"
}
@@ -107,7 +107,7 @@ class Prediction:
self.score = score
self.bbox = bbox

class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Settings):
class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter):
labels: dict

def __init__(self, nativeId: str | None = None):

@@ -88,7 +88,6 @@ class TensorFlowLitePlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted
self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=self.interpreter_count, thread_name_prefix="tflite", )

async def getSettings(self) -> list[Setting]:
ret = await super().getSettings()
coral: Setting = {
'title': 'Detected Edge TPU',
'description': 'The device paths of the Coral Edge TPUs that will be used for detections.',
@@ -96,10 +95,7 @@ class TensorFlowLitePlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted
'readonly': True,
'key': 'coral',
}

ret.append(coral)

return ret
return [coral]

# width, height, channels
def get_input_details(self) -> Tuple[int, int, int]:
4 plugins/webrtc/package-lock.json generated
@@ -1,12 +1,12 @@
{
"name": "@scrypted/webrtc",
"version": "0.1.46",
"version": "0.1.49",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/webrtc",
"version": "0.1.46",
"version": "0.1.49",
"dependencies": {
"@scrypted/common": "file:../../common",
"@scrypted/sdk": "file:../../sdk",
@@ -1,6 +1,6 @@
{
"name": "@scrypted/webrtc",
"version": "0.1.46",
"version": "0.1.49",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",
"prescrypted-setup-project": "scrypted-package-json",
@@ -327,9 +327,10 @@ export function parseOptions(options: RTCSignalingOptions) {
sessionSupportsH264High = true;

const transcodeWidth = Math.max(640, Math.min(options?.screen?.width || 960, 1280));
const width = options?.screen?.width;
const height = options?.screen?.height;
const max = Math.max(width, height) * options?.screen?.devicePixelRatio;
const devicePixelRatio = options?.screen?.devicePixelRatio || 1;
const width = (options?.screen?.width * devicePixelRatio) || undefined;
const height = (options?.screen?.height * devicePixelRatio) || undefined;
const max = Math.max(width, height);
const isMediumResolution = !sessionSupportsH264High || (max && max < 1920);

return {
@@ -429,7 +430,7 @@ export class WebRTCConnectionManagement implements RTCConnectionManagement {
});
logConnectionState(console, this.pc);
waitConnected(this.pc)
.then(() => logIsLocalIceTransport(this.console, this.pc)).catch(() => {});
.then(() => logIsLocalIceTransport(this.console, this.pc)).catch(() => { });

this.weriftSignalingSession = new WeriftSignalingSession(console, this.pc);
}
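The `parseOptions` hunk above scales the reported screen size by `devicePixelRatio` before deciding whether to cap the stream at a medium resolution. A small self-contained sketch of that calculation, assuming a `screen` object shaped like the one in the diff (the function name and the 0-fallback for missing dimensions are illustrative):

```typescript
// Sketch of the resolution decision from the hunk above.
// The 1920 cutoff and the devicePixelRatio scaling come from the diff.
function isMediumResolution(
    screen: { width?: number, height?: number, devicePixelRatio?: number } | undefined,
    sessionSupportsH264High: boolean,
): boolean {
    const dpr = screen?.devicePixelRatio || 1;
    // convert CSS pixels to physical pixels; 0 stands in for "unknown".
    const width = (screen?.width || 0) * dpr;
    const height = (screen?.height || 0) * dpr;
    const max = Math.max(width, height);
    // use the medium profile when the client cannot do H264 High,
    // or when its largest physical dimension is below 1920.
    return !sessionSupportsH264High || (!!max && max < 1920);
}
```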
Submodule sdk/developer.scrypted.app updated: 672acce02b...86648eac03
4 sdk/package-lock.json generated
@@ -1,12 +1,12 @@
{
"name": "@scrypted/sdk",
"version": "0.2.101",
"version": "0.2.102",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/sdk",
"version": "0.2.101",
"version": "0.2.102",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -1,6 +1,6 @@
{
"name": "@scrypted/sdk",
"version": "0.2.101",
"version": "0.2.102",
"description": "",
"main": "dist/src/index.js",
"exports": {
4 sdk/types/package-lock.json generated
@@ -1,12 +1,12 @@
{
"name": "@scrypted/types",
"version": "0.2.91",
"version": "0.2.93",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/types",
"version": "0.2.91",
"version": "0.2.93",
"license": "ISC",
"devDependencies": {
"@types/rimraf": "^3.0.2",
@@ -1,6 +1,6 @@
{
"name": "@scrypted/types",
"version": "0.2.91",
"version": "0.2.93",
"description": "",
"main": "dist/index.js",
"author": "",
@@ -9,6 +9,16 @@ from typing import Union, Any, Callable
from .other import *

class AirPurifierMode(Enum):
Automatic = "Automatic"
Manual = "Manual"

class AirPurifierStatus(Enum):
Active = "Active"
ActiveNightMode = "ActiveNightMode"
Idle = "Idle"
Inactive = "Inactive"

class AirQuality(Enum):
Excellent = "Excellent"
Fair = "Fair"
@@ -49,6 +59,7 @@ class PanTiltZoomMovement(Enum):

class ScryptedDeviceType(Enum):
API = "API"
AirPurifier = "AirPurifier"
Automation = "Automation"
Builtin = "Builtin"
Camera = "Camera"
@@ -83,6 +94,7 @@ class ScryptedDeviceType(Enum):
WindowCovering = "WindowCovering"

class ScryptedInterface(Enum):
AirPurifier = "AirPurifier"
AirQualitySensor = "AirQualitySensor"
AmbientLightSensor = "AmbientLightSensor"
AudioSensor = "AudioSensor"
@@ -106,6 +118,7 @@ class ScryptedInterface(Enum):
EntrySensor = "EntrySensor"
EventRecorder = "EventRecorder"
Fan = "Fan"
FilterMaintenance = "FilterMaintenance"
FloodSensor = "FloodSensor"
HttpRequestHandler = "HttpRequestHandler"
HumiditySensor = "HumiditySensor"
@@ -252,6 +265,7 @@ class HttpResponseOptions(TypedDict):
class ObjectDetectionResult(TypedDict):
boundingBox: tuple[float, float, float, float]
className: str
cost: float
history: ObjectDetectionHistory
id: str
movement: Union[ObjectDetectionHistory, Any]
@@ -318,6 +332,13 @@ class AdoptDevice(TypedDict):
settings: DeviceCreatorSettings
pass

class AirPurifierState(TypedDict):
lockPhysicalControls: bool
mode: AirPurifierMode
speed: float
status: AirPurifierStatus
pass

class ColorHsv(TypedDict):
h: float
s: float
@@ -732,6 +753,12 @@ class VideoFrameGeneratorOptions(TypedDict):
class TamperState(TypedDict):
pass

class AirPurifier:
airPurifierState: AirPurifierState
async def setAirPurifierState(self, state: AirPurifierState) -> None:
pass
pass

class AirQualitySensor:
airQuality: AirQuality
pass
@@ -863,6 +890,11 @@ class Fan:
pass
pass

class FilterMaintenance:
filterChangeIndication: bool
filterLifeLevel: float
pass

class FloodSensor:
flooded: bool
pass
@@ -1399,6 +1431,9 @@ class ScryptedInterfaceProperty(Enum):
noxDensity = "noxDensity"
co2ppm = "co2ppm"
airQuality = "airQuality"
airPurifierState = "airPurifierState"
filterChangeIndication = "filterChangeIndication"
filterLifeLevel = "filterLifeLevel"
humiditySetting = "humiditySetting"
fan = "fan"
applicationInfo = "applicationInfo"
@@ -1480,6 +1515,7 @@ class ScryptedInterfaceMethods(Enum):
putSetting = "putSetting"
armSecuritySystem = "armSecuritySystem"
disarmSecuritySystem = "disarmSecuritySystem"
setAirPurifierState = "setAirPurifierState"
getReadmeMarkdown = "getReadmeMarkdown"
getOauthUrl = "getOauthUrl"
onOauthCallback = "onOauthCallback"
@@ -1911,6 +1947,27 @@ class DeviceState:
def airQuality(self, value: AirQuality):
self.setScryptedProperty("airQuality", value)

@property
def airPurifierState(self) -> AirPurifierState:
return self.getScryptedProperty("airPurifierState")
@airPurifierState.setter
def airPurifierState(self, value: AirPurifierState):
self.setScryptedProperty("airPurifierState", value)

@property
def filterChangeIndication(self) -> bool:
return self.getScryptedProperty("filterChangeIndication")
@filterChangeIndication.setter
def filterChangeIndication(self, value: bool):
self.setScryptedProperty("filterChangeIndication", value)

@property
def filterLifeLevel(self) -> float:
return self.getScryptedProperty("filterLifeLevel")
@filterLifeLevel.setter
def filterLifeLevel(self, value: float):
self.setScryptedProperty("filterLifeLevel", value)

@property
def humiditySetting(self) -> HumiditySettingStatus:
return self.getScryptedProperty("humiditySetting")
@@ -2430,6 +2487,23 @@ ScryptedInterfaceDescriptors = {
"airQuality"
]
},
"AirPurifier": {
"name": "AirPurifier",
"methods": [
"setAirPurifierState"
],
"properties": [
"airPurifierState"
]
},
"FilterMaintenance": {
"name": "FilterMaintenance",
"methods": [],
"properties": [
"filterChangeIndication",
"filterLifeLevel"
]
},
"Readme": {
"name": "Readme",
"methods": [
@@ -131,6 +131,7 @@ export enum ScryptedDeviceType {
SecuritySystem = "SecuritySystem",
WindowCovering = "WindowCovering",
Siren = "Siren",
AirPurifier = "AirPurifier",
Unknown = "Unknown",
}
/**
@@ -1181,6 +1182,36 @@ export interface Position {
export interface PositionSensor {
position?: Position;
}
export enum AirPurifierStatus {
Inactive = "Inactive",
Idle = "Idle",
Active = "Active",
ActiveNightMode = "ActiveNightMode",
}

export enum AirPurifierMode {
Manual = "Manual",
Automatic = "Automatic",
}

export interface AirPurifierState {
speed?: number;
status?: AirPurifierStatus,
mode?: AirPurifierMode,
lockPhysicalControls?: boolean,
}

export interface AirPurifier {
airPurifierState?: AirPurifierState;

setAirPurifierState(state: AirPurifierState): Promise<void>;
}

export interface FilterMaintenance {
filterLifeLevel?: number,
filterChangeIndication?: boolean,
}

export interface PM10Sensor {
pm10Density?: number;
}
@@ -1253,6 +1284,10 @@ export interface ObjectDetectionResult extends BoundingBoxResult {
* The id of the tracked object.
*/
id?: string;
/**
* The certainty that this is correct tracked object.
*/
cost?: number;
/**
* The detection class of the object.
*/
@@ -1349,6 +1384,7 @@ export interface ImageOptions {
resize?: {
width?: number,
height?: number,
filter?: 'nearest' | 'bilinear' | 'lanczos' | 'mitchell',
};
format?: ImageFormat;
}
@@ -1945,6 +1981,8 @@ export enum ScryptedInterface {
NOXSensor = "NOXSensor",
CO2Sensor = "CO2Sensor",
AirQualitySensor = "AirQualitySensor",
AirPurifier = "AirPurifier",
FilterMaintenance = "FilterMaintenance",
Readme = "Readme",
OauthClient = "OauthClient",
MixinProvider = "MixinProvider",
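The two hunks above add AirPurifier and FilterMaintenance to both the Python and the TypeScript type definitions. A hypothetical plugin device using those interfaces might look roughly like the sketch below; the class, its default values, and the assumption that this sdk version exposes the new state properties on `ScryptedDeviceBase` are all illustrative, not code from this changeset:

```typescript
import { AirPurifier, AirPurifierMode, AirPurifierState, AirPurifierStatus, FilterMaintenance, ScryptedDeviceBase } from '@scrypted/sdk';

// Hypothetical device implementing the interfaces introduced above.
class DemoAirPurifier extends ScryptedDeviceBase implements AirPurifier, FilterMaintenance {
    constructor(nativeId?: string) {
        super(nativeId);
        this.airPurifierState = {
            mode: AirPurifierMode.Automatic,
            status: AirPurifierStatus.Idle,
            speed: 0,
            lockPhysicalControls: false,
        };
        this.filterLifeLevel = 100;
        this.filterChangeIndication = false;
    }

    async setAirPurifierState(state: AirPurifierState): Promise<void> {
        // merge the requested fields and report the new state back to Scrypted.
        this.airPurifierState = { ...this.airPurifierState, ...state };
    }
}
```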
1 server/.vscode/launch.json vendored
@@ -15,6 +15,7 @@
"preLaunchTask": "npm: build",
"program": "${workspaceFolder}/bin/scrypted-serve",
"runtimeArgs": [
"--dns-result-order=ipv4first",
"--trace-warnings",
"--nolazy",
],
888 server/package-lock.json generated
File diff suppressed because it is too large
@@ -1,58 +1,55 @@
{
"name": "@scrypted/server",
"version": "0.23.0",
"version": "0.39.0",
"description": "",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10",
"@scrypted/types": "^0.2.91",
"@scrypted/types": "^0.2.93",
"adm-zip": "^0.5.10",
"axios": "^0.21.4",
"axios": "^1.4.0",
"body-parser": "^1.20.2",
"cookie-parser": "^1.4.6",
"debug": "^4.3.4",
"engine.io": "^6.4.1",
"engine.io": "^6.4.2",
"express": "^4.18.2",
"ffmpeg-static": "^5.1.0",
"http-auth": "^4.2.0",
"ip": "^1.1.8",
"level": "^6.0.1",
"level": "^8.0.0",
"linkfs": "^2.1.0",
"lodash": "^4.17.21",
"memfs": "^3.5.0",
"memfs": "^3.5.2",
"mime": "^3.0.0",
"mkdirp": "^1.0.4",
"nan": "^2.17.0",
"node-dijkstra": "^2.5.0",
"node-forge": "^1.3.1",
"node-gyp": "^8.4.1",
"node-gyp": "^9.3.1",
"router": "^1.3.8",
"semver": "^7.3.8",
"semver": "^7.5.1",
"source-map-support": "^0.5.21",
"tar": "^6.1.13",
"tslib": "^2.5.0",
"typescript": "^4.9.5",
"whatwg-mimetype": "^2.3.0",
"tar": "^6.1.15",
"tslib": "^2.5.3",
"typescript": "^5.1.3",
"whatwg-mimetype": "^3.0.0",
"ws": "^8.13.0"
},
"devDependencies": {
"@types/adm-zip": "^0.4.34",
"@types/adm-zip": "^0.5.0",
"@types/cookie-parser": "^1.4.3",
"@types/debug": "^4.1.7",
"@types/debug": "^4.1.8",
"@types/express": "^4.17.17",
"@types/http-auth": "^4.1.1",
"@types/ip": "^1.1.0",
"@types/lodash": "^4.14.192",
"@types/lodash": "^4.14.195",
"@types/mime": "^3.0.1",
"@types/mkdirp": "^1.0.2",
"@types/node-dijkstra": "^2.5.3",
"@types/node-forge": "^1.3.2",
"@types/pem": "^1.9.6",
"@types/rimraf": "^3.0.2",
"@types/semver": "^7.3.13",
"@types/semver": "^7.5.0",
"@types/source-map-support": "^0.5.6",
"@types/tar": "^4.0.5",
"@types/whatwg-mimetype": "^2.1.1",
"@types/ws": "^7.4.7"
"@types/tar": "^6.1.5",
"@types/whatwg-mimetype": "^3.0.0",
"@types/ws": "^8.5.4"
},
"optionalDependencies": {
"node-pty-prebuilt-multiarch": "^0.10.1-pre.5"
@@ -603,8 +603,25 @@ class PluginRemote:
f.write(requirements)
f.close()

p = subprocess.Popen([sys.executable, '-m', 'pip', 'install', '-r', requirementstxt,
'--prefix', python_prefix], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
try:
pythonVersion = packageJson['scrypted']['pythonVersion']
except:
pythonVersion = None

pipArgs = [
sys.executable,
'-m', 'pip', 'install', '-r', requirementstxt,
'--prefix', python_prefix
]
if pythonVersion:
print('Specific Python verison requested. Forcing reinstall.')
# prevent uninstalling system packages.
pipArgs.append('--ignore-installed')
# force reinstall even if it exists in system packages.
pipArgs.append('--force-reinstall')

p = subprocess.Popen(pipArgs, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

while True:
line = p.stdout.readline()
if not line:
@@ -625,8 +642,15 @@ class PluginRemote:

sys.path.insert(0, zipPath)
if platform.system() != 'Windows':
site_packages = os.path.join(
python_prefix, 'lib', python_version, 'site-packages')
# local/lib/dist-packages seen on python3.10 on ubuntu.
# TODO: find a way to programatically get this value, or switch to venv.
dist_packages = os.path.join(
python_prefix, 'local', 'lib', python_version, 'dist-packages')
if os.path.exists(dist_packages):
site_packages = dist_packages
else:
site_packages = os.path.join(
python_prefix, 'lib', python_version, 'site-packages')
else:
site_packages = os.path.join(
python_prefix, 'Lib', 'site-packages')
@@ -1,19 +0,0 @@
export function asyncFilter<T>(asyncIterable: AsyncIterable<T>, predicate: (t: T) => Promise<boolean>): AsyncIterable<T> {
return {
async* [Symbol.asyncIterator]() {
for await (const value of asyncIterable) {
if (await predicate(value)) {
yield value;
}
}
}
}
}

export async function asyncFind<T>(asyncIterable: AsyncIterable<T>, predicate: (t: T) => Promise<boolean>): Promise<T> {
for await (const value of asyncIterable) {
if (await predicate(value)) {
return value;
}
}
}
@@ -1,12 +0,0 @@
export function hasSameElements<T>(a: T[], b: T[]): boolean {
const s1 = new Set(a);
const s2 = new Set(b);
if (s1.size != s2.size)
return false;
for (const e of s1) {
if (!s2.has(e))
return false;
}

return true;
}
@@ -1,27 +1,4 @@
// Type definitions for abstract-leveldown 5.0
// Project: https://github.com/Level/abstract-leveldown
// Definitions by: Meirion Hughes <https://github.com/MeirionHughes>
// Daniel Byrne <https://github.com/danwbyrne>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3

export interface AbstractOptions {
// wtf is this?
readonly [k: string]: any;
}

export type ErrorCallback = (err: Error | undefined) => void;
export type ErrorValueCallback<V> = (err: Error | undefined, value: V) => void;
export type ErrorKeyValueCallback<K, V> = (err: Error | undefined, key: K, value: V) => void;

export interface AbstractOpenOptions extends AbstractOptions {
createIfMissing?: boolean;
errorIfExists?: boolean;
}

export interface AbstractGetOptions extends AbstractOptions {
asBuffer?: boolean;
}
import { GetOptions, Level, OpenOptions, PutOptions } from 'level';

export interface LevelDocument {
_id?: any;
@@ -32,265 +9,107 @@ export interface LevelDocumentConstructor<T extends LevelDocument> {
new(): T;
}

export interface AbstractLevelDOWN<K = any, V = any> /* extends AbstractOptions */ {
open(cb?: ErrorCallback): Promise<void>;
open(options: AbstractOpenOptions, cb?: ErrorCallback): Promise<void>;

close(cb?: ErrorCallback): void;

get(key: K, cb?: ErrorValueCallback<V>): Promise<V>;
get(key: K, options: AbstractGetOptions, cb?: ErrorValueCallback<V>): Promise<V>;

put(key: K, value: V, cb?: ErrorCallback): Promise<void>;
put(key: K, value: V, options: AbstractOptions, cb?: ErrorCallback): Promise<void>;

del(key: K, cb?: ErrorCallback): Promise<void>;
del(key: K, options: AbstractOptions, cb?: ErrorCallback): Promise<void>;

batch(): AbstractChainedBatch<K, V>;
batch(array: ReadonlyArray<AbstractBatch<K, V>>, cb?: ErrorCallback): AbstractChainedBatch<K, V>;
batch(
array: ReadonlyArray<AbstractBatch<K, V>>,
options: AbstractOptions,
cb?: ErrorCallback,
): AbstractChainedBatch<K, V>;

iterator(options?: AbstractIteratorOptions<K>): AbstractIterator<K, V>;

[Symbol.asyncIterator](): AsyncIterator<{ key: K, value: V }>;
nextId(): number;
tryGet<T extends LevelDocument>(documentConstructor: LevelDocumentConstructor<T>, _id: any, options?: AbstractGetOptions): Promise<T | undefined>;
getAll<T extends LevelDocument>(documentConstructor: LevelDocumentConstructor<T>, options?: AbstractGetOptions): AsyncIterable<T>;
upsert<T extends LevelDocument>(value: T, options?: AbstractOptions): Promise<T>;
remove<T extends LevelDocument>(value: T): Promise<void>;
removeId<T extends LevelDocument>(documentConstructor: LevelDocumentConstructor<T>, _id: any): Promise<void>;
removeAll<T extends LevelDocument>(documentConstructor: LevelDocumentConstructor<T>): Promise<void>;
getCount<T extends LevelDocument>(documentConstructor: LevelDocumentConstructor<T>, options?: AbstractGetOptions): Promise<number>;
}

export interface AbstractLevelDOWNConstructor {
// tslint:disable-next-line no-unnecessary-generics
new <K = any, V = any>(location: string): AbstractLevelDOWN<K, V>;
// tslint:disable-next-line no-unnecessary-generics
<K = any, V = any>(location: string): AbstractLevelDOWN<K, V>;
}

export interface AbstractIteratorOptions<K = any> extends AbstractOptions {
gt?: K;
gte?: K;
lt?: K;
lte?: K;
reverse?: boolean;
limit?: number;
keys?: boolean;
values?: boolean;
keyAsBuffer?: boolean;
valueAsBuffer?: boolean;
}

export type AbstractBatch<K = any, V = any> = PutBatch<K, V> | DelBatch<K, V>;

export interface PutBatch<K = any, V = any> {
readonly type: 'put';
readonly key: K;
readonly value: V;
}

export interface DelBatch<K = any, V = any> {
readonly type: 'del';
readonly key: K;
}

export interface AbstractChainedBatch<K = any, V = any> extends AbstractOptions {
put: (key: K, value: V) => this;
del: (key: K) => this;
clear: () => this;
write(cb?: ErrorCallback): any;
write(options: any, cb?: ErrorCallback): any;
}

export interface AbstractChainedBatchConstructor {
// tslint:disable-next-line no-unnecessary-generics
new <K = any, V = any>(db: any): AbstractChainedBatch<K, V>;
// tslint:disable-next-line no-unnecessary-generics
<K = any, V = any>(db: any): AbstractChainedBatch<K, V>;
}

export interface AbstractIterator<K, V> extends AbstractOptions {
db: AbstractLevelDOWN<K, V>;
next(cb?: ErrorKeyValueCallback<K, V>): this;
end(cb?: ErrorCallback): void;
}

export interface AbstractIteratorConstructor {
// tslint:disable-next-line no-unnecessary-generics
new <K = any, V = any>(db: any): AbstractIterator<K, V>;
// tslint:disable-next-line no-unnecessary-generics
<K = any, V = any>(db: any): AbstractIterator<K, V>;
}

export interface Level extends AbstractLevelDOWN {
readonly location: string;
readonly prefix: string;
readonly version: string | number;
destroy(location: string, cb?: (err: Error | undefined) => void): void;
destroy(location: string, prefix: string, cb?: (err: Error | undefined) => void): void;
}

interface LevelOptions {
readonly prefix?: string;
readonly version?: string | number;
}

interface LevelConstructor {
new(location: string, options?: LevelOptions, callback?: (err: Error) => void): Level;
(location: string, options?: LevelOptions, callback?: (err: Error) => void): Level;
}

declare const Level: LevelConstructor;

const level = require('level') as LevelConstructor;

function createLevelDocument(documentConstructor: any, json: any) {
const doc = new documentConstructor();
Object.assign(doc, JSON.parse(json));
return doc;
}

const wrapped = (location: string, options?: LevelOptions, callback?: (err: Error) => void) => {
const ret = level(location, options, callback);
ret.tryGet = async (documentConstructor: any, _id: any, options?: AbstractGetOptions): Promise<any> => {
export class WrappedLevel extends Level<string, string | number> {
curId: number;

async open(): Promise<void>;
async open(options?: OpenOptions): Promise<void> {
await super.open(options);
try {
this.curId = parseInt(await this.get('_id') as string);
}
catch (e) {
}
if (!this.curId)
this.curId = 0;
}

async tryGet(documentConstructor: any, _id: any, options?: GetOptions<string, string | number>) {
try {
const _documentType = documentConstructor.name;
const key = `${_documentType}/${_id}`;
const json = await ret.get(key, options);
const json = await this.get(key, options)
return createLevelDocument(documentConstructor, json);
}
catch (e) {
}
}

const iterable = {
async*[Symbol.asyncIterator]() {
const iterator = ret.iterator();
try {
while (true) {
const { key, value } = await new Promise<{ key: any, value: any }>((resolve, reject) => {
iterator.next((err, key, value) => {
if (err) {
reject(err);
}
else {
resolve({ key, value });
}
})
});

if (key == null && value == null)
break;
yield {
key,
value,
}
}
}
finally {
await new Promise(resolve => iterator.end(resolve));
}
}
};

ret[Symbol.asyncIterator] = iterable[Symbol.asyncIterator] as any;
ret.getAll = (documentConstructor: any, options?: AbstractGetOptions): AsyncIterable<any> => {
async* getAll(documentConstructor: any): AsyncIterable<any> {
const _documentType = documentConstructor.name;
const prefix = `${_documentType}/`;
return {
async*[Symbol.asyncIterator]() {
for await (const entry of ret) {
if (entry.key.startsWith(prefix)) {
const doc = createLevelDocument(documentConstructor, entry.value);
if (doc._documentType === _documentType) {
yield doc;
}
}
for await (const [key, value] of this.iterator()) {
if (key.startsWith(prefix)) {
const doc = createLevelDocument(documentConstructor, value);
if (doc._documentType === _documentType) {
yield doc;
}
}
}
}

ret.getCount = async (documentConstructor: any, options?: AbstractGetOptions): Promise<any> => {
async getCount(documentConstructor: any) {
let count = 0;
for await (const doc of ret.getAll(documentConstructor)) {
for await (const doc of this.getAll(documentConstructor)) {
count++;
}
return count;
}

let curId: number;

const oldOpen = ret.open.bind(ret);
(ret as any).open = async (...args: any) => {
try {
curId = parseInt(await ret.get('_id'));
}
catch (e) {
}
if (!curId)
curId = 0;
return oldOpen(...args);
}

ret.nextId = () => {
if (typeof curId !== 'number')
nextId() {
if (typeof this.curId !== 'number')
throw new Error('curId is not a number');
return ++curId;
return ++this.curId;
}

const saveId = async () => {
return ret.put("_id", curId);
async saveId() {
return this.put("_id", this.curId);
}

ret.upsert = async (value: LevelDocument, options?: AbstractOptions): Promise<any> => {
async upsert(value: LevelDocument, options?: PutOptions<string, string | number>): Promise<any> {
const _documentType = value.constructor.name;
if (!value._id)
value._id = ret.nextId();
value._id = this.nextId();

await saveId();
await this.saveId();

value._documentType = _documentType;
const key = `${_documentType}/${value._id}`;
await ret.put(key, JSON.stringify(value), options);
await this.put(key, JSON.stringify(value), options);
return value;
};

ret.remove = async (value: LevelDocument) => {
async remove(value: LevelDocument) {
const _documentType = value.constructor.name;
let { _id } = value;
const key = `${_documentType}/${_id}`;
await ret.del(key);
await this.del(key);
}

ret.removeId = async (documentConstructor: LevelDocumentConstructor<any>, _id: any) => {
async removeId(documentConstructor: LevelDocumentConstructor<any>, _id: any) {
const _documentType = documentConstructor.name;
const key = `${_documentType}/${_id}`;
await ret.del(key);
await this.del(key);
}

ret.removeAll = async (documentConstructor: LevelDocumentConstructor<any>) => {
async removeAll(documentConstructor: LevelDocumentConstructor<any>) {
const _documentType = documentConstructor.name;
const prefix = `${_documentType}/`;
for await (const entry of ret) {
if (entry.key.startsWith(prefix)) {
const doc = createLevelDocument(documentConstructor, entry.value);
for await (const [key, value] of this.iterator()) {
if (key.startsWith(prefix)) {
const doc = createLevelDocument(documentConstructor, value);
if (doc._documentType === _documentType) {
await ret.del(entry.key);
await this.del(key);
}
}
}
}

return ret;
};

export default wrapped as LevelConstructor;
export default WrappedLevel;
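The level.ts rewrite above drops the hand-rolled callback iterator and the copied abstract-leveldown typings in favor of the `Level` class from level 8.x, whose iterator is directly async-iterable and yields `[key, value]` pairs. A small usage sketch against the level 8 API, in the same spirit as `WrappedLevel.getAll` (the database path, prefix, and document handling here are made up for illustration):

```typescript
import { Level } from 'level';

// Illustrative only: iterate entries under a key prefix with level 8's async iterator.
async function listByPrefix(dbPath: string, prefix: string) {
    const db = new Level<string, string>(dbPath, { valueEncoding: 'utf8' });
    await db.open();
    const docs: any[] = [];
    // no callbacks or manual iterator.end(): the iterator is async-iterable.
    for await (const [key, value] of db.iterator()) {
        if (key.startsWith(prefix))
            docs.push(JSON.parse(value));
    }
    await db.close();
    return docs;
}
```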
@@ -4,13 +4,12 @@ import pathToFfmpeg from 'ffmpeg-static';
import fs from 'fs';
import https from 'https';
import mimeType from 'mime';
import mkdirp from "mkdirp";
import Graph from 'node-dijkstra';
import os from 'os';
import path from 'path';
import MimeType from 'whatwg-mimetype';
import { MediaObjectRemote } from "./plugin-api";
import { MediaObject } from "./mediaobject";
import { MediaObjectRemote } from "./plugin-api";

function typeMatches(target: string, candidate: string): boolean {
// candidate will accept anything
@@ -202,7 +201,9 @@ export abstract class MediaManagerBase implements MediaManager {
if (!filesPath)
throw new Error('SCRYPTED_PLUGIN_VOLUME env variable not set?');
const ret = path.join(filesPath, 'files');
mkdirp.sync(ret);
await fs.promises.mkdir(ret, {
recursive: true,
});
return ret;
}
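The media.ts hunk above is part of dropping the mkdirp dependency: since Node 10.12, `fs.promises.mkdir` and `fs.mkdirSync` accept `recursive: true` and behave like `mkdir -p`. A small sketch of the equivalent call (the helper name and path are illustrative):

```typescript
import fs from 'fs';
import path from 'path';

// mkdirp.sync(dir)   ->  fs.mkdirSync(dir, { recursive: true })
// await mkdirp(dir)  ->  await fs.promises.mkdir(dir, { recursive: true })
async function ensureFilesDir(volume: string) {
    const dir = path.join(volume, 'files');
    await fs.promises.mkdir(dir, { recursive: true });
    return dir;
}
```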
@@ -371,8 +371,11 @@ export class PluginDeviceProxyHandler implements PrimitiveProxyHandler<any>, Scr
if (found) {
const { mixin, entry } = found;
const { proxy } = entry;
if (!proxy)
throw new PluginError(`device is unavailable ${this.id} (mixin ${mixin.mixinProviderId})`);
if (!proxy) {
const pluginDevice = this.scrypted.findPluginDeviceById(this.id);
const name = pluginDevice ? 'Unknown Device' : getState(pluginDevice, ScryptedInterfaceProperty.name);
throw new PluginError(`device "${name}" is unavailable [id: ${this.id}] [mixin: ${mixin.mixinProviderId}]`);
}
return proxy[method](...argArray);
}
@@ -1,13 +1,11 @@
import os from 'os';
import { Device, EngineIOHandler } from '@scrypted/types';
import AdmZip from 'adm-zip';
import crypto from 'crypto';
import * as io from 'engine.io';
import fs from 'fs';
import mkdirp from 'mkdirp';
import net from 'net';
import os from 'os';
import path from 'path';
import rimraf from 'rimraf';
import { Duplex } from 'stream';
import WebSocket from 'ws';
import { Plugin } from '../db-types';
@@ -205,9 +203,17 @@ export class PluginHost {
{
const zipDirTmp = zipDir + '.tmp';
if (!fs.existsSync(zipFile)) {
rimraf.sync(zipDirTmp);
rimraf.sync(zipDir);
mkdirp.sync(zipDirTmp);
fs.rmSync(zipDirTmp, {
recursive: true,
force: true,
});
fs.rmSync(zipDir, {
recursive: true,
force: true,
});
fs.mkdirSync(zipDirTmp, {
recursive: true,
});
fs.writeFileSync(path.join(zipDirTmp, zipFilename), zipBuffer);
const admZip = new AdmZip(zipBuffer);
admZip.extractAllTo(path.join(zipDirTmp, 'unzipped'), true);
@@ -294,6 +300,7 @@ export class PluginHost {
throw new Error(`Unsupported Scrypted runtime: ${this.packageJson.scrypted.runtime}`);

this.worker = workerHost(this.scrypted.mainFilename, this.pluginId, {
packageJson: this.packageJson,
env,
pluginDebug,
});
@@ -356,15 +363,23 @@ export class PluginHost {
}
});

const startupTime = Date.now();
// the plugin is expected to send process stats every 10 seconds.
// this can be used as a check for liveness.
let lastStats: number;
const statsInterval = setInterval(async () => {
// plugin may take a while to install, so wait
// for 1 stats report before starting the watchdog.
if (!lastStats)
const now = Date.now();
// plugin may take a while to install, so wait 10 minutes.
// after that, require 1 minute checkins.
if (!lastStats) {
if (now - startupTime > 10 * 60 * 1000) {
const logger = await this.api.getLogger(undefined);
logger.log('e', 'plugin failed to start in a timely manner. restarting.');
this.api.requestRestart();
}
return;
if (!pluginDebug && (lastStats + 60000 < Date.now())) {
}
if (!pluginDebug && (lastStats + 60000 < now)) {
const logger = await this.api.getLogger(undefined);
logger.log('e', 'plugin is unresponsive. restarting.');
this.api.requestRestart();
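The plugin-host change above turns the stats check into a two-phase watchdog: a 10-minute grace period for the first stats report after startup, then a one-minute heartbeat requirement. A condensed sketch of the same logic; the 10-minute and 60-second windows come from the diff, while the wrapper function, the 10-second polling interval, and the restart callback are assumptions for illustration:

```typescript
// Sketch of the liveness watchdog: grace period for startup, then 60s heartbeats.
function startWatchdog(requestRestart: () => void, pluginDebug = false) {
    const startupTime = Date.now();
    let lastStats: number | undefined;

    const interval = setInterval(() => {
        const now = Date.now();
        if (!lastStats) {
            // no stats yet: only restart if startup has taken over 10 minutes.
            if (now - startupTime > 10 * 60 * 1000)
                requestRestart();
            return;
        }
        // after the first report, require a check-in every minute.
        if (!pluginDebug && lastStats + 60000 < now)
            requestRestart();
    }, 10000);

    return {
        reportStats: () => { lastStats = Date.now(); },
        stop: () => clearInterval(interval),
    };
}
```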
Some files were not shown because too many files have changed in this diff.