Compare commits

..

38 Commits

Author SHA1 Message Date
Koushik Dutta
c70375db06 prerelease 2023-03-27 09:37:39 -07:00
Koushik Dutta
2c23021d40 server: catch/print startup errors to console and not just events tab 2023-03-27 09:37:29 -07:00
Koushik Dutta
84a4ef4539 mac: reorder unpin 2023-03-27 09:02:37 -07:00
Koushik Dutta
7f3db0549b python-codecs: update requirements.txt 2023-03-27 08:52:20 -07:00
Koushik Dutta
de0e1784a3 amcrest: fix camera default name 2023-03-27 08:50:01 -07:00
Koushik Dutta
5a8798638e homekit: do not start two way audio if only an rtcp packet is received 2023-03-27 08:48:40 -07:00
Koushik Dutta
14da49728c videoanalysis: remove old pipeline 2023-03-26 23:28:52 -07:00
Koushik Dutta
55423b2d09 videoanalysis: yuv/gray extraction fixes 2023-03-26 23:03:08 -07:00
Koushik Dutta
596106247b python-codecs: fix libav and pil issues 2023-03-26 22:43:13 -07:00
Koushik Dutta
5472d90368 opencv: beta 2023-03-26 19:21:22 -07:00
Koushik Dutta
fcf58413fc prebeta 2023-03-26 12:25:30 -07:00
Koushik Dutta
0d03b91753 server: add query tokens to env auth 2023-03-26 12:25:23 -07:00
Koushik Dutta
2fd088e4d6 prebeta 2023-03-26 12:09:21 -07:00
Koushik Dutta
c6933198b2 server: autocreate admin if specified by env 2023-03-26 12:09:15 -07:00
Koushik Dutta
210e684a22 docker: fix watchtower scope https://github.com/koush/scrypted/issues/662 2023-03-26 11:38:38 -07:00
Koushik Dutta
53cc4b6ef3 python-codecs: fix older version of pil 2023-03-26 11:36:09 -07:00
Koushik Dutta
d58d138a68 mac: trim deps, unpin hacked up gst libs 2023-03-25 22:03:14 -07:00
Koushik Dutta
c0199a2b76 mac: remove gstreamer hack from install script 2023-03-25 21:55:57 -07:00
Koushik Dutta
badb1905ce prerelease 2023-03-25 21:54:40 -07:00
Koushik Dutta
735c2dce7b Merge branch 'main' of github.com:koush/scrypted 2023-03-25 21:52:56 -07:00
Koushik Dutta
ffae3f246f python-codecs: fix mac crash 2023-03-25 21:52:51 -07:00
Koushik Dutta
31b424f89f server: mac python fixes 2023-03-25 21:52:32 -07:00
Brett Jia
3b7acc3a90 homekit: merge child lights into cameras (#659) 2023-03-25 20:09:42 -07:00
Koushik Dutta
7e66d1ac7f prebeta 2023-03-25 19:45:11 -07:00
Koushik Dutta
a613da069e server: relax failure on python arch mismatch 2023-03-25 19:45:05 -07:00
Koushik Dutta
40b73c6589 prebeta 2023-03-25 18:42:52 -07:00
Koushik Dutta
ef16ca83a2 server: detect python architecture vs machine mismatch 2023-03-25 18:42:39 -07:00
Koushik Dutta
76bf1d0d3f docker: rollback linux changes 2023-03-25 18:35:40 -07:00
Koushik Dutta
3d5ccf25d1 server: log host os specs 2023-03-25 15:05:08 -07:00
Koushik Dutta
36fcb713d9 videoanalysis: ffmpeg frame generator fixes 2023-03-25 15:04:40 -07:00
Koushik Dutta
e306631850 docker: arm fixes 2023-03-25 14:40:37 -07:00
Koushik Dutta
17400fa886 docker: arm fixes 2023-03-25 14:37:17 -07:00
Koushik Dutta
c6dc628616 docker: arm fixes 2023-03-25 14:31:40 -07:00
Koushik Dutta
f974653e73 videoanalysis: make new pipeline the default 2023-03-25 12:05:35 -07:00
Koushik Dutta
b83880a8a3 Merge branch 'main' of github.com:koush/scrypted 2023-03-25 11:34:37 -07:00
Koushik Dutta
ee4d8f52df pam-diff: fixup score reporting 2023-03-25 11:34:33 -07:00
Brett Jia
3854b75c6e arlo: video clips + virtual security system for sirens (#656)
* fix doorbell device type

* bump 0.7.1 for beta

* standalone camera fixes

* bump 0.7.2 for beta

* more type annotations + trickle discover all devices

* fetch arlo library clips

* log options

* cache library at lower level and fetch clips on demand

* move library timedelta range lower in stack

* wip siren as security system

* virtual security system and tweaks

* vss documentation and settings

* expand vss usage docs

* more docs changes

* force homekit and scrypted to update given vss and siren state

* RE-ENABLING SIREN!!!

* bump 0.7.3 for beta

* bump 0.7.3 for release
2023-03-25 11:13:28 -07:00
Koushik Dutta
07c3173506 docker: fix pip execution command 2023-03-25 10:43:12 -07:00
53 changed files with 700 additions and 791 deletions

View File

@@ -59,6 +59,10 @@ RUN apt-get -y install \
# armv7l does not have wheels for any of these
# and compile times would take forever, if it works at all.
# furthermore, it's possible to run 32bit docker on 64bit arm,
# which causes weird behavior in python which looks at the arch version
# which still reports 64bit, even if running in 32bit docker.
# this scenario is not supported and will be reported at runtime.
RUN if [ "$(uname -m)" = "armv7l" ]; \
then \
apt-get -y install \
@@ -73,7 +77,7 @@ RUN if [ "$(uname -m)" = "armv7l" ]; \
RUN python3 -m pip install --upgrade pip
# pyvips is broken on x86 due to mismatch ffi
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
RUN pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install aiofiles debugpy typing_extensions psutil
################################################################
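The comment above describes the unsupported case this change targets: a 32-bit userland (and 32-bit Python) inside Docker on a 64-bit ARM kernel, where the kernel still reports a 64-bit machine type. A minimal, hypothetical sketch of how that mismatch could be detected at runtime (illustrative only, not the server's actual check):

import platform
import struct

def arch_mismatch() -> bool:
    # platform.machine() reflects the kernel architecture (e.g. "aarch64"),
    # even when the container userland and the interpreter are 32-bit
    kernel_is_64bit = platform.machine() in ("aarch64", "arm64", "x86_64")
    # pointer size shows what this interpreter was actually built for
    interpreter_is_64bit = struct.calcsize("P") * 8 == 64
    return kernel_is_64bit and not interpreter_is_64bit

if __name__ == "__main__":
    if arch_mismatch():
        print("64-bit kernel with 32-bit python: unsupported, reported at runtime")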

View File

@@ -90,4 +90,4 @@ services:
# Must match the port in the auto update url above.
- 10444:8080
# check for updates once an hour (interval is in seconds)
command: --interval 3600 --cleanup
command: --interval 3600 --cleanup --scope scrypted

View File

@@ -44,51 +44,25 @@ RUN_IGNORE brew install node@18
RUN brew install libvips
# dlib
RUN brew install cmake
# gstreamer plugins
RUN_IGNORE brew install gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-plugins-ugly
# gst python bindings
RUN_IGNORE brew install gst-python
# python image library
# todo: consider removing this
RUN_IGNORE brew install pillow
### HACK WORKAROUND
### https://github.com/koush/scrypted/issues/544
brew unpin gstreamer
brew unpin gst-python
brew unpin gst-plugins-ugly
brew unpin gst-plugins-good
brew unpin gst-plugins-base
brew unpin gst-plugins-good
brew unpin gst-plugins-bad
brew unpin gst-plugins-ugly
brew unpin gst-libav
brew unlink gstreamer
brew unlink gst-python
brew unlink gst-plugins-ugly
brew unlink gst-plugins-good
brew unlink gst-plugins-base
brew unlink gst-plugins-bad
brew unlink gst-libav
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gstreamer.rb && brew install ./gstreamer.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-python.rb && brew install ./gst-python.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-plugins-ugly.rb && brew install ./gst-plugins-ugly.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-plugins-good.rb && brew install ./gst-plugins-good.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-plugins-base.rb && brew install ./gst-plugins-base.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-plugins-bad.rb && brew install ./gst-plugins-bad.rb
curl -O https://raw.githubusercontent.com/Homebrew/homebrew-core/49a8667f0c1a6579fe887bc0fa1c0ce682eb01c8/Formula/gst-libav.rb && brew install ./gst-libav.rb
brew pin gstreamer
brew pin gst-python
brew pin gst-plugins-ugly
brew pin gst-plugins-good
brew pin gst-plugins-base
brew pin gst-plugins-bad
brew pin gst-libav
brew unpin gst-python
### END HACK WORKAROUND
# gstreamer plugins
RUN_IGNORE brew install gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-libav
# gst python bindings
RUN_IGNORE brew install gst-python
ARCH=$(arch)
if [ "$ARCH" = "arm64" ]
then

View File

@@ -56,6 +56,10 @@ RUN apt-get -y install \
# armv7l does not have wheels for any of these
# and compile times would take forever, if it works at all.
# furthermore, it's possible to run 32bit docker on 64bit arm,
# which causes weird behavior in python which looks at the arch version
# which still reports 64bit, even if running in 32bit docker.
# this scenario is not supported and will be reported at runtime.
RUN if [ "$(uname -m)" = "armv7l" ]; \
then \
apt-get -y install \
@@ -70,7 +74,7 @@ RUN if [ "$(uname -m)" = "armv7l" ]; \
RUN python3 -m pip install --upgrade pip
# pyvips is broken on x86 due to mismatch ffi
# https://stackoverflow.com/questions/62658237/it-seems-that-the-version-of-the-libffi-library-seen-at-runtime-is-different-fro
RUN pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install --force-reinstall --no-binary :all: cffi
RUN python3 -m pip install aiofiles debugpy typing_extensions psutil
################################################################

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.119",
"version": "0.0.120",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/amcrest",
"version": "0.0.119",
"version": "0.0.120",
"license": "Apache",
"dependencies": {
"@koush/axios-digest-auth": "^0.8.5",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/amcrest",
"version": "0.0.119",
"version": "0.0.120",
"description": "Amcrest Plugin for Scrypted",
"author": "Scrypted",
"license": "Apache",

View File

@@ -616,7 +616,7 @@ class AmcrestProvider extends RtspProvider {
this.console.warn('Error probing two way audio', e);
}
}
settings.newCamera ||= 'Hikvision Camera';
settings.newCamera ||= 'Amcrest Camera';
nativeId = await super.createDevice(settings, nativeId);

View File

@@ -22,6 +22,6 @@
//"scrypted.volumeRoot": "${config:scrypted.serverRoot}/volume",
"python.analysis.extraPaths": [
"./node_modules/@scrypted/sdk/scrypted_python"
"./node_modules/@scrypted/sdk/types/scrypted_python"
]
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/arlo",
"version": "0.7.0",
"version": "0.7.4",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/arlo",
"version": "0.7.0",
"version": "0.7.4",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/arlo",
"version": "0.7.0",
"version": "0.7.4",
"description": "Arlo Plugin for Scrypted",
"keywords": [
"scrypted",

View File

@@ -29,7 +29,8 @@ from .sse_stream_async import EventStream
from .logging import logger
# Import all of the other stuff.
from datetime import datetime
from datetime import datetime, timedelta
from cachetools import cached, TTLCache
import asyncio
import sys
@@ -735,3 +736,52 @@ class Arlo(object):
"pattern": "alarm"
}
})
def GetLibrary(self, device, from_date: datetime, to_date: datetime):
"""
This call returns the following:
presignedContentUrl is a link to the actual video in Amazon AWS.
presignedThumbnailUrl is a link to the thumbnail .jpg of the actual video in Amazon AWS.
[
{
"mediaDurationSecond": 30,
"contentType": "video/mp4",
"name": "XXXXXXXXXXXXX",
"presignedContentUrl": "https://arlos3-prod-z2.s3.amazonaws.com/XXXXXXX_XXXX_XXXX_XXXX_XXXXXXXXXXXXX/XXX-XXXXXXX/XXXXXXXXXXXXX/recordings/XXXXXXXXXXXXX.mp4?AWSAccessKeyId=XXXXXXXXXXXXXXXXXXXX&Expires=1472968703&Signature=XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
"lastModified": 1472881430181,
"localCreatedDate": XXXXXXXXXXXXX,
"presignedThumbnailUrl": "https://arlos3-prod-z2.s3.amazonaws.com/XXXXXXX_XXXX_XXXX_XXXX_XXXXXXXXXXXXX/XXX-XXXXXXX/XXXXXXXXXXXXX/recordings/XXXXXXXXXXXXX_thumb.jpg?AWSAccessKeyId=XXXXXXXXXXXXXXXXXXXX&Expires=1472968703&Signature=XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
"reason": "motionRecord",
"deviceId": "XXXXXXXXXXXXX",
"createdBy": "XXXXXXXXXXXXX",
"createdDate": "20160903",
"timeZone": "America/Chicago",
"ownerId": "XXX-XXXXXXX",
"utcCreatedDate": XXXXXXXXXXXXX,
"currentState": "new",
"mediaDuration": "00:00:30"
}
]
"""
# give the query range a bit of buffer
from_date_internal = from_date - timedelta(days=1)
to_date_internal = to_date + timedelta(days=1)
return [
result for result in
self._getLibraryCached(from_date_internal.strftime("%Y%m%d"), to_date_internal.strftime("%Y%m%d"))
if result["deviceId"] == device["deviceId"]
and datetime.fromtimestamp(int(result["name"]) / 1000.0) <= to_date
and datetime.fromtimestamp(int(result["name"]) / 1000.0) >= from_date
]
@cached(cache=TTLCache(maxsize=512, ttl=60))
def _getLibraryCached(self, from_date: str, to_date: str):
logger.debug(f"Library cache miss for {from_date}, {to_date}")
return self.request.post(
f'https://{self.BASE_URL}/hmsweb/users/library',
{
'dateFrom': from_date,
'dateTo': to_date
}
)
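The GetLibrary/_getLibraryCached split above keys a 60-second TTL cache on day-granular date strings and pads the requested window by a day on each side, so nearby queries reuse one Arlo library fetch. A stripped-down sketch of that caching idea, assuming only the cachetools dependency added in this change (function names here are illustrative):

from datetime import datetime, timedelta
from cachetools import cached, TTLCache

@cached(cache=TTLCache(maxsize=512, ttl=60))
def fetch_library(date_from: str, date_to: str) -> list:
    # stand-in for the POST to /hmsweb/users/library; identical
    # (date_from, date_to) pairs within 60 seconds reuse this result
    print(f"cache miss for {date_from}..{date_to}")
    return []

def get_clips(start: datetime, end: datetime) -> list:
    # pad the window so the day-granular cache key still covers clips
    # recorded near the range boundaries
    return fetch_library((start - timedelta(days=1)).strftime("%Y%m%d"),
                         (end + timedelta(days=1)).strftime("%Y%m%d"))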

View File

@@ -1,8 +1,18 @@
from __future__ import annotations
import traceback
from typing import List, TYPE_CHECKING
from scrypted_sdk import ScryptedDeviceBase
from scrypted_sdk.types import Device
from .logging import ScryptedDeviceLoggerMixin
from .util import BackgroundTaskMixin
from .provider import ArloProvider
if TYPE_CHECKING:
# https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
from .provider import ArloProvider
class ArloDeviceBase(ScryptedDeviceBase, ScryptedDeviceLoggerMixin, BackgroundTaskMixin):
nativeId: str = None
@@ -22,11 +32,11 @@ class ArloDeviceBase(ScryptedDeviceBase, ScryptedDeviceLoggerMixin, BackgroundTa
self.provider = provider
self.logger.setLevel(self.provider.get_current_log_level())
def __del__(self):
def __del__(self) -> None:
self.stop_subscriptions = True
self.cancel_pending_tasks()
def get_applicable_interfaces(self) -> list:
def get_applicable_interfaces(self) -> List[str]:
"""Returns the list of Scrypted interfaces that applies to this device."""
return []
@@ -34,7 +44,7 @@ class ArloDeviceBase(ScryptedDeviceBase, ScryptedDeviceLoggerMixin, BackgroundTa
"""Returns the Scrypted device type that applies to this device."""
return ""
def get_device_manifest(self) -> dict:
def get_device_manifest(self) -> Device:
"""Returns the Scrypted device manifest representing this device."""
parent = None
if self.arlo_device.get("parentId") and self.arlo_device["parentId"] != self.arlo_device["deviceId"]:
@@ -54,6 +64,17 @@ class ArloDeviceBase(ScryptedDeviceBase, ScryptedDeviceLoggerMixin, BackgroundTa
"providerNativeId": parent,
}
def get_builtin_child_device_manifests(self) -> list:
def get_builtin_child_device_manifests(self) -> List[Device]:
"""Returns the list of child device manifests representing hardware features built into this device."""
return []
return []
@classmethod
def async_print_exception_guard(self, fn):
"""Decorator to print an exception's stack trace before re-raising the exception."""
async def wrapped(*args, **kwargs):
try:
return await fn(*args, **kwargs)
except Exception:
traceback.print_exc()
raise
return wrapped
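async_print_exception_guard wraps an async method so any exception is printed before being re-raised, which keeps stack traces visible when the coroutine runs inside a framework callback. A self-contained usage sketch (class and method names are invented for illustration):

import asyncio
import traceback

def async_print_exception_guard(fn):
    """Print an exception's stack trace before re-raising it."""
    async def wrapped(*args, **kwargs):
        try:
            return await fn(*args, **kwargs)
        except Exception:
            traceback.print_exc()
            raise
    return wrapped

class ExampleDevice:
    @async_print_exception_guard
    async def turn_on(self) -> None:
        raise RuntimeError("boom")

# asyncio.run(ExampleDevice().turn_on())  # traceback is printed, then the error propagates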

View File

@@ -1,20 +1,33 @@
from scrypted_sdk import ScryptedDeviceBase
from scrypted_sdk.types import DeviceProvider, ScryptedInterface, ScryptedDeviceType
from __future__ import annotations
from .device_base import ArloDeviceBase
from .siren import ArloSiren
from typing import List, TYPE_CHECKING
from scrypted_sdk import ScryptedDeviceBase
from scrypted_sdk.types import Device, DeviceProvider, ScryptedInterface, ScryptedDeviceType
from .base import ArloDeviceBase
from .vss import ArloSirenVirtualSecuritySystem
if TYPE_CHECKING:
# https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
from .provider import ArloProvider
class ArloBasestation(ArloDeviceBase, DeviceProvider):
siren: ArloSiren = None
vss: ArloSirenVirtualSecuritySystem = None
def get_applicable_interfaces(self) -> list:
def __init__(self, nativeId: str, arlo_basestation: dict, provider: ArloProvider) -> None:
super().__init__(nativeId=nativeId, arlo_device=arlo_basestation, arlo_basestation=arlo_basestation, provider=provider)
def get_applicable_interfaces(self) -> List[str]:
return [ScryptedInterface.DeviceProvider.value]
def get_device_type(self) -> str:
return ScryptedDeviceType.DeviceProvider.value
def get_builtin_child_device_manifests(self) -> list:
def get_builtin_child_device_manifests(self) -> List[Device]:
vss_id = f'{self.arlo_device["deviceId"]}.vss'
vss = self.get_or_create_vss(vss_id)
return [
{
"info": {
@@ -23,22 +36,23 @@ class ArloBasestation(ArloDeviceBase, DeviceProvider):
"firmware": self.arlo_device.get("firmwareVersion"),
"serialNumber": self.arlo_device["deviceId"],
},
"nativeId": f'{self.arlo_device["deviceId"]}.siren',
"name": f'{self.arlo_device["deviceName"]} Siren',
"interfaces": [ScryptedInterface.OnOff.value],
"type": ScryptedDeviceType.Siren.value,
"nativeId": vss_id,
"name": f'{self.arlo_device["deviceName"]} Siren Virtual Security System',
"interfaces": vss.get_applicable_interfaces(),
"type": vss.get_device_type(),
"providerNativeId": self.nativeId,
}
]
},
] + vss.get_builtin_child_device_manifests()
async def getDevice(self, nativeId: str) -> ScryptedDeviceBase:
if not nativeId.startswith(self.nativeId):
# must be a camera, so get it from the provider
return await self.provider.getDevice(nativeId)
return self.get_or_create_vss(nativeId)
if nativeId.endswith("siren"):
if not self.siren:
self.siren = ArloSiren(nativeId, self.arlo_device, self.arlo_basestation, self.provider)
return self.siren
return None
def get_or_create_vss(self, nativeId: str) -> ArloSirenVirtualSecuritySystem:
if not nativeId.endswith("vss"):
return None
if not self.vss:
self.vss = ArloSirenVirtualSecuritySystem(nativeId, self.arlo_device, self.arlo_basestation, self.provider)
return self.vss

View File

@@ -1,26 +1,32 @@
from __future__ import annotations
import asyncio
from datetime import datetime, timedelta
import json
import threading
import time
from typing import List, TYPE_CHECKING
import scrypted_arlo_go
import scrypted_sdk
from scrypted_sdk.types import Settings, Camera, VideoCamera, MotionSensor, Battery, MediaObject, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
from scrypted_sdk.types import Setting, Settings, Camera, VideoCamera, VideoClips, VideoClip, VideoClipOptions, MotionSensor, Battery, MediaObject, ResponsePictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, ScryptedInterface, ScryptedDeviceType
from .device_base import ArloDeviceBase
from .provider import ArloProvider
from .base import ArloDeviceBase
from .child_process import HeartbeatChildProcess
from .util import BackgroundTaskMixin
if TYPE_CHECKING:
# https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
from .provider import ArloProvider
class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, MotionSensor, Battery):
class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, VideoClips, MotionSensor, Battery):
timeout: int = 30
intercom_session = None
def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider) -> None:
super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_basestation, provider=provider)
self.start_motion_subscription()
self.start_battery_subscription()
@@ -42,13 +48,14 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, MotionSensor, Ba
self.provider.arlo.SubscribeToBatteryEvents(self.arlo_basestation, self.arlo_device, callback)
)
def get_applicable_interfaces(self) -> list:
def get_applicable_interfaces(self) -> List[str]:
results = set([
ScryptedInterface.VideoCamera.value,
ScryptedInterface.Camera.value,
ScryptedInterface.MotionSensor.value,
ScryptedInterface.Battery.value,
ScryptedInterface.Settings.value,
ScryptedInterface.VideoClips.value,
])
if self.two_way_audio:
@@ -85,7 +92,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, MotionSensor, Ba
else:
return True
async def getSettings(self) -> list:
async def getSettings(self) -> List[Setting]:
if self._can_push_to_talk():
return [
{
@@ -111,7 +118,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, MotionSensor, Ba
self.storage.setItem(key, value == "true")
await self.provider.discoverDevices()
async def getPictureOptions(self) -> list:
async def getPictureOptions(self) -> List[ResponsePictureOptions]:
return []
async def takePicture(self, options: dict = None) -> MediaObject:
@@ -131,7 +138,7 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, MotionSensor, Ba
return await scrypted_sdk.mediaManager.createMediaObject(str.encode(pic_url), ScryptedMimeTypes.Url.value)
async def getVideoStreamOptions(self) -> list:
async def getVideoStreamOptions(self) -> List[ResponseMediaStreamOptions]:
return [
{
"id": 'default',
@@ -200,21 +207,85 @@ class ArloCamera(ArloDeviceBase, Settings, Camera, VideoCamera, MotionSensor, Ba
except Exception as e:
self.logger.error(e)
async def startIntercom(self, media):
async def startIntercom(self, media) -> None:
self.logger.info("Starting intercom")
self.intercom_session = ArloCameraRTCSignalingSession(self)
await self.intercom_session.initialize_push_to_talk(media)
async def stopIntercom(self):
async def stopIntercom(self) -> None:
self.logger.info("Stopping intercom")
if self.intercom_session is not None:
await self.intercom_session.shutdown()
self.intercom_session = None
def _can_push_to_talk(self):
def _can_push_to_talk(self) -> bool:
# Right now, only implement push to talk for basestation cameras
return self.arlo_device["deviceId"] != self.arlo_device["parentId"]
async def getVideoClip(self, videoId: str) -> MediaObject:
self.logger.info(f"Getting video clip {videoId}")
id_as_time = int(videoId) / 1000.0
start = datetime.fromtimestamp(id_as_time) - timedelta(seconds=10)
end = datetime.fromtimestamp(id_as_time) + timedelta(seconds=10)
library = self.provider.arlo.GetLibrary(self.arlo_device, start, end)
for recording in library:
if videoId == recording["name"]:
return await scrypted_sdk.mediaManager.createMediaObjectFromUrl(recording["presignedContentUrl"])
self.logger.warn(f"Clip {videoId} not found")
return None
async def getVideoClipThumbnail(self, thumbnailId: str) -> MediaObject:
self.logger.info(f"Getting video clip thumbnail {thumbnailId}")
id_as_time = int(thumbnailId) / 1000.0
start = datetime.fromtimestamp(id_as_time) - timedelta(seconds=10)
end = datetime.fromtimestamp(id_as_time) + timedelta(seconds=10)
library = self.provider.arlo.GetLibrary(self.arlo_device, start, end)
for recording in library:
if thumbnailId == recording["name"]:
return await scrypted_sdk.mediaManager.createMediaObjectFromUrl(recording["presignedThumbnailUrl"])
self.logger.warn(f"Clip thumbnail {thumbnailId} not found")
return None
async def getVideoClips(self, options: VideoClipOptions = None) -> List[VideoClip]:
self.logger.info(f"Fetching remote video clips {options}")
start = datetime.fromtimestamp(options["startTime"] / 1000.0)
end = datetime.fromtimestamp(options["endTime"] / 1000.0)
library = self.provider.arlo.GetLibrary(self.arlo_device, start, end)
clips = []
for recording in library:
clip = {
"duration": recording["mediaDurationSecond"] * 1000.0,
"id": recording["name"],
"thumbnailId": recording["name"],
"videoId": recording["name"],
"startTime": recording["utcCreatedDate"],
"description": recording["reason"],
"resources": {
"thumbnail": {
"href": recording["presignedThumbnailUrl"],
},
"video": {
"href": recording["presignedContentUrl"],
},
},
}
clips.append(clip)
if options.get("reverseOrder"):
clips.reverse()
return clips
async def removeVideoClips(self, videoClipIds: List[str]) -> None:
# Arlo does support deleting, but let's be safe and disable that
self.logger.error("deleting Arlo video clips is not implemented by this plugin")
raise Exception("deleting Arlo video clips is not implemented by this plugin")
class ArloCameraRTCSignalingSession(BackgroundTaskMixin):
def __init__(self, camera):
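The clip methods above rely on the Arlo recording's "name" doubling as an epoch-milliseconds timestamp: getVideoClip converts the id back into a small time window, fetches the library for that window, and matches the recording by exact name. A tiny sketch of that conversion (values are placeholders):

from datetime import datetime, timedelta

def window_for_clip(video_id: str, slack_seconds: int = 10) -> tuple:
    # the clip id is assumed to be epoch milliseconds, as in the library results
    t = datetime.fromtimestamp(int(video_id) / 1000.0)
    return t - timedelta(seconds=slack_seconds), t + timedelta(seconds=slack_seconds)

start, end = window_for_clip("1472881430181")
# GetLibrary(device, start, end) is then filtered for an exact "name" match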

View File

@@ -1,13 +1,19 @@
from scrypted_sdk.types import BinarySensor, ScryptedInterface
from __future__ import annotations
from typing import List, TYPE_CHECKING
from scrypted_sdk.types import BinarySensor, ScryptedInterface, ScryptedDeviceType
from .camera import ArloCamera
from .provider import ArloProvider
if TYPE_CHECKING:
# https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
from .provider import ArloProvider
class ArloDoorbell(ArloCamera, BinarySensor):
def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider) -> None:
super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_basestation, provider=provider)
self.start_doorbell_subscription()
def start_doorbell_subscription(self) -> None:
@@ -19,7 +25,10 @@ class ArloDoorbell(ArloCamera, BinarySensor):
self.provider.arlo.SubscribeToDoorbellEvents(self.arlo_basestation, self.arlo_device, callback)
)
def get_applicable_interfaces(self) -> list:
def get_device_type(self) -> str:
return ScryptedDeviceType.Doorbell.value
def get_applicable_interfaces(self) -> List[str]:
camera_interfaces = super().get_applicable_interfaces()
camera_interfaces.append(ScryptedInterface.BinarySensor.value)

View File

@@ -6,16 +6,21 @@ import logging
import re
import requests
import traceback
from typing import List
import scrypted_sdk
from scrypted_sdk import ScryptedDeviceBase
from scrypted_sdk.types import Settings, DeviceProvider, DeviceDiscovery, ScryptedInterface, ScryptedDeviceType
from scrypted_sdk.types import Setting, SettingValue, Settings, DeviceProvider, DeviceDiscovery, ScryptedInterface
from .arlo import Arlo
from .arlo.arlo_async import change_stream_class
from .arlo.logging import logger as arlo_lib_logger
from .logging import ScryptedDeviceLoggerMixin
from .util import BackgroundTaskMixin
from .camera import ArloCamera
from .doorbell import ArloDoorbell
from .basestation import ArloBasestation
from .base import ArloDeviceBase
class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery, ScryptedDeviceLoggerMixin, BackgroundTaskMixin):
@@ -366,7 +371,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
self.logger.info(f"Exiting IMAP refresh loop {id(imap_signal)}")
return
async def getSettings(self) -> list:
async def getSettings(self) -> List[Setting]:
results = [
{
"group": "General",
@@ -477,7 +482,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
return results
async def putSetting(self, key, value) -> None:
async def putSetting(self, key: str, value: SettingValue) -> None:
if not self.validate_setting(key, value):
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)
return
@@ -523,7 +528,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
_ = self.arlo
await self.onDeviceEvent(ScryptedInterface.Settings.value, None)
def validate_setting(self, key: str, val: str) -> bool:
def validate_setting(self, key: str, val: SettingValue) -> bool:
if key == "refresh_interval":
try:
val = int(val)
@@ -570,53 +575,65 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
nativeId = basestation["deviceId"]
if nativeId in self.arlo_basestations:
self.logger.info(f"Skipping basestation {nativeId} as it already exists")
self.logger.info(f"Skipping basestation {nativeId} ({basestation['modelId']}) as it has already been added")
continue
self.arlo_basestations[nativeId] = basestation
device = await self.getDevice(nativeId)
scrypted_interfaces = device.get_applicable_interfaces()
manifest = device.get_device_manifest()
self.logger.debug(f"Interfaces for {nativeId} ({basestation['modelId']}): {scrypted_interfaces}")
self.logger.info(f"Interfaces for {nativeId} ({basestation['modelId']}): {scrypted_interfaces}")
# for basestations, we want to add them to the top level DeviceProvider
provider_to_device_map.setdefault(None, []).append(manifest)
# add any builtin child devices
provider_to_device_map.setdefault(nativeId, []).extend(device.get_builtin_child_device_manifests())
# we also want to trickle discover them so they are added without deleting all existing
# we want to trickle discover them so they are added without deleting all existing
# root level devices - this is for backward compatibility
await scrypted_sdk.deviceManager.onDeviceDiscovered(manifest)
# add any builtin child devices and trickle discover them
child_manifests = device.get_builtin_child_device_manifests()
for child_manifest in child_manifests:
await scrypted_sdk.deviceManager.onDeviceDiscovered(child_manifest)
provider_to_device_map.setdefault(child_manifest["providerNativeId"], []).append(child_manifest)
self.logger.info(f"Discovered {len(basestations)} basestations")
cameras = self.arlo.GetDevices(['camera', "arloq", "arloqs", "doorbell"])
for camera in cameras:
if camera["deviceId"] != camera["parentId"] and camera["parentId"] not in self.arlo_basestations:
self.logger.info(f"Skipping camera {camera['deviceId']} because its basestation was not found")
self.logger.info(f"Skipping camera {camera['deviceId']} ({camera['modelId']}) because its basestation was not found")
continue
nativeId = camera["deviceId"]
if nativeId in self.arlo_cameras:
self.logger.info(f"Skipping camera {nativeId} as it already exists")
self.logger.info(f"Skipping camera {nativeId} ({camera['modelId']}) as it has already been added")
continue
self.arlo_cameras[nativeId] = camera
device = await self.getDevice(nativeId)
scrypted_interfaces = device.get_applicable_interfaces()
manifest = device.get_device_manifest()
self.logger.debug(f"Interfaces for {nativeId} ({camera['modelId']}): {scrypted_interfaces}")
if camera["deviceId"] == camera["parentId"]:
# these are standalone cameras with no basestation, so they act as their
# own basestation
self.arlo_basestations[camera["deviceId"]] = camera
device: ArloDeviceBase = await self.getDevice(nativeId)
scrypted_interfaces = device.get_applicable_interfaces()
manifest = device.get_device_manifest()
self.logger.info(f"Interfaces for {nativeId} ({camera['modelId']}): {scrypted_interfaces}")
if camera["deviceId"] == camera["parentId"]:
provider_to_device_map.setdefault(None, []).append(manifest)
else:
provider_to_device_map.setdefault(camera["parentId"], []).append(manifest)
# add any builtin child devices
provider_to_device_map.setdefault(nativeId, []).extend(device.get_builtin_child_device_manifests())
# trickle discover this camera so it exists for later steps
await scrypted_sdk.deviceManager.onDeviceDiscovered(manifest)
# add any builtin child devices and trickle discover them
child_manifests = device.get_builtin_child_device_manifests()
for child_manifest in child_manifests:
await scrypted_sdk.deviceManager.onDeviceDiscovered(child_manifest)
provider_to_device_map.setdefault(child_manifest["providerNativeId"], []).append(child_manifest)
camera_devices.append(manifest)
@@ -638,7 +655,7 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
"devices": provider_to_device_map[None]
})
async def getDevice(self, nativeId: str) -> ScryptedDeviceBase:
async def getDevice(self, nativeId: str) -> ArloDeviceBase:
ret = self.scrypted_devices.get(nativeId, None)
if ret is None:
ret = self.create_device(nativeId)
@@ -646,21 +663,19 @@ class ArloProvider(ScryptedDeviceBase, Settings, DeviceProvider, DeviceDiscovery
self.scrypted_devices[nativeId] = ret
return ret
def create_device(self, nativeId: str) -> ScryptedDeviceBase:
from .camera import ArloCamera
from .doorbell import ArloDoorbell
from .basestation import ArloBasestation
def create_device(self, nativeId: str) -> ArloDeviceBase:
if nativeId not in self.arlo_cameras and nativeId not in self.arlo_basestations:
self.logger.warning(f"Cannot create device for nativeId {nativeId}, maybe it hasn't been loaded yet?")
return None
arlo_device = self.arlo_cameras.get(nativeId)
if not arlo_device:
# this is a basestation, so build the basestation object
arlo_device = self.arlo_basestations[nativeId]
return ArloBasestation(nativeId, arlo_device, arlo_device, self)
return ArloBasestation(nativeId, arlo_device, self)
if arlo_device["parentId"] not in self.arlo_basestations:
self.logger.warning(f"Cannot create camera with nativeId {nativeId} when {arlo_device['parentId']} is not a valid basestation")
return None
arlo_basestation = self.arlo_basestations[arlo_device["parentId"]]

View File

@@ -1,17 +1,61 @@
from scrypted_sdk.types import OnOff, ScryptedInterface
from __future__ import annotations
from .device_base import ArloDeviceBase
from typing import List, TYPE_CHECKING
from scrypted_sdk.types import OnOff, SecuritySystemMode, ScryptedInterface, ScryptedDeviceType
from .base import ArloDeviceBase
if TYPE_CHECKING:
# https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
from .provider import ArloProvider
from .vss import ArloSirenVirtualSecuritySystem
class ArloSiren(ArloDeviceBase, OnOff):
vss: ArloSirenVirtualSecuritySystem = None
def get_applicable_interfaces(self) -> list:
def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider, vss: ArloSirenVirtualSecuritySystem) -> None:
super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_basestation, provider=provider)
self.vss = vss
def get_applicable_interfaces(self) -> List[str]:
return [ScryptedInterface.OnOff.value]
def get_device_type(self) -> str:
return ScryptedDeviceType.Siren.value
@ArloDeviceBase.async_print_exception_guard
async def turnOn(self) -> None:
self.logger.info("Turning on")
if self.vss.securitySystemState["mode"] == SecuritySystemMode.Disarmed.value:
self.logger.info("Virtual security system is disarmed, ignoring trigger")
# set and unset this property to force homekit to display the
# switch as off
self.on = True
self.on = False
self.vss.securitySystemState = {
**self.vss.securitySystemState,
"triggered": False,
}
return
self.provider.arlo.SirenOn(self.arlo_device)
self.on = True
self.vss.securitySystemState = {
**self.vss.securitySystemState,
"triggered": True,
}
@ArloDeviceBase.async_print_exception_guard
async def turnOff(self) -> None:
self.logger.info("Turning off")
self.provider.arlo.SirenOff(self.arlo_device)
self.provider.arlo.SirenOff(self.arlo_device)
self.on = False
self.vss.securitySystemState = {
**self.vss.securitySystemState,
"triggered": False,
}

View File

@@ -2,12 +2,12 @@ import asyncio
class BackgroundTaskMixin:
def create_task(self, coroutine):
def create_task(self, coroutine) -> asyncio.Task:
task = asyncio.get_event_loop().create_task(coroutine)
self.register_task(task)
return task
def register_task(self, task):
def register_task(self, task) -> None:
if not hasattr(self, "background_tasks"):
self.background_tasks = set()
@@ -21,6 +21,8 @@ class BackgroundTaskMixin:
task.add_done_callback(print_exception)
task.add_done_callback(self.background_tasks.discard)
def cancel_pending_tasks(self):
def cancel_pending_tasks(self) -> None:
if not hasattr(self, "background_tasks"):
return
for task in self.background_tasks:
task.cancel()
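The mixin above keeps a strong reference to every background task and drops it via a done callback; asyncio itself holds only weak references to tasks, so without this bookkeeping a fire-and-forget task can be garbage collected mid-flight and its exception silently lost. A standalone sketch of the same pattern using only the standard library:

import asyncio

background_tasks: set = set()

def track(coroutine) -> asyncio.Task:
    task = asyncio.get_event_loop().create_task(coroutine)
    background_tasks.add(task)  # strong reference keeps the task alive

    def on_done(t: asyncio.Task) -> None:
        try:
            if not t.cancelled() and t.exception() is not None:
                print("background task failed:", t.exception())
        finally:
            background_tasks.discard(t)  # drop the reference once finished

    task.add_done_callback(on_done)
    return task

async def main() -> None:
    track(asyncio.sleep(0.1))
    await asyncio.sleep(0.2)

# asyncio.run(main())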

View File

@@ -0,0 +1,150 @@
from __future__ import annotations
import asyncio
from typing import List, TYPE_CHECKING
from scrypted_sdk.types import Device, DeviceProvider, Setting, Settings, SettingValue, SecuritySystem, SecuritySystemMode, Readme, ScryptedInterface, ScryptedDeviceType
from .base import ArloDeviceBase
from .siren import ArloSiren
if TYPE_CHECKING:
# https://adamj.eu/tech/2021/05/13/python-type-hints-how-to-fix-circular-imports/
from .provider import ArloProvider
class ArloSirenVirtualSecuritySystem(ArloDeviceBase, SecuritySystem, DeviceProvider):
"""A virtual, emulated security system that controls when scrypted events can trip the real physical siren."""
SUPPORTED_MODES = [SecuritySystemMode.AwayArmed.value, SecuritySystemMode.HomeArmed.value, SecuritySystemMode.Disarmed.value]
siren: ArloSiren = None
def __init__(self, nativeId: str, arlo_device: dict, arlo_basestation: dict, provider: ArloProvider) -> None:
super().__init__(nativeId=nativeId, arlo_device=arlo_device, arlo_basestation=arlo_basestation, provider=provider)
self.create_task(self.delayed_init())
@property
def mode(self) -> str:
mode = self.storage.getItem("mode")
if mode is None or mode not in ArloSirenVirtualSecuritySystem.SUPPORTED_MODES:
mode = SecuritySystemMode.Disarmed.value
return mode
@mode.setter
def mode(self, mode: str) -> None:
if mode not in ArloSirenVirtualSecuritySystem.SUPPORTED_MODES:
raise ValueError(f"invalid mode {mode}")
self.storage.setItem("mode", mode)
self.securitySystemState = {
**self.securitySystemState,
"mode": mode,
}
self.create_task(self.onDeviceEvent(ScryptedInterface.Settings.value, None))
async def delayed_init(self) -> None:
iterations = 1
while not self.stop_subscriptions:
if iterations > 100:
self.logger.error("Delayed init exceeded iteration limit, giving up")
return
try:
self.securitySystemState = {
"supportedModes": ArloSirenVirtualSecuritySystem.SUPPORTED_MODES,
"mode": self.mode,
}
return
except Exception as e:
self.logger.info(f"Delayed init failed, will try again: {e}")
await asyncio.sleep(0.1)
iterations += 1
def get_applicable_interfaces(self) -> List[str]:
return [
ScryptedInterface.SecuritySystem.value,
ScryptedInterface.DeviceProvider.value,
ScryptedInterface.Settings.value,
ScryptedInterface.Readme.value,
]
def get_device_type(self) -> str:
return ScryptedDeviceType.SecuritySystem.value
def get_builtin_child_device_manifests(self) -> List[Device]:
siren = self.get_or_create_siren()
return [
{
"info": {
"model": f"{self.arlo_device['modelId']} {self.arlo_device['properties'].get('hwVersion', '')}".strip(),
"manufacturer": "Arlo",
"firmware": self.arlo_device.get("firmwareVersion"),
"serialNumber": self.arlo_device["deviceId"],
},
"nativeId": siren.nativeId,
"name": f'{self.arlo_device["deviceName"]} Siren',
"interfaces": siren.get_applicable_interfaces(),
"type": siren.get_device_type(),
"providerNativeId": self.nativeId,
}
]
async def getSettings(self) -> List[Setting]:
return [
{
"key": "mode",
"title": "Arm Mode",
"description": "If disarmed, the associated siren will not be physically triggered even if toggled.",
"value": self.mode,
"choices": ArloSirenVirtualSecuritySystem.SUPPORTED_MODES,
},
]
async def putSetting(self, key: str, value: SettingValue) -> None:
if key != "mode":
raise ValueError(f"invalid setting {key}")
self.mode = value
if self.mode == SecuritySystemMode.Disarmed.value:
await self.get_or_create_siren().turnOff()
async def getReadmeMarkdown(self) -> str:
return """
# Virtual Security System for Arlo Sirens
This security system device is not a real physical device, but a virtual, emulated device provided by the Arlo Scrypted plugin. Its purpose is to grant security system semantics of Arm/Disarm to avoid the accidental, unwanted triggering of the real physical siren through integrations such as Homekit.
To allow the siren to trigger, set the Arm Mode to any of the Armed options. When Disarmed, any triggers of the siren will be ignored. Switching modes will not perform any changes to Arlo cloud or your Arlo account, but rather only to this Scrypted device.
If this virtual security system is synced to Homekit, the siren device will be merged into the same security system accessory as a switch. The siren device will not be added as a separate accessory. To access the siren as a switch without the security system, disable syncing of the virtual security system and enable syncing of the siren, then ensure that the virtual security system is armed manually in its settings in Scrypted.
""".strip()
async def getDevice(self, nativeId: str) -> ArloDeviceBase:
if not nativeId.endswith("siren"):
return None
return self.get_or_create_siren()
def get_or_create_siren(self) -> ArloSiren:
siren_id = f'{self.arlo_device["deviceId"]}.siren'
if not self.siren:
self.siren = ArloSiren(siren_id, self.arlo_device, self.arlo_basestation, self.provider, self)
return self.siren
async def armSecuritySystem(self, mode: SecuritySystemMode) -> None:
self.logger.info(f"Arming {mode}")
self.mode = mode
self.securitySystemState = {
**self.securitySystemState,
"mode": mode,
}
if mode == SecuritySystemMode.Disarmed.value:
await self.get_or_create_siren().turnOff()
@ArloDeviceBase.async_print_exception_guard
async def disarmSecuritySystem(self) -> None:
self.logger.info(f"Disarming")
self.mode = SecuritySystemMode.Disarmed.value
self.securitySystemState = {
**self.securitySystemState,
"mode": SecuritySystemMode.Disarmed.value,
}
await self.get_or_create_siren().turnOff()
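delayed_init above retries the state assignment until the underlying Scrypted device state is available, sleeping briefly between attempts and giving up after a bounded number of iterations. The same retry-until-ready shape in isolation (a sketch; the callable and limits are placeholders mirroring the values above):

import asyncio

async def retry_init(apply_state, max_iterations: int = 100) -> None:
    iterations = 1
    while True:
        if iterations > max_iterations:
            print("delayed init exceeded iteration limit, giving up")
            return
        try:
            apply_state()  # may raise until the backing store is ready
            return
        except Exception as e:
            print(f"delayed init failed, will try again: {e}")
            await asyncio.sleep(0.1)
            iterations += 1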

View File

@@ -1,6 +1,7 @@
paho-mqtt==1.6.1
sseclient==0.0.22
requests
cachetools
scrypted-arlo-go==0.0.1
--extra-index-url=https://www.piwheels.org/simple/
--extra-index-url=https://bjia56.github.io/scrypted-arlo-go/

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/homekit",
"version": "1.2.20",
"version": "1.2.21",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/homekit",
"version": "1.2.20",
"version": "1.2.21",
"dependencies": {
"@koush/werift-src": "file:../../external/werift",
"check-disk-space": "^3.3.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/homekit",
"version": "1.2.20",
"version": "1.2.21",
"description": "HomeKit Plugin for Scrypted",
"scripts": {
"scrypted-setup-project": "scrypted-setup-project",

View File

@@ -1,5 +1,5 @@
import { Deferred } from '@scrypted/common/src/deferred';
import sdk, { AudioSensor, Camera, Intercom, MotionSensor, ObjectsDetected, OnOff, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, VideoCamera, VideoCameraConfiguration } from '@scrypted/sdk';
import sdk, { AudioSensor, Camera, Intercom, MotionSensor, ObjectsDetected, OnOff, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, DeviceProvider, VideoCamera, VideoCameraConfiguration } from '@scrypted/sdk';
import { defaultObjectDetectionContactSensorTimeout } from '../camera-mixin';
import { addSupportedType, bindCharacteristic, DummyDevice } from '../common';
import { AudioRecordingCodec, AudioRecordingCodecType, AudioRecordingSamplerate, AudioStreamingCodec, AudioStreamingCodecType, AudioStreamingSamplerate, CameraController, CameraRecordingConfiguration, CameraRecordingDelegate, CameraRecordingOptions, CameraStreamingOptions, Characteristic, CharacteristicEventTypes, H264Level, H264Profile, MediaContainerType, OccupancySensor, RecordingPacket, Service, SRTPCryptoSuites, VideoCodecType, WithUUID } from '../hap';
@@ -7,7 +7,7 @@ import type { HomeKitPlugin } from '../main';
import { handleFragmentsRequests, iframeIntervalSeconds } from './camera/camera-recording';
import { createCameraStreamingDelegate } from './camera/camera-streaming';
import { FORCE_OPUS } from './camera/camera-utils';
import { makeAccessory } from './common';
import { makeAccessory, mergeOnOffDevicesByType } from './common';
const { deviceManager, systemManager } = sdk;
@@ -303,6 +303,15 @@ addSupportedType({
}
}
if (device.interfaces.includes(ScryptedInterface.DeviceProvider)) {
// merge in lights
const { devices } = mergeOnOffDevicesByType(device as ScryptedDevice as ScryptedDevice & DeviceProvider, accessory, ScryptedDeviceType.Light);
// ensure child devices are skipped by the rest of homekit by
// reporting that they've been merged
devices.map(device => homekitPlugin.mergedDevices.add(device.id));
}
return accessory;
}
});

View File

@@ -15,9 +15,9 @@ import os from 'os';
import { getAddressOverride } from '../../address-override';
import { AudioStreamingCodecType, CameraController, CameraStreamingDelegate, PrepareStreamCallback, PrepareStreamRequest, PrepareStreamResponse, StartStreamRequest, StreamingRequest, StreamRequestCallback, StreamRequestTypes } from '../../hap';
import type { HomeKitPlugin } from "../../main";
import { createReturnAudioSdp } from './camera-return-audio';
import { createSnapshotHandler } from '../camera/camera-snapshot';
import { getDebugMode } from './camera-debug-mode-storage';
import { createReturnAudioSdp } from './camera-return-audio';
import { startCameraStreamFfmpeg } from './camera-streaming-ffmpeg';
import { CameraStreamingSession } from './camera-streaming-session';
import { getStreamingConfiguration } from './camera-utils';
@@ -375,6 +375,12 @@ export function createCameraStreamingDelegate(device: ScryptedDevice & VideoCame
let playing = false;
session.audioReturn.once('message', async buffer => {
try {
const decrypted = srtpSession.decrypt(buffer);
const rtp = RtpPacket.deSerialize(decrypted);
if (rtp.header.payloadType !== session.startRequest.audio.pt)
return;
const { clientPromise, url } = await listenZeroSingleClient();
const rtspUrl = url.replace('tcp', 'rtsp');
let sdp = createReturnAudioSdp(session.startRequest.audio);

View File

@@ -167,29 +167,30 @@ export function addFan(device: ScryptedDevice & Fan & OnOff, accessory: Accessor
}
/*
* addChildSirens looks for siren-type child devices of the given device provider
* and merges them as switches to the accessory represented by the device provider.
* mergeOnOffDevicesByType looks for the specified type of child devices under the
* given device provider and merges them as switches to the accessory represented
* by the device provider.
*
* Returns the services created as well as all of the child siren devices which have
* Returns the services created as well as all of the child OnOff devices which have
* been merged.
*/
export function addChildSirens(device: ScryptedDevice & DeviceProvider, accessory: Accessory): { services: Service[], devices: (ScryptedDevice & OnOff)[] } {
export function mergeOnOffDevicesByType(device: ScryptedDevice & DeviceProvider, accessory: Accessory, type: ScryptedDeviceType): { services: Service[], devices: (ScryptedDevice & OnOff)[] } {
if (!device.interfaces.includes(ScryptedInterface.DeviceProvider))
return undefined;
const children = getChildDevices(device);
const sirenDevices = [];
const mergedDevices = [];
const services = children.map((child: ScryptedDevice & OnOff) => {
if (child.type !== ScryptedDeviceType.Siren || !child.interfaces.includes(ScryptedInterface.OnOff))
if (child.type !== type || !child.interfaces.includes(ScryptedInterface.OnOff))
return undefined;
const onOffService = getOnOffService(child, accessory, Service.Switch)
sirenDevices.push(child);
mergedDevices.push(child);
return onOffService;
});
return {
services: services.filter(service => !!service),
devices: sirenDevices,
devices: mergedDevices,
};
}

View File

@@ -1,7 +1,7 @@
import { SecuritySystem, SecuritySystemMode, SecuritySystemObstruction, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, DeviceProvider } from '@scrypted/sdk';
import { addSupportedType, bindCharacteristic, DummyDevice } from '../common';
import { Characteristic, CharacteristicEventTypes, CharacteristicSetCallback, CharacteristicValue, Service } from '../hap';
import { makeAccessory, addChildSirens } from './common';
import { makeAccessory, mergeOnOffDevicesByType } from './common';
import type { HomeKitPlugin } from "../main";
addSupportedType({
@@ -90,7 +90,8 @@ addSupportedType({
() => !!device.securitySystemState?.triggered);
if (device.interfaces.includes(ScryptedInterface.DeviceProvider)) {
const { devices } = addChildSirens(device as ScryptedDevice as ScryptedDevice & DeviceProvider, accessory);
// merge in sirens
const { devices } = mergeOnOffDevicesByType(device as ScryptedDevice as ScryptedDevice & DeviceProvider, accessory, ScryptedDeviceType.Siren);
// ensure child devices are skipped by the rest of homekit by
// reporting that they've been merged

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/objectdetector",
"version": "0.0.113",
"version": "0.0.119",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/objectdetector",
"version": "0.0.113",
"version": "0.0.119",
"license": "Apache-2.0",
"dependencies": {
"@scrypted/common": "file:../../common",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/objectdetector",
"version": "0.0.113",
"version": "0.0.119",
"description": "Scrypted Video Analysis Plugin. Installed alongside a detection service like OpenCV or TensorFlow.",
"author": "Scrypted",
"license": "Apache-2.0",

View File

@@ -1,6 +1,7 @@
import { Deferred } from "@scrypted/common/src/deferred";
import { ffmpegLogInitialOutput, safeKillFFmpeg } from "@scrypted/common/src/media-helpers";
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from "@scrypted/common/src/media-helpers";
import { readLength, readLine } from "@scrypted/common/src/read-stream";
import { addVideoFilterArguments } from "@scrypted/common/src/ffmpeg-helpers";
import sdk, { FFmpegInput, Image, ImageOptions, MediaObject, ScryptedDeviceBase, ScryptedMimeTypes, VideoFrame, VideoFrameGenerator, VideoFrameGeneratorOptions } from "@scrypted/sdk";
import child_process from 'child_process';
import sharp from 'sharp';
@@ -29,7 +30,7 @@ interface RawFrame {
}
class VipsImage implements Image {
constructor(public image: sharp.Sharp, public width: number, public height: number) {
constructor(public image: sharp.Sharp, public width: number, public height: number, public channels: number) {
}
toImageInternal(options: ImageOptions) {
@@ -54,12 +55,18 @@ class VipsImage implements Image {
async toBuffer(options: ImageOptions) {
const transformed = this.toImageInternal(options);
if (options?.format === 'rgb') {
transformed.removeAlpha().toFormat('raw');
}
else if (options?.format === 'jpg') {
if (options?.format === 'jpg') {
transformed.toFormat('jpg');
}
else {
if (this.channels === 1 && (options?.format === 'gray' || !options.format))
transformed.extractChannel(0);
else if (options?.format === 'gray')
transformed.toColorspace('b-w');
else if (options?.format === 'rgb')
transformed.removeAlpha()
transformed.raw();
}
return transformed.toBuffer();
}
@@ -74,7 +81,7 @@ class VipsImage implements Image {
});
const newMetadata = await newImage.metadata();
const newVipsImage = new VipsImage(newImage, newMetadata.width, newMetadata.height);
const newVipsImage = new VipsImage(newImage, newMetadata.width, newMetadata.height, newMetadata.channels);
return newVipsImage;
}
@@ -89,20 +96,27 @@ class VipsImage implements Image {
export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements VideoFrameGenerator {
async *generateVideoFramesInternal(mediaObject: MediaObject, options?: VideoFrameGeneratorOptions, filter?: (videoFrame: VideoFrame & MediaObject) => Promise<boolean>): AsyncGenerator<VideoFrame & MediaObject, any, unknown> {
const ffmpegInput = await sdk.mediaManager.convertMediaObjectToJSON<FFmpegInput>(mediaObject, ScryptedMimeTypes.FFmpegInput);
const gray = options?.format === 'gray';
const channels = gray ? 1 : 3;
const args = [
'-hide_banner',
//'-hwaccel', 'auto',
...ffmpegInput.inputArguments,
'-vcodec', 'pam',
'-pix_fmt', 'rgb24',
'-pix_fmt', gray ? 'gray' : 'rgb24',
'-f', 'image2pipe',
'pipe:3',
];
// this seems to reduce latency.
addVideoFilterArguments(args, 'fps=10', 'fps');
const cp = child_process.spawn(await sdk.mediaManager.getFFmpegPath(), args, {
stdio: ['pipe', 'pipe', 'pipe', 'pipe'],
});
ffmpegLogInitialOutput(this.console, cp);
const console = mediaObject?.sourceId ? sdk.deviceManager.getMixinConsole(mediaObject.sourceId) : this.console;
safePrintFFmpegArguments(console, args);
ffmpegLogInitialOutput(console, cp);
let finished = false;
let frameDeferred: Deferred<RawFrame>;
@@ -121,7 +135,7 @@ export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements Vid
}
if (headers['TUPLTYPE'] !== 'RGB')
if (headers['TUPLTYPE'] !== 'RGB' && headers['TUPLTYPE'] !== 'GRAYSCALE')
throw new Error(`Unexpected TUPLTYPE in PAM stream: ${headers['TUPLTYPE']}`);
const width = parseInt(headers['WIDTH']);
@@ -129,7 +143,7 @@ export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements Vid
if (!width || !height)
throw new Error('Invalid dimensions in PAM stream');
const length = width * height * 3;
const length = width * height * channels;
headers.clear();
const data = await readLength(readable, length);
@@ -150,7 +164,7 @@ export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements Vid
catch (e) {
}
finally {
this.console.log('finished reader');
console.log('finished reader');
finished = true;
frameDeferred?.reject(new Error('frame generator finished'));
}
@@ -167,20 +181,24 @@ export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements Vid
raw: {
width,
height,
channels: 3,
channels,
}
});
const vipsImage = new VipsImage(image, width, height);
const mo = await createVipsMediaObject(vipsImage);
yield mo;
vipsImage.image.destroy();
vipsImage.image = undefined;
const vipsImage = new VipsImage(image, width, height, channels);
try {
const mo = await createVipsMediaObject(vipsImage);
yield mo;
}
finally {
vipsImage.image = undefined;
image.destroy();
}
}
}
catch (e) {
}
finally {
this.console.log('finished generator');
console.log('finished generator');
finished = true;
safeKillFFmpeg(cp);
}
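The frame generator above asks ffmpeg for -vcodec pam over image2pipe, so each frame arrives as a PAM image: a small text header (WIDTH, HEIGHT, TUPLTYPE, ENDHDR) followed by exactly width * height * channels bytes of raw pixels, with the channel count depending on whether gray or rgb24 output was requested. A rough Python sketch of reading one such frame from a pipe (the plugin itself does this in TypeScript):

import io

def read_pam_frame(stream: io.BufferedReader) -> tuple:
    assert stream.readline().strip() == b"P7", "not a PAM frame"
    headers = {}
    while True:
        line = stream.readline().strip()
        if line == b"ENDHDR":
            break
        key, _, value = line.partition(b" ")
        headers[key.decode()] = value.decode()
    width, height = int(headers["WIDTH"]), int(headers["HEIGHT"])
    # GRAYSCALE frames carry one channel, RGB frames three
    channels = 1 if headers["TUPLTYPE"] == "GRAYSCALE" else 3
    data = stream.read(width * height * channels)
    return width, height, channels, data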

View File

@@ -1,10 +1,9 @@
import sdk, { Camera, DeviceProvider, DeviceState, EventListenerRegister, MediaObject, MediaStreamDestination, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, ScryptedNativeId, Setting, Settings, SettingValue, VideoCamera, VideoFrame, VideoFrameGenerator } from '@scrypted/sdk';
import sdk, { Camera, DeviceProvider, DeviceState, EventListenerRegister, MediaObject, MediaStreamDestination, MixinDeviceBase, MixinProvider, MotionSensor, ObjectDetection, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionTypes, ObjectDetector, ObjectsDetected, ScryptedDevice, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, ScryptedNativeId, Setting, Settings, SettingValue, VideoCamera, VideoFrame, VideoFrameGenerator } from '@scrypted/sdk';
import { StorageSettings } from '@scrypted/sdk/storage-settings';
import crypto from 'crypto';
import cloneDeep from 'lodash/cloneDeep';
import { AutoenableMixinProvider } from "../../../common/src/autoenable-mixin-provider";
import { SettingsMixinDeviceBase } from "../../../common/src/settings-mixin";
import { DenoisedDetectionEntry, DenoisedDetectionState, denoiseDetections } from './denoise';
import { DenoisedDetectionState } from './denoise';
import { FFmpegVideoFrameGenerator } from './ffmpeg-videoframes';
import { serverSupportsMixinEventMasking } from './server-version';
import { sleep } from './sleep';
@@ -19,8 +18,6 @@ const defaultDetectionDuration = 20;
const defaultDetectionInterval = 60;
const defaultDetectionTimeout = 60;
const defaultMotionDuration = 10;
const defaultScoreThreshold = .2;
const defaultSecondScoreThreshold = .7;
const BUILTIN_MOTION_SENSOR_ASSIST = 'Assist';
const BUILTIN_MOTION_SENSOR_REPLACE = 'Replace';
@@ -44,9 +41,8 @@ type TrackedDetection = ObjectDetectionResult & {
bestSecondPassScore?: number;
};
class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera & MotionSensor & ObjectDetector> implements ObjectDetector, Settings, ObjectDetectionCallbacks {
class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera & MotionSensor & ObjectDetector> implements ObjectDetector, Settings {
motionListener: EventListenerRegister;
detectorListener: EventListenerRegister;
motionMixinListener: EventListenerRegister;
detections = new Map<string, MediaObject>();
cameraDevice: ScryptedDevice & Camera & VideoCamera & MotionSensor & ObjectDetector;
@@ -81,16 +77,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
this.maybeStartMotionDetection();
}
},
captureMode: {
title: 'Capture Mode',
description: 'The method to capture frames for analysis. Video will require more processing power.',
choices: [
'Default',
'Video',
'Snapshot',
],
defaultValue: 'Default',
},
detectionDuration: {
title: 'Detection Duration',
subgroup: 'Advanced',
@@ -121,23 +107,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
defaultValue: defaultDetectionInterval,
hide: true,
},
scoreThreshold: {
title: 'Minimum Detection Confidence',
subgroup: 'Advanced',
description: 'Higher values eliminate false positives and low quality recognition candidates.',
type: 'number',
placeholder: '.2',
defaultValue: defaultScoreThreshold,
},
secondScoreThreshold: {
title: 'Second Pass Confidence',
subgroup: 'Advanced',
description: 'Crop and reanalyze a result from the initial detection pass to get more accurate results.',
key: 'secondScoreThreshold',
type: 'number',
defaultValue: defaultSecondScoreThreshold,
placeholder: '.7',
},
});
motionTimeout: NodeJS.Timeout;
zones = this.getZones();
@@ -178,7 +147,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (this.hasMotionType) {
// force a motion detection restart if it quit
if (this.motionSensorSupplementation === BUILTIN_MOTION_SENSOR_REPLACE)
await this.startStreamAnalysis();
await this.startPipelineAnalysis();
return;
}
}, this.storageSettings.values.detectionInterval * 1000);
@@ -216,30 +185,16 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
return ret;
}
async snapshotDetection() {
const picture = await this.cameraDevice.takePicture();
let detections = await this.objectDetection.detectObjects(picture, {
detectionId: this.detectionId,
settings: this.getCurrentSettings(),
});
detections = await this.trackObjects(detections, true);
this.reportObjectDetections(detections);
}
async maybeStartMotionDetection() {
if (!this.hasMotionType)
return;
if (this.motionSensorSupplementation !== BUILTIN_MOTION_SENSOR_REPLACE)
return;
await this.startStreamAnalysis();
await this.startPipelineAnalysis();
}
endObjectDetection() {
this.detectorRunning = false;
this.objectDetection?.detectObjects(undefined, {
detectionId: this.detectionId,
settings: this.getCurrentSettings(),
});
}
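For context, the detectObjects calls above and below follow a session pattern: media plus a duration starts or feeds a session, undefined media with the same detectionId and a duration extends it, and undefined media without a duration (as in endObjectDetection) tears it down. A minimal sketch of that lifecycle as this mixin uses it; the detectionId and duration values are placeholders and the types come from '@scrypted/sdk':

// Sketch of the ObjectDetection session lifecycle as used in this mixin.
async function exampleDetectionSession(objectDetection: ObjectDetection, stream: MediaObject) {
    const detectionId = 'example-detection-id';
    // start (or feed) a session with media and a duration
    await objectDetection.detectObjects(stream, { detectionId, duration: 20000 });
    // extend the running session without supplying new media
    await objectDetection.detectObjects(undefined, { detectionId, duration: 20000 });
    // end the session: same detectionId, no duration
    await objectDetection.detectObjects(undefined, { detectionId });
}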
bindObjectDetection() {
@@ -247,60 +202,30 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
this.motionDetected = false;
this.detectorRunning = false;
this.detectorListener?.removeListener();
this.detectorListener = undefined;
this.endObjectDetection();
this.maybeStartMotionDetection();
}
async register() {
const model = await this.objectDetection.getDetectionModel();
if (!this.hasMotionType) {
if (model.triggerClasses?.includes('motion')) {
this.motionListener = this.cameraDevice.listen(ScryptedInterface.MotionSensor, async () => {
if (!this.cameraDevice.motionDetected) {
if (this.detectorRunning) {
// allow analysis due to user request.
if (this.analyzeStop > Date.now())
return;
this.console.log('motion stopped, cancelling ongoing detection')
this.endObjectDetection();
}
return;
}
await this.startStreamAnalysis();
});
}
const nonMotion = model.triggerClasses?.find(t => t !== 'motion');
if (nonMotion) {
this.detectorListener = this.cameraDevice.listen(ScryptedInterface.ObjectDetector, async (s, d, data: ObjectsDetected) => {
if (!model.triggerClasses)
return;
if (!data.detectionId)
return;
const { detections } = data;
if (!detections?.length)
return;
const set = new Set(detections.map(d => d.className));
for (const trigger of model.triggerClasses) {
if (trigger === 'motion')
continue;
if (set.has(trigger)) {
const jpeg = await this.cameraDevice.getDetectionInput(data.detectionId, data.eventId);
const found = await this.objectDetection.detectObjects(jpeg);
found.detectionId = data.detectionId;
this.handleDetectionEvent(found, undefined, jpeg);
this.motionListener = this.cameraDevice.listen(ScryptedInterface.MotionSensor, async () => {
if (!this.cameraDevice.motionDetected) {
if (this.detectorRunning) {
// allow analysis due to user request.
if (this.analyzeStop > Date.now())
return;
}
this.console.log('motion stopped, cancelling ongoing detection')
this.endObjectDetection();
}
});
}
return;
}
await this.startPipelineAnalysis();
});
return;
}
@@ -317,7 +242,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
return;
if (!this.detectorRunning)
this.console.log('built in motion sensor started motion, starting video detection.');
await this.startStreamAnalysis();
await this.startPipelineAnalysis();
return;
}
@@ -332,163 +257,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
}
async handleDetectionEvent(detection: ObjectsDetected, redetect?: (boundingBox: [number, number, number, number]) => Promise<ObjectDetectionResult[]>, mediaObject?: MediaObject) {
this.detectorRunning = detection.running;
detection = await this.trackObjects(detection);
// apply the zones to the detections and get a shallow copy list of detections after
// exclusion zones have been applied
const zonedDetections = this.applyZones(detection)
.filter(d => {
if (!d.zones?.length)
return d.bestSecondPassScore >= this.secondScoreThreshold || d.score >= this.scoreThreshold;
for (const zone of d.zones || []) {
const zi = this.zoneInfos[zone];
const scoreThreshold = zi?.scoreThreshold || this.scoreThreshold;
const secondScoreThreshold = zi?.secondScoreThreshold || this.secondScoreThreshold;
// keep the object if it passes the score check, or has already passed a second score check.
if (d.bestSecondPassScore >= secondScoreThreshold || d.score >= scoreThreshold)
return true;
}
});
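In other words, a detection inside a zone is kept when it beats that zone's thresholds (falling back to the camera-wide defaults), while a detection outside any zone is judged against the camera-wide thresholds alone. A small worked example with hypothetical values:

// Hypothetical values: a 'Driveway' zone that demands higher confidence than the camera default.
const cameraScoreThreshold = 0.2;
const zoneInfos: { [zone: string]: { scoreThreshold?: number } } = {
    Driveway: { scoreThreshold: 0.6 },
};
const detection = { className: 'person', score: 0.4, zones: ['Driveway'] };
const threshold = zoneInfos[detection.zones[0]]?.scoreThreshold || cameraScoreThreshold;
const keep = detection.score >= threshold; // false: 0.4 < 0.6, so this detection is dropped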
let retainImage = false;
if (!this.hasMotionType && redetect && this.secondScoreThreshold && detection.detections) {
const detections = detection.detections as TrackedDetection[];
const newOrBetterDetections = zonedDetections.filter(d => d.newOrBetterDetection);
detections?.forEach(d => d.newOrBetterDetection = false);
// anything with a higher initial pass score should be redetected
// as it may yield a better second pass score and thus a better thumbnail.
await Promise.allSettled(newOrBetterDetections.map(async d => {
const maybeUpdateSecondPassScore = (secondPassScore: number) => {
let better = false;
// initialize second pass result
if (!d.bestSecondPassScore) {
better = true;
d.bestSecondPassScore = 0;
}
// retain when passing the second pass threshold for the first time.
if (d.bestSecondPassScore < this.secondScoreThreshold && secondPassScore >= this.secondScoreThreshold) {
this.console.log('improved', d.id, secondPassScore, d.score);
better = true;
retainImage = true;
}
else if (secondPassScore > d.bestSecondPassScore * 1.1) {
this.console.log('improved', d.id, secondPassScore, d.score);
better = true;
retainImage = true;
}
if (better)
d.bestSecondPassScore = secondPassScore;
return better;
}
// the initial score may be sufficient.
if (d.score >= this.secondScoreThreshold) {
maybeUpdateSecondPassScore(d.score);
return;
}
const redetected = await redetect(d.boundingBox);
const best = redetected.filter(r => r.className === d.className).sort((a, b) => b.score - a.score)?.[0];
if (best) {
if (maybeUpdateSecondPassScore(best.score)) {
d.boundingBox = best.boundingBox;
}
}
}));
const secondPassDetections = zonedDetections.filter(d => d.bestSecondPassScore >= this.secondScoreThreshold)
.map(d => ({
...d,
score: d.bestSecondPassScore,
}));
detection.detections = secondPassDetections;
}
else {
detection.detections = zonedDetections;
}
if (detection.detections) {
const trackedDetections = cloneDeep(detection.detections) as TrackedDetection[];
for (const d of trackedDetections) {
delete d.bestScore;
delete d.bestSecondPassScore;
delete d.newOrBetterDetection;
}
detection.detections = trackedDetections;
}
const now = Date.now();
if (this.lastDetectionInput + this.storageSettings.values.detectionTimeout * 1000 < Date.now())
retainImage = true;
if (retainImage && mediaObject) {
this.lastDetectionInput = now;
this.setDetection(detection, mediaObject);
}
this.reportObjectDetections(detection);
return retainImage;
}
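The second-pass bookkeeping above boils down to one rule: a crop re-detection becomes the new best score (and triggers image retention) when it crosses the second-pass threshold for the first time or improves on the previous best by more than 10%. A condensed sketch of that rule, not the exact code:

// Condensed restatement of the maybeUpdateSecondPassScore logic above.
function isBetterSecondPass(previousBest: number | undefined, score: number, secondScoreThreshold: number) {
    if (previousBest === undefined)
        return true; // first second-pass result is always recorded
    if (previousBest < secondScoreThreshold && score >= secondScoreThreshold)
        return true; // crossed the threshold for the first time
    return score > previousBest * 1.1; // meaningful (>10%) improvement
}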
get scoreThreshold() {
return parseFloat(this.storage.getItem('scoreThreshold')) || defaultScoreThreshold;
}
get secondScoreThreshold() {
const r = parseFloat(this.storage.getItem('secondScoreThreshold'));
if (isNaN(r))
return defaultSecondScoreThreshold;
return r;
}
async onDetection(detection: ObjectsDetected, redetect?: (boundingBox: [number, number, number, number]) => Promise<ObjectDetectionResult[]>, mediaObject?: MediaObject): Promise<boolean> {
// detection.detections = detection.detections?.filter(d => d.score >= this.scoreThreshold);
return this.handleDetectionEvent(detection, redetect, mediaObject);
}
async onDetectionEnded(detection: ObjectsDetected): Promise<void> {
this.handleDetectionEvent(detection);
}
async startSnapshotAnalysis() {
if (this.detectorRunning)
return;
this.detectorRunning = true;
this.analyzeStop = Date.now() + this.getDetectionDuration();
while (this.detectorRunning) {
const now = Date.now();
if (now > this.analyzeStop)
break;
try {
const mo = await this.mixinDevice.takePicture({
reason: 'event',
});
const found = await this.objectDetection.detectObjects(mo, {
detectionId: this.detectionId,
duration: this.getDetectionDuration(),
settings: this.getCurrentSettings(),
}, this);
}
catch (e) {
this.console.error('snapshot detection error', e);
}
// cameras tend to only refresh every 1s at best.
// maybe get this value from somewhere? or sha the jpeg?
const diff = now + 1100 - Date.now();
if (diff > 0)
await sleep(diff);
}
this.endObjectDetection();
}
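The loop above paces snapshot requests to roughly one every 1.1 seconds, since cameras rarely refresh their snapshot faster than once per second. A generic version of that pacing pattern; the interval and work callback are placeholders:

const pause = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));

// Poll a unit of work no faster than intervalMs until stopAt is reached.
async function pollPaced(work: () => Promise<void>, stopAt: number, intervalMs = 1100) {
    while (Date.now() < stopAt) {
        const started = Date.now();
        try {
            await work();
        }
        catch (e) {
            console.error('poll error', e);
        }
        // wait out the remainder of the interval so the snapshot has a chance to refresh
        const remaining = started + intervalMs - Date.now();
        if (remaining > 0)
            await pause(remaining);
    }
}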
async startPipelineAnalysis() {
if (this.detectorRunning)
return;
@@ -573,20 +341,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
// apply the zones to the detections and get a shallow copy list of detections after
// exclusion zones have been applied
const zonedDetections = this.applyZones(detected.detected);
const filteredDetections = zonedDetections
.filter(d => {
if (!d.zones?.length)
return d.score >= this.scoreThreshold;
for (const zone of d.zones || []) {
const zi = this.zoneInfos[zone];
const scoreThreshold = zi?.scoreThreshold || this.scoreThreshold;
if (d.score >= scoreThreshold)
return true;
}
});
detected.detected.detections = filteredDetections;
detected.detected.detections = zonedDetections;
detections++;
// this.console.warn('dps', detections / (Date.now() - start) * 1000);
@@ -615,79 +370,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
}
async startStreamAnalysis() {
if (this.newPipeline) {
await this.startPipelineAnalysis();
}
else if (!this.hasMotionType && this.storageSettings.values.captureMode === 'Snapshot') {
await this.startSnapshotAnalysis();
}
else {
await this.startVideoDetection();
}
}
async extendedObjectDetect(force?: boolean) {
if (!this.hasMotionType && this.storageSettings.values.captureMode === 'Snapshot') {
this.analyzeStop = Date.now() + this.getDetectionDuration();
}
else {
try {
if (!force && !this.motionDetected)
return;
await this.objectDetection?.detectObjects(undefined, {
detectionId: this.detectionId,
duration: this.getDetectionDuration(),
settings: this.getCurrentSettings(),
}, this);
}
catch (e) {
// ignore any errors
}
}
}
async startVideoDetection() {
try {
const settings = this.getCurrentSettings();
// prevent stream retrieval noise until notified that the detection is no longer running.
if (this.detectorRunning) {
const session = await this.objectDetection?.detectObjects(undefined, {
detectionId: this.detectionId,
duration: this.getDetectionDuration(),
settings,
}, this);
this.detectorRunning = session.running;
if (this.detectorRunning)
return;
}
// dummy up the last detection time to prevent the idle timers from purging everything.
this.detectionState.lastDetection = Date.now();
this.detectorRunning = true;
let stream: MediaObject;
stream = await this.cameraDevice.getVideoStream({
destination: !this.hasMotionType ? 'local-recorder' : 'low-resolution',
// ask rebroadcast to mute audio, not needed.
audio: null,
});
const session = await this.objectDetection?.detectObjects(stream, {
detectionId: this.detectionId,
duration: this.getDetectionDuration(),
settings,
}, this);
this.detectorRunning = session.running;
}
catch (e) {
this.console.log('failure retrieving stream', e);
this.detectorRunning = false;
}
}
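The legacy path above also shows how a detection stream is requested from rebroadcast: a 'local-recorder' destination for object detection versus 'low-resolution' for motion, with audio muted since it is not needed. A minimal sketch of that request; the options mirror the removed code above and the types come from '@scrypted/sdk':

// Minimal sketch of requesting a detection-friendly stream.
async function getDetectionStream(camera: VideoCamera, hasMotionType: boolean): Promise<MediaObject> {
    return camera.getVideoStream({
        // full-detail stream for object detection, low resolution is enough for motion
        destination: !hasMotionType ? 'local-recorder' : 'low-resolution',
        // ask rebroadcast to mute audio, not needed for detection
        audio: null,
    });
}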
normalizeBox(boundingBox: [number, number, number, number], inputDimensions: [number, number]) {
let [x, y, width, height] = boundingBox;
let x2 = x + width;
@@ -806,88 +488,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
this.onDeviceEvent(ScryptedInterface.ObjectDetector, detection);
}
async trackObjects(detectionResult: ObjectsDetected, showAll?: boolean) {
// do not denoise
if (this.hasMotionType) {
return detectionResult;
}
if (!detectionResult?.detections) {
// detection session ended.
return detectionResult;
}
const { detections } = detectionResult;
const found: DenoisedDetectionEntry<TrackedDetection>[] = [];
denoiseDetections<TrackedDetection>(this.detectionState, detections.map(detection => ({
get id() {
return detection.id;
},
set id(id) {
detection.id = id;
},
name: detection.className,
score: detection.score,
detection,
get firstSeen() {
return detection.history?.firstSeen
},
set firstSeen(value) {
detection.history = detection.history || {
firstSeen: value,
lastSeen: value,
};
detection.history.firstSeen = value;
},
get lastSeen() {
return detection.history?.lastSeen
},
set lastSeen(value) {
detection.history = detection.history || {
firstSeen: value,
lastSeen: value,
};
detection.history.lastSeen = value;
},
boundingBox: detection.boundingBox,
})), {
timeout: this.storageSettings.values.detectionTimeout * 1000,
added: d => {
found.push(d);
d.detection.bestScore = d.detection.score;
d.detection.newOrBetterDetection = true;
},
removed: d => {
this.console.log('expired detection:', `${d.detection.className} (${d.detection.score})`);
if (detectionResult.running)
this.extendedObjectDetect();
},
retained: (d, o) => {
if (d.detection.score > o.detection.bestScore) {
d.detection.bestScore = d.detection.score;
d.detection.newOrBetterDetection = true;
}
else {
d.detection.bestScore = o.detection.bestScore;
}
d.detection.bestSecondPassScore = o.detection.bestSecondPassScore;
},
expiring: (d) => {
},
});
if (found.length) {
this.console.log('new detection:', found.map(d => `${d.id} ${d.detection.className} (${d.detection.score})`).join(', '));
if (detectionResult.running)
this.extendedObjectDetect();
}
if (found.length || showAll) {
this.console.log('current detections:', this.detectionState.previousDetections.map(d => `${d.detection.className} (${d.detection.score}, ${d.detection.boundingBox?.join(', ')})`).join(', '));
}
return detectionResult;
}
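The getter/setter wrappers above let denoiseDetections read and write tracking metadata (id, firstSeen, lastSeen) while keeping detection.history as the single source of truth on the underlying ObjectDetectionResult. A stripped-down version of that adapter pattern:

// Expose lastSeen as a property that lazily creates detection.history,
// so the denoiser and the detection share state.
function wrapDetection(detection: ObjectDetectionResult) {
    return {
        detection,
        get lastSeen() {
            return detection.history?.lastSeen;
        },
        set lastSeen(value: number) {
            detection.history = detection.history || { firstSeen: value, lastSeen: value };
            detection.history.lastSeen = value;
        },
    };
}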
setDetection(detection: ObjectsDetected, detectionInput: MediaObject) {
if (!detection.detectionId)
detection.detectionId = crypto.randomBytes(4).toString('hex');
@@ -942,9 +542,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
get newPipeline() {
if (!this.plugin.storageSettings.values.newPipeline)
return;
const newPipeline = this.storageSettings.values.newPipeline;
if (!newPipeline)
return newPipeline;
@@ -979,8 +576,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
}
this.storageSettings.settings.motionSensorSupplementation.hide = !this.hasMotionType || !this.mixinDeviceInterfaces.includes(ScryptedInterface.MotionSensor);
this.storageSettings.settings.captureMode.hide = this.hasMotionType || !!this.plugin.storageSettings.values.newPipeline;
this.storageSettings.settings.newPipeline.hide = this.hasMotionType || !this.plugin.storageSettings.values.newPipeline;
this.storageSettings.settings.detectionDuration.hide = this.hasMotionType;
this.storageSettings.settings.detectionTimeout.hide = this.hasMotionType;
this.storageSettings.settings.motionDuration.hide = !this.hasMotionType;
@@ -988,23 +583,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
settings.push(...await this.storageSettings.getSettings());
let hideThreshold = true;
if (!this.hasMotionType) {
let hasInclusionZone = false;
for (const zone of Object.keys(this.zones)) {
const zi = this.zoneInfos[zone];
if (!zi?.exclusion) {
hasInclusionZone = true;
break;
}
}
if (!hasInclusionZone) {
hideThreshold = false;
}
}
this.storageSettings.settings.scoreThreshold.hide = hideThreshold;
this.storageSettings.settings.secondScoreThreshold.hide = hideThreshold;
settings.push({
key: 'zones',
title: 'Zones',
@@ -1048,38 +626,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
],
value: zi?.type || 'Intersect',
});
if (!this.hasMotionType) {
settings.push(
{
subgroup,
key: `zoneinfo-classes-${name}`,
title: `Detection Classes`,
description: 'The detection classes to match inside this zone. An empty list will match all classes.',
choices: (await this.getObjectTypes())?.classes || [],
value: zi?.classes || [],
multiple: true,
},
{
subgroup,
title: 'Minimum Detection Confidence',
description: 'Higher values eliminate false positives and low quality recognition candidates.',
key: `zoneinfo-scoreThreshold-${name}`,
type: 'number',
value: zi?.scoreThreshold || this.scoreThreshold,
placeholder: '.2',
},
{
subgroup,
title: 'Second Pass Confidence',
description: 'Crop and reanalyze a result from the initial detection pass to get more accurate results.',
key: `zoneinfo-secondScoreThreshold-${name}`,
type: 'number',
value: zi?.secondScoreThreshold || this.secondScoreThreshold,
placeholder: '.7',
},
);
}
}
if (!this.hasMotionType) {
@@ -1157,7 +703,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
if (key === 'analyzeButton') {
this.analyzeStop = Date.now() + 60000;
// await this.snapshotDetection();
await this.startStreamAnalysis();
await this.startPipelineAnalysis();
}
else {
const settings = this.getCurrentSettings();
@@ -1175,7 +721,6 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
this.clearMotionTimeout();
this.motionListener?.removeListener();
this.motionMixinListener?.removeListener();
this.detectorListener?.removeListener();
this.endObjectDetection();
}
}
@@ -1246,11 +791,6 @@ class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings,
currentMixins = new Set<ObjectDetectorMixin>();
storageSettings = new StorageSettings(this, {
newPipeline: {
title: 'New Video Pipeline',
description: 'WARNING! DO NOT ENABLE: Use the new video pipeline. Leave blank to use the legacy pipeline.',
type: 'boolean',
},
activeMotionDetections: {
title: 'Active Motion Detection Sessions',
readonly: true,

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/opencv",
"version": "0.0.69",
"version": "0.0.70",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/opencv",
"version": "0.0.69",
"version": "0.0.70",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -36,5 +36,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.69"
"version": "0.0.70"
}

View File

@@ -9,3 +9,4 @@ imutils>=0.5.0
av>=10.0.0; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'
# not available on armhf
opencv-python; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/pam-diff",
"version": "0.0.17",
"version": "0.0.18",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/pam-diff",
"version": "0.0.17",
"version": "0.0.18",
"hasInstallScript": true,
"dependencies": {
"@types/node": "^16.6.1",

View File

@@ -43,5 +43,5 @@
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
},
"version": "0.0.17"
"version": "0.0.18"
}

View File

@@ -231,7 +231,7 @@ ENDHDR
detections.push(
{
className: 'motion',
score: trigger.percent / 100,
score: 1,
boundingBox: [blob.minX, blob.minY, blob.maxX - blob.minX, blob.maxY - blob.minY],
}
)
@@ -241,7 +241,7 @@ ENDHDR
detections.push(
{
className: 'motion',
score: trigger.percent / 100,
score: 1,
}
)
}

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/python-codecs",
"version": "0.1.18",
"version": "0.1.22",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@scrypted/python-codecs",
"version": "0.1.18",
"version": "0.1.22",
"devDependencies": {
"@scrypted/sdk": "file:../../sdk"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/python-codecs",
"version": "0.1.18",
"version": "0.1.22",
"description": "Python Codecs for Scrypted",
"keywords": [
"scrypted",

View File

@@ -34,7 +34,7 @@ async def generateVideoFramesGstreamer(mediaObject: scrypted_sdk.MediaObject, op
else:
raise Exception('unknown container %s' % container)
elif videosrc.startswith('rtsp'):
videosrc = 'rtspsrc buffer-mode=0 location=%s protocols=tcp latency=0 is-live=false' % videosrc
videosrc = 'rtspsrc buffer-mode=0 location=%s protocols=tcp latency=0' % videosrc
if videoCodec == 'h264':
videosrc += ' ! rtph264depay ! h264parse'

View File

@@ -27,6 +27,8 @@ async def generateVideoFramesLibav(mediaObject: scrypted_sdk.MediaObject, option
# stream.codec_context.options['-analyzeduration'] = '0'
# stream.codec_context.options['-probesize'] = '500000'
gray = options and options.get('format') == 'gray'
start = 0
try:
for idx, frame in enumerate(container.decode(stream)):
@@ -39,7 +41,12 @@ async def generateVideoFramesLibav(mediaObject: scrypted_sdk.MediaObject, option
continue
# print(frame)
if vipsimage.pyvips:
vips = vipsimage.pyvips.Image.new_from_array(frame.to_ndarray(format='rgb24'))
if gray and frame.format.name.startswith('yuv') and frame.planes and len(frame.planes):
vips = vipsimage.new_from_memory(memoryview(frame.planes[0]), frame.width, frame.height, 1)
elif gray:
vips = vipsimage.pyvips.Image.new_from_array(frame.to_ndarray(format='gray'))
else:
vips = vipsimage.pyvips.Image.new_from_array(frame.to_ndarray(format='rgb24'))
vipsImage = vipsimage.VipsImage(vips)
try:
mo = await vipsimage.createVipsMediaObject(vipsImage)
@@ -48,7 +55,16 @@ async def generateVideoFramesLibav(mediaObject: scrypted_sdk.MediaObject, option
vipsImage.vipsImage = None
vips.invalidate()
else:
pil = frame.to_image()
if gray and frame.format.name.startswith('yuv') and frame.planes and len(frame.planes):
pil = pilimage.new_from_memory(memoryview(frame.planes[0]), frame.width, frame.height, 1)
elif gray:
rgb = frame.to_image()
try:
pil = rgb.convert('L')
finally:
rgb.close()
else:
pil = frame.to_image()
pilImage = pilimage.PILImage(pil)
try:
mo = await pilimage.createPILMediaObject(pilImage)

View File

@@ -21,19 +21,26 @@ class PILImage(scrypted_sdk.VideoFrame):
if not options or not options.get('format', None):
def format():
bytesArray = io.BytesIO()
pilImage.pilImage.save(bytesArray, format='JPEG')
return bytesArray.getvalue()
return pilImage.pilImage.tobytes()
return await to_thread(format)
elif options['format'] == 'rgb':
def format():
rgb = pilImage.pilImage
if rgb.format == 'RGBA':
rgb = rgb.convert('RGB')
return rgb.tobytes()
rgbx = pilImage.pilImage
if rgbx.format != 'RGBA':
return rgbx.tobytes()
rgb = rgbx.convert('RGB')
try:
return rgb.tobytes()
finally:
rgb.close()
return await to_thread(format)
return await to_thread(lambda: pilImage.pilImage.write_to_buffer('.' + options['format']))
def save():
bytesArray = io.BytesIO()
pilImage.pilImage.save(bytesArray, format=options['format'])
return bytesArray.getvalue()
return await to_thread(lambda: save())
async def toPILImage(self, options: scrypted_sdk.ImageOptions = None):
return await to_thread(lambda: toPILImage(self, options))
@@ -66,7 +73,7 @@ def toPILImage(pilImageWrapper: PILImage, options: scrypted_sdk.ImageOptions = N
if not width:
width = pilImage.width * yscale
pilImage = pilImage.resize((width, height), resample=Image.Resampling.BILINEAR)
pilImage = pilImage.resize((width, height), resample=Image.BILINEAR)
return PILImage(pilImage)

View File

@@ -1,7 +1,11 @@
# plugin
# gobject introspection for gstreamer.
PyGObject>=3.30.4; sys_platform != 'win32'
# libav doesn't work on armv7
av>=10.0.0; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'
# pyvips is not available on windows, and is preinstalled as part of the installer scripts on
# mac and linux.
pyvips; sys_platform != 'win32'
# in case pyvips fails to load, use a pillow fallback.

View File

@@ -13,7 +13,6 @@ import os
from detect import DetectionSession, DetectPlugin
from .sort_oh import tracker
import numpy as np
import traceback
@@ -24,14 +23,12 @@ except:
class PredictSession(DetectionSession):
image: Image.Image
tracker: sort_oh.tracker.Sort_OH
def __init__(self, start_time: float) -> None:
super().__init__()
self.image = None
self.processed = 0
self.start_time = start_time
self.tracker = None
def parse_label_contents(contents: str):
lines = contents.splitlines()
@@ -121,7 +118,6 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
self.toMimeType = scrypted_sdk.ScryptedMimeTypes.MediaObject.value
self.crop = False
self.trackers: Mapping[str, tracker.Sort_OH] = {}
# periodic restart because there seem to be leaks in the tflite or coral API.
loop = asyncio.get_event_loop()
@@ -210,23 +206,7 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
],
}
trackerWindow: Setting = {
'title': 'Tracker Window',
'subgroup': 'Advanced',
'description': 'Internal Setting. Do not change.',
'key': 'trackerWindow',
'value': 3,
'type': 'number',
}
trackerCertainty: Setting = {
'title': 'Tracker Certainty',
'subgroup': 'Advanced',
'description': 'Internal Setting. Do not change.',
'key': 'trackerCertainty',
'value': .2,
'type': 'number',
}
return [allowList, trackerWindow, trackerCertainty]
return [allowList]
def create_detection_result(self, objs: List[Prediction], size, allowList, convert_to_src_size=None) -> ObjectsDetected:
detections: List[ObjectDetectionResult] = []
@@ -400,24 +380,6 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
(w, h) = self.get_input_size() or image.size
(iw, ih) = image.size
if detection_session and not detection_session.tracker:
t = self.trackers.get(detection_session.id)
if not t:
t = tracker.Sort_OH(scene=np.array([iw, ih]))
trackerCertainty = settings.get('trackerCertainty')
if not isinstance(trackerCertainty, int):
trackerCertainty = .2
t.conf_three_frame_certainty = trackerCertainty * 3
trackerWindow = settings.get('trackerWindow')
if not isinstance(trackerWindow, int):
trackerWindow = 3
t.conf_unmatched_history_size = trackerWindow
self.trackers[detection_session.id] = t
detection_session.tracker = t
# conf_trgt = 0.35
# conf_objt = 0.75
# detection_session.tracker.conf_trgt = conf_trgt
# detection_session.tracker.conf_objt = conf_objt
# this is a single pass or the second pass. detect once and return results.
if multipass_crop:
@@ -543,61 +505,11 @@ class PredictPlugin(DetectPlugin, scrypted_sdk.BufferConverter, scrypted_sdk.Set
ret = ret1
ret['detections'] = dedupe_detections(ret1['detections'] + ret2['detections'], is_same_detection=is_same_detection_middle)
if detection_session:
self.track(detection_session, ret)
if not len(ret['detections']):
return ret, RawImage(image)
return ret, RawImage(image)
def track(self, detection_session: PredictSession, ret: ObjectsDetected):
detections = ret['detections']
sort_input = []
for d in ret['detections']:
r: ObjectDetectionResult = d
l, t, w, h = r['boundingBox']
sort_input.append([l, t, l + w, t + h, r['score']])
trackers, unmatched_trckr, unmatched_gts = detection_session.tracker.update(np.array(sort_input), [])
for td in trackers:
x0, y0, x1, y1, trackID = td[0].item(), td[1].item(
), td[2].item(), td[3].item(), td[4].item()
slop = 0
obj: ObjectDetectionResult = None
ta = (x1 - x0) * (y1 - y0)
box = Rectangle(x0, y0, x1, y1)
for d in detections:
if d.get('id'):
continue
ob: ObjectDetectionResult = d
dx0, dy0, dw, dh = ob['boundingBox']
dx1 = dx0 + dw
dy1 = dy0 + dh
da = dw * dh
area = intersect_area(Rectangle(dx0, dy0, dx1, dy1), box)
if not area:
continue
# the intersection area is always smaller than
# the detection or tracker area.
# greater values, i.e. approaching 2, are better.
dslop = area / ta + area / da
if (dslop > slop):
slop = dslop
obj = ob
if obj:
obj['id'] = str(trackID)
# this may happen if the tracker predicts something is still in the scene
# but it was not detected
# else:
# print('unresolved tracker')
# for d in detections:
# if not d.get('id'):
# # this happens if the tracker is not confident in a new detection yet due
# # to low score or has not been found in enough frames
# if d['className'] == 'person':
# print('untracked %s: %s' % (d['className'], d['score']))
async def run_detection_crop(self, detection_session: DetectionSession, sample: RawImage, settings: Any, src_size, convert_to_src_size, bounding_box: Tuple[float, float, float, float]) -> ObjectsDetected:
(ret, _) = await self.run_detection_image(detection_session, sample.image, settings, src_size, convert_to_src_size, bounding_box)
return ret

View File

@@ -7,10 +7,4 @@ Pillow>=5.4.1; sys_platform != 'linux' or platform_machine != 'x86_64'
pillow-simd; sys_platform == 'linux' and platform_machine == 'x86_64'
pycoral~=2.0
PyGObject>=3.30.4; sys_platform != 'win32'
# libav doesn't work on armv7
av>=10.0.0; sys_platform != 'linux' or platform_machine == 'x86_64' or platform_machine == 'aarch64'
tflite-runtime==2.5.0.post1
# sort_oh
scipy
filterpy

View File

@@ -21,6 +21,7 @@ from predict import PredictPlugin
import concurrent.futures
import queue
import asyncio
from time import time
def parse_label_contents(contents: str):
lines = contents.splitlines()
@@ -116,6 +117,7 @@ class TensorFlowLitePlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted
def predict():
interpreter = self.interpreters.get()
try:
print('predict s %s' % time())
common.set_input(
interpreter, input)
scale = (1, 1)
@@ -131,6 +133,7 @@ class TensorFlowLitePlugin(PredictPlugin, scrypted_sdk.BufferConverter, scrypted
raise e
finally:
self.interpreters.put(interpreter)
print('predict e %s' % time())
objs = await asyncio.get_event_loop().run_in_executor(self.executor, predict)

View File

@@ -27,7 +27,9 @@
"${workspaceFolder}/**/*.js"
],
"env": {
"SCRYPTED_PYTHON_PATH": "python3.10",
// force usage of system python because brew python is 3.11,
// which has no wheels for coremltools or tflite-runtime
"SCRYPTED_PYTHON_PATH": "/usr/bin/python3",
// "SCRYPTED_SHARED_WORKER": "true",
// "SCRYPTED_DISABLE_AUTHENTICATION": "true",
// "DEBUG": "*",

View File

@@ -1,12 +1,12 @@
{
"name": "@scrypted/server",
"version": "0.7.28",
"version": "0.7.35",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "@scrypted/server",
"version": "0.7.28",
"version": "0.7.35",
"license": "ISC",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10",

View File

@@ -1,6 +1,6 @@
{
"name": "@scrypted/server",
"version": "0.7.29",
"version": "0.7.36",
"description": "",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.10",

View File

@@ -264,6 +264,14 @@ class PluginRemote:
nativeId, *values, sep=sep, end=end, flush=flush), self.loop)
async def loadZip(self, packageJson, zipData, options: dict=None):
try:
return await self.loadZipWrapped(packageJson, zipData, options)
except:
print('plugin start/fork failed')
traceback.print_exc()
raise
async def loadZipWrapped(self, packageJson, zipData, options: dict=None):
sdk = ScryptedStatic()
clusterId = options['clusterId']
@@ -370,6 +378,18 @@ class PluginRemote:
plugin_volume = os.environ.get('SCRYPTED_PLUGIN_VOLUME')
# it's possible to run 32-bit docker on aarch64, which causes pip requirements
# to fail because pip only allows filtering on machine, even when running a different architecture.
# this will cause prebuilt wheel installation to fail.
if platform.machine() == 'aarch64' and platform.architecture()[0] == '32bit':
print('=============================================')
print('Python machine vs architecture mismatch detected. Plugin installation may fail.')
print('If Scrypted is running in docker, the docker version may be 32bit while the host kernel is 64bit.')
print('This may be resolved by reinstalling a 64bit docker.')
print('The docker architecture can be checked with the command: "file $(which docker)"')
print('The host architecture can be checked with: "uname -m"')
print('=============================================')
python_version = 'python%s' % str(
sys.version_info[0])+"."+str(sys.version_info[1])
print('python version:', python_version)
@@ -519,20 +539,10 @@ class PluginRemote:
self.deviceManager, self.mediaManager)
if not forkMain:
try:
from main import create_scrypted_plugin # type: ignore
except:
print('plugin failed to start')
traceback.print_exc()
raise
from main import create_scrypted_plugin # type: ignore
return await rpc.maybe_await(create_scrypted_plugin())
try:
from main import fork # type: ignore
except:
print('fork failed to start')
traceback.print_exc()
raise
from main import fork # type: ignore
forked = await rpc.maybe_await(fork())
if type(forked) == dict:
forked[rpc.RpcPeer.PROPERTY_JSON_COPY_SERIALIZE_CHILDREN] = True
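The aarch64 check above has a Node-side analogue: the same mismatch (64-bit kernel, 32-bit runtime) can be spotted by comparing os.machine() with process.arch. A sketch only, not part of this changeset:

import os from 'os';

// Warn when the kernel reports aarch64 but the Node runtime was built for 32-bit arm,
// mirroring the Python-side check above.
function warnOnArchMismatch() {
    if (os.machine() === 'aarch64' && process.arch === 'arm')
        console.warn('32-bit runtime detected on a 64-bit kernel; plugin installation may fail.');
}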

View File

@@ -1,3 +1,4 @@
import os from 'os';
import { Device, EngineIOHandler } from '@scrypted/types';
import AdmZip from 'adm-zip';
import crypto from 'crypto';
@@ -310,7 +311,7 @@ export class PluginHost {
this.worker.stdout.on('data', data => console.log(data.toString()));
this.worker.stderr.on('data', data => console.error(data.toString()));
const consoleHeader = `server version: ${serverVersion}\nplugin version: ${this.pluginId} ${this.packageJson.version}\n`;
const consoleHeader = `${os.platform()} ${os.arch()} ${os.machine()} ${os.version()}\nserver version: ${serverVersion}\nplugin version: ${this.pluginId} ${this.packageJson.version}\n`;
this.consoleServer = createConsoleServer(this.worker.stdout, this.worker.stderr, consoleHeader);
const disconnect = () => {

View File

@@ -645,7 +645,13 @@ export function attachPluginRemote(peer: RpcPeer, options?: PluginRemoteAttachOp
params.pluginRuntimeAPI = ret;
return options.onLoadZip(ret, params, packageJson, zipData, zipOptions);
try {
return await options.onLoadZip(ret, params, packageJson, zipData, zipOptions);
}
catch (e) {
console.error('plugin start/fork failed', e)
throw e;
}
},
}

View File

@@ -39,7 +39,10 @@ export class PythonRuntimeWorker extends ChildProcessWorker {
'/usr/local/lib/gstreamer-1.0',
];
for (const gstPath of gstPaths) {
if (fs.existsSync(path.join(gstPath, 'libgstx264.dylib'))) {
// search for common plugins.
if (fs.existsSync(path.join(gstPath, 'libgstx264.dylib'))
|| fs.existsSync(path.join(gstPath, 'libgstlibav.dylib'))
|| fs.existsSync(path.join(gstPath, 'libgstvideotestsrc.dylib'))) {
gstEnv['GST_PLUGIN_PATH'] = gstPath;
break;
}

View File

@@ -446,25 +446,24 @@ async function start(mainFilename: string, options?: {
let hasLogin = await db.getCount(ScryptedUser) > 0;
if (process.env.SCRYPTED_ADMIN_USERNAME && process.env.SCRYPTED_ADMIN_TOKEN) {
let user = await db.tryGet(ScryptedUser, process.env.SCRYPTED_ADMIN_USERNAME);
if (!user) {
user = new ScryptedUser();
user._id = process.env.SCRYPTED_ADMIN_USERNAME;
setScryptedUserPassword(user, crypto.randomBytes(8).toString('hex'), Date.now());
user.token = crypto.randomBytes(16).toString('hex');
await db.upsert(user);
hasLogin = true;
}
}
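With SCRYPTED_ADMIN_USERNAME and SCRYPTED_ADMIN_TOKEN set, the block above creates the admin user (with a random password and token) on first start, and the GET /login handler further down returns the env token to that user. An illustrative response shape inferred from that handler; this interface is not part of the SDK and the addresses type is assumed:

interface EnvAdminLoginResponse {
    username: string;     // res.locals.username
    token: string;        // process.env.SCRYPTED_ADMIN_TOKEN
    expiration: number;   // ONE_DAY_MILLISECONDS
    addresses: string[];  // assumed: list of server addresses
    // ...plus whatever fields createTokens(userToken) spreads in
}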
app.options('/login', (req, res) => {
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization, Content-Length, X-Requested-With');
res.send(200);
});
const resetLogin = path.join(getScryptedVolume(), 'reset-login');
async function checkResetLogin() {
try {
if (fs.existsSync(resetLogin)) {
fs.rmSync(resetLogin);
await db.removeAll(ScryptedUser);
hasLogin = false;
}
}
catch (e) {
}
}
app.post('/login', async (req, res) => {
const { username, password, change_password, maxAge: maxAgeRequested } = req.body;
const timestamp = Date.now();
@@ -550,6 +549,19 @@ async function start(mainFilename: string, options?: {
});
});
const resetLogin = path.join(getScryptedVolume(), 'reset-login');
async function checkResetLogin() {
try {
if (fs.existsSync(resetLogin)) {
fs.rmSync(resetLogin);
await db.removeAll(ScryptedUser);
hasLogin = false;
}
}
catch (e) {
}
}
app.get('/login', async (req, res) => {
await checkResetLogin();
@@ -558,7 +570,11 @@ async function start(mainFilename: string, options?: {
// env/header based admin login
if (res.locals.username && res.locals.username === process.env.SCRYPTED_ADMIN_USERNAME) {
const userToken = new UserToken(res.locals.username, undefined, Date.now());
res.send({
...createTokens(userToken),
expiration: ONE_DAY_MILLISECONDS,
username: res.locals.username,
token: process.env.SCRYPTED_ADMIN_TOKEN,
addresses,