Compare commits
1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | fe165295fb |  |

12  .github/ISSUE_TEMPLATE/bug_report.md  vendored
@@ -13,11 +13,11 @@ Before opening an issue, view the device's Console logs in the Scrypted Manageme

**DO NOT OPEN ISSUES FOR ANY OF THE FOLLOWING:**

* Server or hardware setup assistance. Use Discord, Reddit, or Github Discussions.
* Server setup assistance. Use Discord, Reddit, or Github Discussions.
* Hardware setup assistance. Use Discord, Reddit, or Github Discussions.
* Feature Requests. Use Discord, Reddit, or Github Discussions.
* Packet loss in your camera logs. This is wifi/network congestion.
* HomeKit weirdness. See HomeKit troubleshooting guide.
* Release schedules or timelines. Releases are rolled out unevenly across the different server platforms.

However, if something **was working**, and is now **no longer working**, you may create a Github issue.
Created issues that do not meet these requirements or are improperly filled out will be immediately closed.
@@ -27,11 +27,6 @@ Created issues that do not meet these requirements or are improperly filled out
1. Delete this section and everything above it.
2. Fill out the sections below.

**Before You Submit**

- [ ] I checked that my issue isn't already filed: [Search open issues](https://github.com/koush/scrypted/issues).
- [ ] I checked the relevant camera/device and/or plugin `Log` in the `Management Console` for errors or warnings that may help identify and resolve the issue myself.

**Describe the bug**
A clear and concise description of what the bug is. The issue tracker is only for reporting bugs in Scrypted; for general support, check Discord. Hardware support requests or assistance requests will be immediately closed.

@@ -48,9 +43,6 @@ A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.

**Logs**
Include a `Log` from the device/camera in the management console (and, if applicable, the affected plugin, like HomeKit).

**Server (please complete the following information):**
- OS: [e.g. Ubuntu]
- Installation Method: [e.g. Desktop App, Docker, Local]
2  .github/ISSUE_TEMPLATE/config.yml  vendored
@@ -1,2 +0,0 @@
# disable blank issue creation
blank_issues_enabled: false
10  .github/workflows/build-plugins-changed.yml  vendored
@@ -1,11 +1,11 @@
name: Build changed plugins

on:
  # push:
  #   branches: ["main"]
  #   paths: ["plugins/**"]
  # pull_request:
  #   paths: ["plugins/**"]
  push:
    branches: ["main"]
    paths: ["plugins/**"]
  pull_request:
    paths: ["plugins/**"]
  workflow_dispatch:

jobs:
6  .github/workflows/build-sdk.yml  vendored
@@ -7,7 +7,7 @@ on:
  pull_request:
    paths: ["sdk/**"]
  workflow_dispatch:

jobs:
  build:
    name: Build
@@ -15,11 +15,11 @@ jobs:
    defaults:
      run:
        working-directory: ./sdk

    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 22.4.1
          node-version: 18
      - run: npm ci
      - run: npm run build
100  .github/workflows/docker-common.yml  vendored
@@ -7,11 +7,14 @@ jobs:
  build:
    name: Push Docker image to Docker Hub
    runs-on: self-hosted
    env:
      NODE_VERSION: '22'
    # runs-on: ubuntu-latest
    strategy:
      matrix:
        BASE: ["noble"]
        NODE_VERSION: [
          # "18",
          "20"
        ]
        BASE: ["jammy"]
        FLAVOR: ["full", "lite"]
    steps:
      - name: Check out the repo
@@ -20,26 +23,12 @@ jobs:
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up SSH
        uses: MrSquaare/ssh-setup-action@v2
        with:
          host: ${{ secrets.DOCKER_SSH_HOST_AMD64 }}
          private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}

      - name: Set up SSH
        uses: MrSquaare/ssh-setup-action@v2
        with:
          host: ${{ secrets.DOCKER_SSH_HOST_ARM64 }}
          private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
          platforms: linux/amd64
          append: |
            - endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_AMD64 }}
              platforms: linux/amd64

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
@@ -65,85 +54,14 @@ jobs:
        uses: docker/build-push-action@v4
        with:
          build-args: |
            NODE_VERSION=${{ env.NODE_VERSION }}
            NODE_VERSION=${{ matrix.NODE_VERSION }}
            BASE=${{ matrix.BASE }}
          context: install/docker/
          file: install/docker/Dockerfile.${{ matrix.FLAVOR }}
          platforms: linux/amd64,linux/arm64
          push: true
          tags: |
            koush/scrypted-common:${{ matrix.BASE }}-${{ matrix.FLAVOR }}
            ghcr.io/koush/scrypted-common:${{ matrix.BASE }}-${{ matrix.FLAVOR }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

  build-vendor:
    name: Push Vendor Docker image to Docker Hub
    needs: build
    runs-on: self-hosted
    strategy:
      matrix:
        BASE: ["noble"]
        VENDOR: ["nvidia", "nvidia-legacy", "intel", "amd"]
    steps:
      - name: Check out the repo
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up SSH
        uses: MrSquaare/ssh-setup-action@v2
        with:
          host: ${{ secrets.DOCKER_SSH_HOST_AMD64 }}
          private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}

      - name: Set up SSH
        uses: MrSquaare/ssh-setup-action@v2
        with:
          host: ${{ secrets.DOCKER_SSH_HOST_ARM64 }}
          private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
          platforms: linux/amd64
          append: |
            - endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_AMD64 }}
              platforms: linux/amd64

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
          platforms: linux/arm64
          append: |
            - endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_ARM64 }}
              platforms: linux/arm64

      - name: Login to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Login to Github Container Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push Docker image (scrypted-common)
        uses: docker/build-push-action@v4
        with:
          build-args: |
            BASE=ghcr.io/koush/scrypted-common:${{ matrix.BASE }}-full
          context: install/docker/
          file: install/docker/Dockerfile.${{ matrix.VENDOR }}
          platforms: linux/amd64,linux/arm64
          push: true
          tags: |
            koush/scrypted-common:${{ matrix.BASE }}-${{ matrix.VENDOR }}
            ghcr.io/koush/scrypted-common:${{ matrix.BASE }}-${{ matrix.VENDOR }}
            koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}
            ghcr.io/koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
56  .github/workflows/docker.yml  vendored
@@ -20,14 +20,10 @@ jobs:
    strategy:
      matrix:
        BASE: [
          ["noble-nvidia", ".s6", "noble-nvidia", "nvidia"],
          ["noble-nvidia-legacy", ".s6", "noble-nvidia-legacy", "nvidia-legacy"],
          ["noble-intel", ".s6", "noble-intel", "intel"],
          ["noble-amd", ".s6", "noble-amd", "amd"],
          ["noble-full", ".s6", "noble-full", "full"],
          ["noble-lite", "", "noble-lite", "lite"],
          ["noble-lite", ".router", "noble-router", "router"],
          "20-jammy-full",
          "20-jammy-lite",
        ]
        SUPERVISOR: ["", ".s6"]
    steps:
      - name: Check out the repo
        uses: actions/checkout@v3
@@ -46,26 +42,12 @@ jobs:
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up SSH
        uses: MrSquaare/ssh-setup-action@v2
        with:
          host: ${{ secrets.DOCKER_SSH_HOST_AMD64 }}
          private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}

      - name: Set up SSH
        uses: MrSquaare/ssh-setup-action@v2
        with:
          host: ${{ secrets.DOCKER_SSH_HOST_ARM64 }}
          private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
          platforms: linux/amd64
          append: |
            - endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_AMD64 }}
              platforms: linux/amd64

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
@@ -91,31 +73,23 @@ jobs:
        uses: docker/build-push-action@v4
        with:
          build-args: |
            BASE=${{ matrix.BASE[0] }}
            BASE=${{ matrix.BASE }}
            SCRYPTED_INSTALL_VERSION=${{ steps.package-version.outputs.NPM_VERSION }}
          context: install/docker/
          file: install/docker/Dockerfile${{ matrix.BASE[1] }}
          file: install/docker/Dockerfile${{ matrix.SUPERVISOR }}
          platforms: linux/amd64,linux/arm64
          push: true
          # when publishing a tag (beta or latest), platform and version, create some tags as follows.
          # using beta 0.0.1 as an example
          # koush/scrypted:v0.0.1-noble-full
          # koush/scrypted:beta
          # koush/scrypted:beta-nvidia|intel|full|router|lite

          # using latest 0.0.2 as an example:
          # koush/scrypted:v0.0.2-noble-full
          # koush/scrypted:latest
          # koush/scrypted:nvidia|intel|full|router|lite
          tags: |
            ${{ format('koush/scrypted:v{0}-{1}', github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION, matrix.BASE[2]) }}
            ${{ matrix.BASE[2] == 'noble-full' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
            ${{ github.event.inputs.tag == 'latest' && format('koush/scrypted:{0}', matrix.BASE[3]) || '' }}
            ${{ github.event.inputs.tag != 'latest' && format('koush/scrypted:{0}-{1}', github.event.inputs.tag, matrix.BASE[3]) || '' }}
            ${{ format('koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
            ${{ matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '.s6' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
            ${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '' && 'koush/scrypted:full' || '' }}
            ${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '' && 'koush/scrypted:lite' || '' }}
            ${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:lite-s6' || '' }}

            ${{ format('ghcr.io/koush/scrypted:v{1}-{0}', matrix.BASE[0], github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
            ${{ matrix.BASE[2] == 'noble-full' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
            ${{ github.event.inputs.tag == 'latest' && format('ghcr.io/koush/scrypted:{0}', matrix.BASE[3]) || '' }}
            ${{ github.event.inputs.tag != 'latest' && format('ghcr.io/koush/scrypted:{0}-{1}', github.event.inputs.tag, matrix.BASE[3]) || '' }}
            ${{ format('ghcr.io/koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
            ${{ matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '.s6' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
            ${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:full' || '' }}
            ${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:lite' || '' }}
            ${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:lite-s6' || '' }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
44  .github/workflows/static-sites.yml  vendored
@@ -1,44 +0,0 @@
# Simple workflow for deploying static content to GitHub Pages
name: Deploy static content to Pages

on:
  # Runs on pushes targeting the default branch
  push:
    branches: ["main"]
    paths: ["sites/static/**", ".github/workflows/static-sites.yml"]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Single deploy job since we're just deploying
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Pages
        uses: actions/configure-pages@v5
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          # Upload entire repository
          path: './sites/static'
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
64  .github/workflows/test.yml  vendored
@@ -9,28 +9,52 @@ on:
  workflow_dispatch:

jobs:
  test_local:
    name: Test local installation on ${{ matrix.runner }}
    runs-on: ${{ matrix.runner }}
    strategy:
      fail-fast: false
      matrix:
        runner: [ubuntu-latest, ubuntu-24.04-arm, macos-14, macos-13, windows-latest]
  test_linux_local:
    name: Test Linux local installation
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Parse latest server release
        id: parse_server
        shell: bash

      - name: Run install script
        run: |
          VERSION=$(cat ./server/package-lock.json | jq -r '.version')
          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
          echo "Will test @scrypted/server@$VERSION"

      - name: Install scrypted server
        uses: scryptedapp/setup-scrypted@v0.0.2
        with:
          branch: ${{ github.sha }}
          version: ${{ steps.parse_server.outputs.version }}
          cat ./install/local/install-scrypted-dependencies-linux.sh | sudo SERVICE_USER=$USER bash

      - name: Test server is running
        run: |
          systemctl status scrypted.service
          curl -k --retry 20 --retry-all-errors --retry-max-time 600 https://localhost:10443/

  test_mac_local:
    name: Test Mac local installation
    runs-on: macos-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Run install script
        run: |
          mkdir -p ~/.scrypted
          bash ./install/local/install-scrypted-dependencies-mac.sh

      - name: Test server is running
        run: |
          curl -k --retry 20 --retry-all-errors --retry-max-time 600 https://localhost:10443/

  test_windows_local:
    name: Test Windows local installation
    runs-on: windows-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Run install script
        run: |
          .\install\local\install-scrypted-dependencies-win.ps1

      - name: Test server is running
        run: |
          curl -k --retry 20 --retry-all-errors --retry-max-time 600 https://localhost:10443/
12  .gitmodules  vendored
@@ -1,6 +1,9 @@
[submodule "plugins/unifi-protect/src/unifi-protect"]
	path = external/unifi-protect
	url = ../../koush/unifi-protect.git
[submodule "plugins/myq/src/myq"]
	path = plugins/myq/src/myq
	url = ../../koush/myq.git
[submodule "external/ring-client-api"]
	path = external/ring-client-api
	url = ../../koush/ring
@@ -11,6 +14,12 @@
[submodule "external/werift"]
	path = external/werift
	url = ../../koush/werift-webrtc
[submodule "plugins/zwave/file-stream-rotator"]
	path = plugins/zwave/file-stream-rotator
	url = ../../koush/file-stream-rotator.git
[submodule "sdk/developer.scrypted.app"]
	path = sdk/developer.scrypted.app
	url = ../../koush/developer.scrypted.app
[submodule "plugins/sample-cameraprovider"]
	path = plugins/sample-cameraprovider
	url = ../../koush/scrypted-sample-cameraprovider
@@ -20,3 +29,6 @@
[submodule "plugins/wyze/docker-wyze-bridge"]
	path = plugins/wyze/docker-wyze-bridge
	url = ../../koush/docker-wyze-bridge.git
[submodule "plugins/onvif/onvif"]
	path = plugins/onvif/onvif
	url = ../../koush/onvif.git
1  common/fs/@types/sdk/settings-mixin.d.ts  vendored
@@ -1 +0,0 @@
../../../../sdk/dist/src/settings-mixin.d.ts

1  common/fs/@types/sdk/storage-settings.d.ts  vendored
@@ -1 +0,0 @@
../../../../sdk/dist/src/storage-settings.d.ts
3731  common/package-lock.json  generated
@@ -12,13 +12,12 @@
    "license": "ISC",
    "dependencies": {
      "@scrypted/sdk": "file:../sdk",
      "@scrypted/types": "^0.5.27",
      "@scrypted/server": "file:../server",
      "http-auth-utils": "^5.0.1",
      "typescript": "^5.5.3"
      "typescript": "^5.3.3"
    },
    "devDependencies": {
      "@types/node": "^20.19.11",
      "monaco-editor": "^0.50.0",
      "@types/node": "^20.11.0",
      "ts-node": "^10.9.2"
    }
  }
@@ -1,28 +0,0 @@
export function createActivityTimeout(timeout: number, timeoutCallback: () => void) {
    let dataTimeout: NodeJS.Timeout;

    let lastTime = Date.now();
    function resetActivityTimer() {
        lastTime = Date.now();
    }

    function clearActivityTimer() {
        clearInterval(dataTimeout);
    }

    if (timeout) {
        dataTimeout = setInterval(() => {
            if (Date.now() > lastTime + timeout) {
                clearInterval(dataTimeout);
                dataTimeout = undefined;
                timeoutCallback();
            }
        }, timeout);
    }

    resetActivityTimer();
    return {
        resetActivityTimer,
        clearActivityTimer,
    }
}
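For orientation, `createActivityTimeout` is a simple watchdog: callers ping `resetActivityTimer()` whenever work arrives, and the callback fires if no ping lands within `timeout` milliseconds. A minimal usage sketch, with an EventEmitter standing in for a data stream (the names are illustrative):

```ts
import { EventEmitter } from 'events';

// Hypothetical consumer: tear down a session if no data arrives for 10 seconds.
const stream = new EventEmitter();
const watchdog = createActivityTimeout(10_000, () => {
    console.error('no data for 10 seconds, killing session');
});

// Each data event pings the watchdog; the close handler stops the interval.
stream.on('data', () => watchdog.resetActivityTimer());
stream.once('close', () => watchdog.clearActivityTimer());
```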
@@ -9,16 +9,6 @@ export function createAsyncQueue<T>() {
    const waiting: Deferred<T>[] = [];
    const queued: { item: T, dequeued?: Deferred<void> }[] = [];

    const wait = async (index: number) => {
        const q = queued[index];
        if (!q)
            return;
        if (!q.dequeued) {
            q.dequeued = new Deferred<void>();
        }
        return q.dequeued.promise;
    }

    const dequeue = async () => {
        if (queued.length) {
            const { item, dequeued: enqueue } = queued.shift()!;
@@ -50,7 +40,7 @@ export function createAsyncQueue<T>() {
            return false;

        if (waiting.length) {
            const deferred = waiting.shift()!;
            const deferred = waiting.shift();
            dequeued?.resolve();
            deferred.resolve(item);
            return true;
@@ -76,7 +66,7 @@ export function createAsyncQueue<T>() {
            dequeued?.reject(new Error('abort'));
        };

        dequeued?.promise.catch(() => { }).finally(() => signal.removeEventListener('abort', h));
        dequeued.promise.catch(() => {}).finally(() => signal.removeEventListener('abort', h));
        signal.addEventListener('abort', h);

        return true;
@@ -89,7 +79,7 @@ export function createAsyncQueue<T>() {
        ended = e || new EndError();
        endDeferred.resolve();
        while (waiting.length) {
            waiting.shift()!.reject(ended);
            waiting.shift().reject(ended);
        }
        return true;
    }
@@ -104,7 +94,7 @@ export function createAsyncQueue<T>() {
        }
        catch (e) {
            // the yield above may raise an error, and the queue should be ended.
            end(e as Error);
            end(e);
            if (e instanceof EndError)
                return;
            throw e;
@@ -133,9 +123,6 @@ export function createAsyncQueue<T>() {
    }

    return {
        [Symbol.dispose]() {
            end(new Error('async queue disposed'));
        },
        get ended() {
            return ended;
        },
@@ -164,28 +151,10 @@ export function createAsyncQueue<T>() {
            dequeue,
            get queue() {
                return queue();
            },
            wait,
        }
    }
}

export function createAsyncQueueFromGenerator<T>(generator: AsyncGenerator<T>) {
    const q = createAsyncQueue<T>();
    (async () => {
        try {
            for await (const i of generator) {
                await q.enqueue(i);
            }
        }
        catch (e) {
            q.end(e as Error);
        }
        q.end();
    })();

    return q;
}

// async function testSlowEnqueue() {
//     const asyncQueue = createAsyncQueue<number>();
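A minimal consumption sketch, assuming only the surface visible above (`enqueue`, `end`, and the async-iterable `queue` accessor):

```ts
// Producer/consumer pair over the async queue.
const q = createAsyncQueue<number>();

// Consumer: iterates until end() is called, or exits on error.
(async () => {
    for await (const n of q.queue) {
        console.log('dequeued', n);
    }
})();

// Producer: enqueue a few items, then signal completion.
(async () => {
    await q.enqueue(1);
    await q.enqueue(2);
    q.end();
})();
```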
@@ -1,209 +0,0 @@
import sdk, { AudioStreamOptions, MediaStreamConfiguration, MediaStreamDestination, MediaStreamOptions, ScryptedDeviceBase, Setting } from "@scrypted/sdk";

export const automaticallyConfigureSettings: Setting = {
    key: 'autoconfigure',
    title: 'Automatically Configure Settings',
    description: 'Automatically configure and validate the camera codecs and other settings for optimal Scrypted performance. Some settings will require manual configuration via the camera web admin.',
    type: 'boolean',
    value: true,
};

export const onvifAutoConfigureSettings: Setting = {
    key: 'onvif-autoconfigure',
    type: 'html',
    value: 'ONVIF autoconfiguration will configure the camera codecs. <b>The camera motion sensor must still be <a target="_blank" href="https://docs.scrypted.app/camera-preparation.html#motion-sensor-setup">configured manually</a>.</b>',
};

const MEGABIT = 1024 * 1000;

function getBitrateForResolution(resolution: number) {
    if (resolution >= 3840 * 2160)
        return 8 * MEGABIT;
    if (resolution >= 2688 * 1520)
        return 3 * MEGABIT;
    if (resolution >= 1920 * 1080)
        return 2 * MEGABIT;
    if (resolution >= 1280 * 720)
        return MEGABIT;
    if (resolution >= 640 * 480)
        return MEGABIT / 2;
    return MEGABIT / 4;
}

export async function checkPluginNeedsAutoConfigure(plugin: ScryptedDeviceBase, extraDevices = 0) {
    if (plugin.storage.getItem('autoconfigure') === 'true')
        return;

    plugin.storage.setItem('autoconfigure', 'true');
    if (sdk.deviceManager.getNativeIds().length <= 1 + extraDevices)
        return;
    plugin.log.a(`${plugin.name} now has support for automatic camera configuration for optimal performance. Cameras can be autoconfigured in their respective settings.`);
}

export async function autoconfigureCodecs(
    getCodecs: () => Promise<MediaStreamOptions[]>,
    configureCodecs: (options: MediaStreamOptions) => Promise<MediaStreamConfiguration>,
    audioOptions?: AudioStreamOptions,
) {
    audioOptions ||= {
        codec: 'pcm_mulaw',
        bitrate: 64000,
        sampleRate: 8000,
    };

    const codecs = await getCodecs();
    const configurable: MediaStreamConfiguration[] = [];
    for (const codec of codecs) {
        const config = await configureCodecs({
            id: codec.id,
        });
        configurable.push(config);
    }

    const used: MediaStreamConfiguration[] = [];

    for (const _ of ['local', 'remote', 'low-resolution'] as MediaStreamDestination[]) {
        // find stream with the highest configurable resolution.
        let highest: [MediaStreamConfiguration, number] = [undefined, 0];
        for (const codec of configurable) {
            if (used.includes(codec))
                continue;
            for (const resolution of codec.video.resolutions) {
                if (resolution[0] * resolution[1] > highest[1]) {
                    highest = [codec, resolution[0] * resolution[1]];
                }
            }
        }

        const config = highest[0];
        if (!config)
            break;

        used.push(config);
    }

    const findResolutionTarget = (config: MediaStreamConfiguration, width: number, height: number) => {
        let diff = 999999999;
        let ret: [number, number];

        const targetArea = width * height;
        for (const res of config.video.resolutions) {
            const actualArea = res[0] * res[1];
            const diffArea = Math.abs(targetArea - actualArea);
            if (diffArea < diff) {
                diff = diffArea;
                ret = res;
            }
        }

        return ret;
    }

    // find the highest resolution
    const l = used[0];
    const resolution = findResolutionTarget(l, 8192, 8192);

    // get the fps of 20 or highest available
    let fps = Math.min(20, Math.max(...l.video.fpsRange));

    let errors = '';

    const logConfigureCodecs = async (config: MediaStreamConfiguration) => {
        try {
            await configureCodecs(config);
        }
        catch (e) {
            errors += e;
        }
    }

    await logConfigureCodecs({
        id: l.id,
        video: {
            width: resolution[0],
            height: resolution[1],
            bitrateControl: 'variable',
            codec: 'h264',
            bitrate: getBitrateForResolution(resolution[0] * resolution[1]),
            fps,
            keyframeInterval: fps * 4,
            quality: 5,
            profile: 'main',
        },
        audio: audioOptions,
    });

    if (used.length === 3) {
        // find remote and low
        const r = used[1];
        const l = used[2];

        const rResolution = findResolutionTarget(r, 1280, 720);
        const lResolution = findResolutionTarget(l, 640, 360);

        fps = Math.min(20, Math.max(...r.video.fpsRange));
        await logConfigureCodecs({
            id: r.id,
            video: {
                width: rResolution[0],
                height: rResolution[1],
                bitrateControl: 'variable',
                codec: 'h264',
                bitrate: 1 * MEGABIT,
                fps,
                keyframeInterval: fps * 4,
                quality: 5,
                profile: 'main',
            },
            audio: audioOptions,
        });

        fps = Math.min(20, Math.max(...l.video.fpsRange));
        await logConfigureCodecs({
            id: l.id,
            video: {
                width: lResolution[0],
                height: lResolution[1],
                bitrateControl: 'variable',
                codec: 'h264',
                bitrate: MEGABIT / 2,
                fps,
                keyframeInterval: fps * 4,
                quality: 5,
                profile: 'main',
            },
            audio: audioOptions,
        });
    }
    else if (used.length == 2) {
        let target: [number, number];
        if (resolution[0] * resolution[1] > 1920 * 1080)
            target = [1280, 720];
        else
            target = [640, 360];

        const rResolution = findResolutionTarget(used[1], target[0], target[1]);
        const fps = Math.min(20, Math.max(...used[1].video.fpsRange));
        await logConfigureCodecs({
            id: used[1].id,
            video: {
                width: rResolution[0],
                height: rResolution[1],
                bitrateControl: 'variable',
                codec: 'h264',
                bitrate: getBitrateForResolution(rResolution[0] * rResolution[1]),
                fps,
                keyframeInterval: fps * 4,
                quality: 5,
                profile: 'main',
            },
            audio: audioOptions,
        });
    }
    else if (used.length === 1) {
        // no op
    }

    if (errors)
        throw new Error(errors);
}
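The ladder in `getBitrateForResolution` maps pixel area to a default H.264 bitrate, stepping down by roughly half per tier. Worked examples, computed directly from the branches above (with MEGABIT = 1024 * 1000):

```ts
getBitrateForResolution(3840 * 2160); // 4K UHD -> 8 * MEGABIT = 8,192,000 bits/s
getBitrateForResolution(1920 * 1080); // 1080p  -> 2 * MEGABIT = 2,048,000 bits/s
getBitrateForResolution(1280 * 720);  // 720p   -> MEGABIT     = 1,024,000 bits/s
getBitrateForResolution(640 * 360);   // 360p   -> MEGABIT / 4 =   256,000 bits/s
```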
@@ -7,6 +7,7 @@ const { systemManager } = sdk;
export abstract class AutoenableMixinProvider extends ScryptedDeviceBase {
    hasEnabledMixin: { [id: string]: string } = {};
    pluginsComponent: Promise<any>;
    unshiftMixin = false;

    constructor(nativeId?: string, public autoIncludeToken = 'v4') {
        super(nativeId);
@@ -40,19 +41,11 @@ export abstract class AutoenableMixinProvider extends ScryptedDeviceBase {
        return true;
    }

    checkHasEnabledMixin(device: ScryptedDevice) {
        return this.hasEnabledMixin[device.id] === this.autoIncludeToken;
    }

    shouldUnshiftMixin(device: ScryptedDevice) {
        return false;
    }

    async maybeEnableMixin(device: ScryptedDevice) {
        if (!device || device.mixins?.includes(this.id))
            return;

        if (this.checkHasEnabledMixin(device))
        if (this.hasEnabledMixin[device.id] === this.autoIncludeToken)
            return;

        const match = await this.canMixin(device.type, device.interfaces);
@@ -64,7 +57,7 @@ export abstract class AutoenableMixinProvider extends ScryptedDeviceBase {

        this.log.i('auto enabling mixin for ' + device.name)
        const mixins = (device.mixins || []).slice();
        if (this.shouldUnshiftMixin(device))
        if (this.unshiftMixin)
            mixins.unshift(this.id);
        else
            mixins.push(this.id);
@@ -80,5 +73,5 @@ export abstract class AutoenableMixinProvider extends ScryptedDeviceBase {
        this.storage.setItem('hasEnabledMixin', JSON.stringify(this.hasEnabledMixin));
    }

    abstract canMixin(type: ScryptedDeviceType | string, interfaces: string[]): Promise<string[] | null | undefined | void>;
    abstract canMixin(type: ScryptedDeviceType, interfaces: string[]): Promise<string[] | null | undefined | void>;
}
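A sketch of a concrete subclass, assuming only the abstract surface shown above; the provider name and the returned interface list are illustrative:

```ts
import { ScryptedDeviceType, ScryptedInterface } from "@scrypted/sdk";

// Hypothetical provider that auto-attaches to devices exposing VideoCamera.
class ExampleAutoProvider extends AutoenableMixinProvider {
    async canMixin(type: ScryptedDeviceType | string, interfaces: string[]) {
        if (interfaces.includes(ScryptedInterface.VideoCamera))
            return [ScryptedInterface.Settings];
        // returning undefined declines the mixin for this device
    }
}
```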
@@ -1 +0,0 @@
../../server/src/deferred.ts

17  common/src/deferred.ts  Normal file
@@ -0,0 +1,17 @@
export class Deferred<T> {
    finished = false;
    resolve!: (value: T | PromiseLike<T>) => this;
    reject!: (error: Error) => this;
    promise: Promise<T> = new Promise((resolve, reject) => {
        this.resolve = v => {
            this.finished = true;
            resolve(v);
            return this;
        };
        this.reject = e => {
            this.finished = true;
            reject(e);
            return this;
        };
    });
}
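`Deferred` inverts the usual promise construction: the resolve/reject handles are public, and `finished` records settlement. A brief usage sketch:

```ts
const ready = new Deferred<string>();

ready.promise.then(v => console.log('resolved with', v));

// Resolve from outside the executor; `finished` guards against double settlement.
setTimeout(() => {
    if (!ready.finished)
        ready.resolve('hello');
}, 100);
```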
@@ -1,5 +0,0 @@
import type { SystemManager } from '@scrypted/types';

export function getAllDevices<T>(systemManager: SystemManager) {
    return Object.keys(systemManager.getSystemState()).map(id => systemManager.getDeviceById<T>(id));
}
@@ -1,97 +0,0 @@
import type * as monacoEditor from 'monaco-editor';

export interface StandardLibs {
    '@types/node/globals.d.ts': string,
    '@types/node/module.d.ts': string,
    '@types/node/buffer.d.ts': string,
    '@types/node/process.d.ts': string,
    '@types/node/events.d.ts': string,
    '@types/node/stream.d.ts': string,
    '@types/node/fs.d.ts': string,
    '@types/node/net.d.ts': string,
    '@types/node/child_process.d.ts': string,
}

export interface ScryptedLibs {
    '@types/sdk/settings-mixin.d.ts': string,
    '@types/sdk/storage-settings.d.ts': string,
    '@types/sdk/types.d.ts': string,
    '@types/sdk/index.d.ts': string,
}

export function createMonacoEvalDefaultsWithLibs(standardLibs: StandardLibs, scryptedLibs: ScryptedLibs, extraLibs: { [lib: string]: string }) {
    // const libs = Object.assign(scryptedLibs, extraLibs);

    function monacoEvalDefaultsFunction(monaco: typeof monacoEditor, standardLibs: StandardLibs, scryptedLibs: ScryptedLibs, extraLibs: { [lib: string]: string }) {
        monaco.languages.typescript.typescriptDefaults.setDiagnosticsOptions(
            Object.assign(
                {},
                monaco.languages.typescript.typescriptDefaults.getDiagnosticsOptions(),
                {
                    diagnosticCodesToIgnore: [1108, 1375, 1378],
                }
            )
        );

        monaco.languages.typescript.typescriptDefaults.setCompilerOptions(
            Object.assign(
                {},
                monaco.languages.typescript.typescriptDefaults.getCompilerOptions(),
                {
                    moduleResolution:
                        monaco.languages.typescript.ModuleResolutionKind.NodeJs,
                }
            )
        );

        const libs: any = {
            ...scryptedLibs,
            ...extraLibs,
        };

        const catLibs = Object.values(libs).join('\n');
        const catlibsNoExport = Object.keys(libs)
            .map(lib => libs[lib]).map(lib =>
                lib.toString().replace(/export /g, '').replace(/import.*?/g, ''))
            .join('\n');
        monaco.languages.typescript.typescriptDefaults.addExtraLib(`
        ${catLibs}

        declare global {
            ${catlibsNoExport}

            const log: Logger;

            const deviceManager: DeviceManager;
            const endpointManager: EndpointManager;
            const mediaManager: MediaManager;
            const systemManager: SystemManager;

            const eventSource: ScryptedDevice;
            const eventDetails: EventDetails;
            const eventData: any;
        }
        `,
            "node_modules/@types/scrypted__sdk/types/index.d.ts"
        );

        for (const lib of Object.keys(standardLibs)) {
            monaco.languages.typescript.typescriptDefaults.addExtraLib(
                standardLibs[lib as keyof StandardLibs],
                lib,
            );
        }
    }

    return `(function() {
    const standardLibs = ${JSON.stringify(standardLibs)};
    const scryptedLibs = ${JSON.stringify(scryptedLibs)};
    const extraLibs = ${JSON.stringify(extraLibs)};

    return (monaco) => {
        (${monacoEvalDefaultsFunction})(monaco, standardLibs, scryptedLibs, extraLibs);
    }
})();
`;
}
@@ -1,5 +1,3 @@
import type { ScryptedDeviceBase } from "@scrypted/sdk";

export interface ScriptDevice {
    /**
     * @deprecated Use the default export to specify the device handler.
@@ -8,5 +6,3 @@ export interface ScriptDevice {
    handle<T>(handler?: T & object): void;
    handleTypes(...interfaces: string[]): void;
}

export declare const device: ScryptedDeviceBase & ScriptDevice;
@@ -1,11 +1,9 @@
import sdk, { LockState, MixinDeviceBase, PanTiltZoomMovement, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedInterfaceDescriptors, ScryptedMimeTypes } from "@scrypted/sdk";
import { SettingsMixinDeviceBase } from "@scrypted/sdk/settings-mixin";
import { StorageSettings } from "@scrypted/sdk/storage-settings";
import sdk, { MixinDeviceBase, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedInterfaceDescriptors } from "@scrypted/sdk";
import fs from 'fs';
import type { TranspileOptions } from "typescript";
import vm from "vm";
import { createMonacoEvalDefaultsWithLibs, ScryptedLibs, StandardLibs } from "./monaco-libs";
import { ScriptDevice } from "./monaco/script-device";
import path from 'path';

const { systemManager, deviceManager, mediaManager, endpointManager } = sdk;

@@ -29,18 +27,18 @@ export function readFileAsString(f: string) {
    return fs.readFileSync(f).toString();
}

function getScryptedLibs(): ScryptedLibs {
function getTypeDefs() {
    const scryptedTypesDefs = readFileAsString('@types/sdk/types.d.ts');
    const scryptedIndexDefs = readFileAsString('@types/sdk/index.d.ts');
    return {
        "@types/sdk/index.d.ts": readFileAsString('@types/sdk/index.d.ts'),
        "@types/sdk/settings-mixin.d.ts": readFileAsString('@types/sdk/settings-mixin.d.ts'),
        "@types/sdk/storage-settings.d.ts": readFileAsString('@types/sdk/storage-settings.d.ts'),
        "@types/sdk/types.d.ts": readFileAsString('@types/sdk/types.d.ts'),
    }
        scryptedIndexDefs,
        scryptedTypesDefs,
    };
}

export async function scryptedEval(device: ScryptedDeviceBase, script: string, extraLibs: { [lib: string]: string }, params: { [name: string]: any }) {
    const libs = Object.assign({
        types: getScryptedLibs()['@types/sdk/types.d.ts'],
        types: getTypeDefs().scryptedTypesDefs,
    }, extraLibs);
    const allScripts = Object.values(libs).join('\n').toString() + script;
    let compiled: string;
@@ -63,9 +61,9 @@ export async function scryptedEval(device: ScryptedDeviceBase, script: string, e

    const allParams = Object.assign({}, params, {
        sdk,
        fs: require('realfs'),
        ScryptedDeviceBase,
        MixinDeviceBase,
        StorageSettings,
        systemManager,
        deviceManager,
        endpointManager,
@@ -75,9 +73,6 @@ export async function scryptedEval(device: ScryptedDeviceBase, script: string, e
        localStorage: device.storage,
        device,
        exports: {} as any,
        PanTiltZoomMovement,
        SettingsMixinDeviceBase,
        ScryptedMimeTypes,
        ScryptedInterface,
        ScryptedDeviceType,
        // @ts-expect-error
@@ -114,19 +109,92 @@ export async function scryptedEval(device: ScryptedDeviceBase, script: string, e
}

export function createMonacoEvalDefaults(extraLibs: { [lib: string]: string }) {
    const standardlibs: StandardLibs = {
        "@types/node/globals.d.ts": readFileAsString('@types/node/globals.d.ts'),
        "@types/node/module.d.ts": readFileAsString('@types/node/module.d.ts'),
        "@types/node/buffer.d.ts": readFileAsString('@types/node/buffer.d.ts'),
        "@types/node/process.d.ts": readFileAsString('@types/node/process.d.ts'),
        "@types/node/events.d.ts": readFileAsString('@types/node/events.d.ts'),
        "@types/node/stream.d.ts": readFileAsString('@types/node/stream.d.ts'),
        "@types/node/fs.d.ts": readFileAsString('@types/node/fs.d.ts'),
        "@types/node/net.d.ts": readFileAsString('@types/node/net.d.ts'),
        "@types/node/child_process.d.ts": readFileAsString('@types/node/child_process.d.ts'),
    };
    const safeLibs: any = {};

    return createMonacoEvalDefaultsWithLibs(standardlibs, getScryptedLibs(), extraLibs);
    for (const safeLib of [
        '@types/node/globals.d.ts',
        '@types/node/buffer.d.ts',
        '@types/node/process.d.ts',
        '@types/node/events.d.ts',
        '@types/node/stream.d.ts',
        '@types/node/fs.d.ts',
        '@types/node/net.d.ts',
        '@types/node/child_process.d.ts',
    ]) {
        safeLibs[`node_modules/${safeLib}`] = readFileAsString(safeLib)
    }

    const libs = Object.assign(getTypeDefs(), extraLibs);

    function monacoEvalDefaultsFunction(monaco: any, safeLibs: any, libs: any) {
        monaco.languages.typescript.typescriptDefaults.setDiagnosticsOptions(
            Object.assign(
                {},
                monaco.languages.typescript.typescriptDefaults.getDiagnosticsOptions(),
                {
                    diagnosticCodesToIgnore: [1108, 1375, 1378],
                }
            )
        );

        monaco.languages.typescript.typescriptDefaults.setCompilerOptions(
            Object.assign(
                {},
                monaco.languages.typescript.typescriptDefaults.getCompilerOptions(),
                {
                    moduleResolution:
                        monaco.languages.typescript.ModuleResolutionKind.NodeJs,
                }
            )
        );

        const catLibs = Object.values(libs).join('\n');
        const catlibsNoExport = Object.keys(libs).filter(lib => lib !== 'sdk')
            .map(lib => libs[lib]).map(lib =>
                lib.toString().replace(/export /g, '').replace(/import.*?/g, ''))
            .join('\n');
        monaco.languages.typescript.typescriptDefaults.addExtraLib(`
        ${catLibs}

        declare global {
            ${catlibsNoExport}

            const log: Logger;

            const deviceManager: DeviceManager;
            const endpointManager: EndpointManager;
            const mediaManager: MediaManager;
            const systemManager: SystemManager;
            const mqtt: MqttClient;
            const device: ScryptedDeviceBase & { pathname: string };
        }
        `,
            "node_modules/@types/scrypted__sdk/types/index.d.ts"
        );

        monaco.languages.typescript.typescriptDefaults.addExtraLib(
            libs['sdk'],
            "node_modules/@types/scrypted__sdk/index.d.ts"
        );

        for (const lib of Object.keys(safeLibs)) {
            monaco.languages.typescript.typescriptDefaults.addExtraLib(
                safeLibs[lib],
                lib,
            );
        }
    }

    return `(function() {
    const safeLibs = ${JSON.stringify(safeLibs)};
    const libs = ${JSON.stringify(libs)};

    return (monaco) => {
        (${monacoEvalDefaultsFunction})(monaco, safeLibs, libs);
    }
})();
`;
}

export interface ScriptDeviceImpl extends ScriptDevice {
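For orientation, `createMonacoEvalDefaults` returns a string of JavaScript rather than a function; an editor page is expected to evaluate that string to obtain a `(monaco) => void` configurator. A hedged sketch of that handoff (the `window.monaco` global is an assumption about the host page):

```ts
// Build the configurator source with no extra libs.
const source = createMonacoEvalDefaults({});

// On the editor page: evaluate to get the configurator, then apply it
// to the page's monaco instance before creating any editor models.
const configure = eval(source) as (monaco: any) => void;
configure((window as any).monaco);
```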
@@ -19,7 +19,7 @@ function isPi(model: string) {
export function isRaspberryPi() {
    let cpuInfo: string;
    try {
        cpuInfo = require('fs').readFileSync('/proc/cpuinfo', { encoding: 'utf8' });
        cpuInfo = require('realfs').readFileSync('/proc/cpuinfo', { encoding: 'utf8' });
    }
    catch (e) {
        // if this fails, this is probably not a pi
@@ -70,7 +70,11 @@ export function getH264DecoderArgs(): CodecArgs {
        ],
    };

    if (os.platform() === 'linux') {
    if (isRaspberryPi()) {
        ret['Raspberry Pi'] = ['-c:v', 'h264_mmal'];
        ret[V4L2] = ['-c:v', 'h264_v4l2m2m'];
    }
    else if (os.platform() === 'linux') {
        ret[V4L2] = ['-c:v', 'h264_v4l2m2m'];
    }
    else if (os.platform() === 'win32') {
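A short sketch of consuming `getH264DecoderArgs`: each entry maps a label to ffmpeg `-c:v` arguments, so a caller picks an entry and splices it ahead of the input. The stream URL and fallback order here are illustrative:

```ts
// Prefer the Raspberry Pi decoder when present, otherwise V4L2, else software.
const decoders = getH264DecoderArgs();
const decoderArgs = decoders['Raspberry Pi'] ?? decoders[V4L2] ?? [];

const ffmpegArgs = [
    ...decoderArgs,                      // e.g. ['-c:v', 'h264_mmal']
    '-i', 'rtsp://camera.local/stream',  // hypothetical input
    '-f', 'null', '-',
];
```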
@@ -1,21 +1,29 @@
import { createActivityTimeout } from '@scrypted/common/src/activity-timeout';
import { cloneDeep } from '@scrypted/common/src/clone-deep';
import { Deferred } from "@scrypted/common/src/deferred";
import { listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from '@scrypted/common/src/media-helpers';
import { createRtspParser } from "@scrypted/common/src/rtsp-server";
import { StreamChunk, StreamParser } from '@scrypted/common/src/stream-parser';
import sdk, { FFmpegInput, RequestMediaStreamOptions, ResponseMediaStreamOptions } from "@scrypted/sdk";
import child_process, { ChildProcess, StdioOptions } from 'child_process';
import { EventEmitter } from 'events';
import { Server } from 'net';
import { Duplex } from 'stream';
import { cloneDeep } from './clone-deep';
import { Deferred } from "./deferred";
import { listenZeroSingleClient } from './listen-cluster';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from './media-helpers';
import { createRtspParser } from "./rtsp-server";
import { parseSdp } from "./sdp-utils";
import { StreamChunk, StreamParser } from './stream-parser';

const { mediaManager } = sdk;

export interface ParserSession<T extends string> {
    parserSpecific?: any;
    sdp: Promise<string>;
    sdp: Promise<Buffer[]>;
    resetActivityTimer?: () => void,
    negotiateMediaStream(requestMediaStream: RequestMediaStreamOptions, inputVideoCodec: string, inputAudioCodec: string): ResponseMediaStreamOptions;
    negotiateMediaStream(requestMediaStream: RequestMediaStreamOptions): ResponseMediaStreamOptions;
    inputAudioCodec?: string;
    inputVideoCodec?: string;
    inputVideoResolution?: {
        width: number,
        height: number,
    },
    start(): void;
    kill(error?: Error): void;
    killed: Promise<void>;
@@ -23,7 +31,6 @@ export interface ParserSession<T extends string> {

    emit(container: T, chunk: StreamChunk): this;
    on(container: T, callback: (chunk: StreamChunk) => void): this;
    on(error: 'error', callback: (e: Error) => void): this;
    removeListener(event: T | 'killed', callback: any): this;
    once(event: T | 'killed', listener: (...args: any[]) => void): this;
}
@@ -95,37 +102,65 @@ export async function parseAudioCodec(cp: ChildProcess) {
export function setupActivityTimer(container: string, kill: (error?: Error) => void, events: {
    once(event: 'killed', callback: () => void): void,
}, timeout: number) {
    const ret = createActivityTimeout(timeout, () => {
    let dataTimeout: NodeJS.Timeout;

    function dataKill() {
        const str = 'timeout waiting for data, killing parser session';
        console.error(str, container);
        kill(new Error(str));
    });
    events.once('killed', () => ret.clearActivityTimer());
    return ret;
}

    let lastTime = Date.now();
    function resetActivityTimer() {
        lastTime = Date.now();
    }

    function clearActivityTimer() {
        clearInterval(dataTimeout);
    }

    if (timeout) {
        dataTimeout = setInterval(() => {
            if (Date.now() > lastTime + timeout) {
                clearInterval(dataTimeout);
                dataTimeout = undefined;
                dataKill();
            }
        }, timeout);
    }

    events.once('killed', () => clearInterval(dataTimeout));

    resetActivityTimer();
    return {
        resetActivityTimer,
        clearActivityTimer,
    }
}

export async function startParserSession<T extends string>(ffmpegInput: FFmpegInput, options: ParserOptions<T>): Promise<ParserSession<T>> {
    const { console } = options;

    let isActive = true;
    const events = new EventEmitter();
    // need this to prevent kill from throwing due to uncaught Error during cleanup
    events.on('error', () => {});
    events.on('error', e => console.error('rebroadcast error', e));

    let inputAudioCodec: string;
    let inputVideoCodec: string;
    let inputVideoResolution: string[];

    let sessionKilled: any;
    const killed = new Promise<void>(resolve => {
        sessionKilled = resolve;
    });

    const sdpDeferred = new Deferred<string>();
    function kill(error?: Error) {
        error ||= new Error('killed');
        if (isActive) {
            events.emit('killed');
            events.emit('error', error);
            events.emit('error', error || new Error('killed'));
        }
        if (!sdpDeferred.finished)
            sdpDeferred.reject(error);
        isActive = false;
        sessionKilled();
        safeKillFFmpeg(cp);
@@ -133,7 +168,6 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn

    const args = ffmpegInput.inputArguments.slice();
    const env = ffmpegInput.env ? { ...process.env, ...ffmpegInput.env } : undefined;

    const ensureActive = (killed: () => void) => {
        if (!isActive) {
@@ -151,7 +185,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
    const parser: StreamParser = options.parsers[container as T];

    if (parser.tcpProtocol) {
        const tcp = await listenZeroSingleClient('127.0.0.1');
        const tcp = await listenZeroSingleClient();
        const url = new URL(parser.tcpProtocol);
        url.port = tcp.port.toString();
        args.push(
@@ -166,7 +200,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
        try {
            ensureActive(() => socket.destroy());

            for await (const chunk of parser.parse(socket, undefined, undefined)) {
            for await (const chunk of parser.parse(socket, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
                events.emit(container, chunk);
                resetActivityTimer();
            }
@@ -189,9 +223,8 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
    // start ffmpeg process with child process pipes
    args.unshift('-hide_banner');
    safePrintFFmpegArguments(console, args);
    const cp = child_process.spawn(ffmpegInput.ffmpegPath || await mediaManager.getFFmpegPath(), args, {
    const cp = child_process.spawn(await mediaManager.getFFmpegPath(), args, {
        stdio,
        env,
    });
    ffmpegLogInitialOutput(console, cp, undefined, options?.storage);
    cp.on('exit', () => kill(new Error('ffmpeg exited')));
@@ -214,7 +247,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
    try {
        const { resetActivityTimer } = setupActivityTimer(container, kill, events, options?.timeout);

        for await (const chunk of parser.parse(pipe as any, undefined, undefined)) {
        for await (const chunk of parser.parse(pipe as any, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
            await deferredStart.promise;
            events.emit(container, chunk);
            resetActivityTimer();
@@ -229,23 +262,42 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn

    const rtsp = (options.parsers as any).rtsp as ReturnType<typeof createRtspParser>;
    rtsp.sdp.then(sdp => {
        console?.log('sdp received from ffmpeg', sdp);
        sdpDeferred.resolve(sdp);
        const parsed = parseSdp(sdp);
        const audio = parsed.msections.find(msection => msection.type === 'audio');
        const video = parsed.msections.find(msection => msection.type === 'video');
        inputVideoCodec = video?.codec;
        inputAudioCodec = audio?.codec;
    });

    const sdp = new Deferred<Buffer[]>();
    rtsp.sdp.then(r => sdp.resolve([Buffer.from(r)]));
    killed.then(() => sdp.reject(new Error("ffmpeg killed before sdp could be parsed")));

    start();

    return {
        start() {
            deferredStart.resolve();
        },
        sdp: sdpDeferred.promise,
        sdp: sdp.promise,
        get inputAudioCodec() {
            return inputAudioCodec;
        },
        get inputVideoCodec() {
            return inputVideoCodec;
        },
        get inputVideoResolution() {
            return {
                width: parseInt(inputVideoResolution?.[2]),
                height: parseInt(inputVideoResolution?.[3]),
            }
        },
        get isActive() { return isActive },
        kill(error?: Error) {
            kill(error);
        },
        killed,
        negotiateMediaStream: (requestMediaStream: RequestMediaStreamOptions, inputVideoCodec, inputAudioCodec) => {
        negotiateMediaStream: () => {
            const ret: ResponseMediaStreamOptions = cloneDeep(ffmpegInput.mediaStreamOptions) || {
                id: undefined,
                name: undefined,
@@ -287,3 +339,64 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
    }
};
}

export interface Rebroadcaster {
    server: Server;
    port: number;
    url: string;
    clients: number;
}

export interface RebroadcastSessionCleanup {
    (): void;
}

export interface RebroadcasterConnection {
    writeData: (data: StreamChunk) => number;
    destroy: () => void;
}

export interface RebroadcasterOptions {
    connect?: (connection: RebroadcasterConnection) => RebroadcastSessionCleanup | undefined;
    console?: Console;
    idle?: {
        timeout: number,
        callback: () => void,
    },
}

export function handleRebroadcasterClient(socket: Duplex, options?: RebroadcasterOptions) {
    const firstWriteData = (data: StreamChunk) => {
        if (data.startStream) {
            socket.write(data.startStream)
        }
        connection.writeData = writeData;
        return writeData(data);
    }
    const writeData = (data: StreamChunk) => {
        for (const chunk of data.chunks) {
            socket.write(chunk);
        }

        return socket.writableLength;
    };

    const destroy = () => {
        const cb = cleanupCallback;
        cleanupCallback = undefined;
        socket.destroy();
        cb?.();
    }

    const connection: RebroadcasterConnection = {
        writeData: firstWriteData,
        destroy,
    };

    let cleanupCallback = options?.connect(connection);

    socket.once('close', () => {
        destroy();
    });
    socket.on('error', e => options?.console?.log('client stream ended'));
}
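A minimal wiring sketch for `handleRebroadcasterClient`: a TCP server hands each client socket to the helper, and the `connect` callback subscribes that connection to stream chunks, returning the unsubscribe as cleanup. The `session` emitter and `'rtsp'` container name are assumptions for illustration:

```ts
import net from 'net';

declare const session: ParserSession<'rtsp'>; // hypothetical parser session

const server = net.createServer(socket => {
    handleRebroadcasterClient(socket, {
        console,
        connect: connection => {
            // The first write emits data.startStream (the stream header), then raw chunks.
            const listener = (chunk: StreamChunk) => connection.writeData(chunk);
            session.on('rtsp', listener);
            // The returned cleanup runs when the client socket closes or errors.
            return () => session.removeListener('rtsp', listener);
        },
    });
});
server.listen(0);
```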
@@ -1,8 +0,0 @@
export function safeParseJson(value: string) {
    try {
        return JSON.parse(value);
    }
    catch (e) {
    }
}
@@ -79,4 +79,4 @@ export async function bind(server: dgram.Socket, port: number) {
    }
}

export { ListenZeroSingleClientTimeoutError, listenZero, listenZeroSingleClient } from "../../server/src/listen-zero";
export { ListenZeroSingleClientTimeoutError, listenZero, listenZeroSingleClient } from "@scrypted/server/src/listen-zero";
@@ -1 +1 @@
export { safeKillFFmpeg, ffmpegLogInitialOutput, safePrintFFmpegArguments } from '../../server/src/media-helpers';
export * from '@scrypted/server/src/media-helpers';

@@ -1 +0,0 @@
../../server/src/promise-utils.ts
96
common/src/promise-utils.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
export interface RefreshPromise<T> {
    promise: Promise<T>;
    cacheDuration: number;
}

export function singletonPromise<T>(rp: undefined | RefreshPromise<T>, method: () => Promise<T>, cacheDuration = 0) {
    if (rp?.promise)
        return rp;

    const promise = method();
    if (!rp) {
        rp = {
            promise,
            cacheDuration,
        }
    }
    else {
        rp.promise = promise;
    }
    promise.finally(() => setTimeout(() => rp.promise = undefined, rp.cacheDuration));
    return rp;
}

export class TimeoutError<T> extends Error {
    constructor(public promise: Promise<T>) {
        super('Operation Timed Out');
    }
}

export function timeoutPromise<T>(timeout: number, promise: Promise<T>): Promise<T> {
    return new Promise<T>((resolve, reject) => {
        const t = setTimeout(() => reject(new TimeoutError(promise)), timeout);

        promise
            .then(v => {
                clearTimeout(t);
                resolve(v);
            })
            .catch(e => {
                clearTimeout(t);
                reject(e);
            });
    })
}

export function timeoutFunction<T>(timeout: number, f: (isTimedOut: () => boolean) => Promise<T>): Promise<T> {
    return new Promise<T>((resolve, reject) => {
        let isTimedOut = false;
        const promise = f(() => isTimedOut);

        const t = setTimeout(() => {
            isTimedOut = true;
            reject(new TimeoutError(promise));
        }, timeout);

        promise
            .then(v => {
                clearTimeout(t);
                resolve(v);
            })
            .catch(e => {
                clearTimeout(t);
                reject(e);
            });
    })
}

export function createPromiseDebouncer<T>() {
    let current: Promise<T>;

    return (func: () => Promise<T>): Promise<T> => {
        if (!current)
            current = func().finally(() => current = undefined);
        return current;
    }
}

export function createMapPromiseDebouncer<T>() {
    const map = new Map<string, Promise<T>>();

    return (key: any, debounce: number, func: () => Promise<T>): Promise<T> => {
        const keyStr = JSON.stringify(key);
        let value = map.get(keyStr);
        if (!value) {
            value = func().finally(() => {
                if (!debounce) {
                    map.delete(keyStr);
                    return;
                }
                setTimeout(() => map.delete(keyStr), debounce);
            });
            map.set(keyStr, value);
        }
        return value;
    }
}
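The helpers above compose naturally; a minimal usage sketch, assuming a hypothetical fetchSnapshot function and camera id (neither is part of this changeset):

// Guard a slow fetch with a 5 second timeout, and debounce concurrent
// callers per camera id for 1 second via the map debouncer above.
const debounceSnapshots = createMapPromiseDebouncer<Buffer>();

async function getSnapshot(cameraId: string, fetchSnapshot: () => Promise<Buffer>) {
    try {
        return await timeoutPromise(5000, debounceSnapshots(cameraId, 1000, fetchSnapshot));
    }
    catch (e) {
        if (e instanceof TimeoutError)
            console.warn('snapshot timed out', cameraId);
        throw e;
    }
}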
@@ -54,18 +54,18 @@ export async function read16BELengthLoop(readable: Readable, options: {
    readable.on('readable', read);

    await once(readable, 'end');
    throw new StreamEndError('read16BELengthLoop');
    throw new Error('stream ended');
}

export class StreamEndError extends Error {
    constructor(where: string) {
        super(`stream ended: ${where}`);
    constructor() {
        super('stream ended');
    }
}

export async function readLength(readable: Readable, length: number): Promise<Buffer> {
    if (readable.readableEnded || readable.destroyed)
        throw new StreamEndError('readLength start');
        throw new StreamEndError();

    if (!length) {
        return Buffer.alloc(0);
@@ -88,12 +88,12 @@ export async function readLength(readable: Readable, length: number): Promise<Buffer> {
        }

        if (readable.readableEnded || readable.destroyed)
            reject(new StreamEndError('readLength readable'));
            reject(new Error("stream ended during read"));
    };

    const e = () => {
        cleanup();
        reject(new StreamEndError('readLength end'));
        reject(new StreamEndError())
    };

    const cleanup = () => {
@@ -136,17 +136,12 @@ export async function readLine(readable: Readable) {
}

export async function readString(readable: Readable | Promise<Readable>) {
    const buffer = await readBuffer(readable);
    return buffer.toString();
}

export async function readBuffer(readable: Readable | Promise<Readable>) {
    const buffers: Buffer[] = [];
    let data = '';
    readable = await readable;
    readable.on('data', buffer => {
        buffers.push(buffer);
        data += buffer.toString();
    });
    readable.resume();
    await once(readable, 'end')
    return Buffer.concat(buffers);
    return data;
}
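For reference, a hedged sketch of how readLength composes into the 16-bit big-endian length framing that read16BELengthLoop handles; the readable argument and message split are assumptions for illustration:

import { Readable } from 'stream';

// Read one length-prefixed message: a 2 byte big-endian length,
// followed by that many payload bytes.
async function readOneMessage(readable: Readable) {
    const prefix = await readLength(readable, 2);
    const length = prefix.readUInt16BE(0);
    return readLength(readable, length);
}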
@@ -1,5 +1,5 @@
import { RpcPeer } from "../../server/src/rpc";
import { createRpcSerializer } from "../../server/src/rpc-serializer";
import { RpcPeer } from "@scrypted/server/src/rpc";
import { createRpcSerializer } from "@scrypted/server/src/rpc-serializer";
import type { RTCSignalingSession } from "@scrypted/sdk";

export async function createBrowserSignalingSession(ws: WebSocket, localName: string, remoteName: string) {

@@ -41,15 +41,15 @@ export function isPeerConnectionClosed(pc: RTCPeerConnection) {
        || pc.iceConnectionState === 'closed';
}

// function silence() {
//     let ctx = new AudioContext(), oscillator = ctx.createOscillator();
//     const dest = ctx.createMediaStreamDestination();
//     oscillator.connect(dest);
//     oscillator.start();
//     const ret = dest.stream.getAudioTracks()[0];
//     ret.enabled = false;
//     return ret;
// }
function silence() {
    let ctx = new AudioContext(), oscillator = ctx.createOscillator();
    const dest = ctx.createMediaStreamDestination();
    oscillator.connect(dest);
    oscillator.start();
    const ret = dest.stream.getAudioTracks()[0];
    ret.enabled = false;
    return ret;
}

function createOptions() {
    const options: RTCSignalingOptions = {
@@ -110,9 +110,7 @@ export class BrowserSignalingSession implements RTCSignalingSession {
            await this.microphone.replaceTrack(mic.getTracks()[0]);
        }

        if (this.microphone?.track) {
            this.microphone.track.enabled = enabled;
        }
        this.microphone.track.enabled = enabled;
    }

    close() {
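The silence() helper above appears to seed the connection with a muted audio track that a real microphone can later replace, per the replaceTrack call in the class. A hedged browser-side sketch; the peer connection wiring here is an assumption, not part of this diff:

// Hypothetical: add the muted silence track up front, swap in the mic later.
const pc = new RTCPeerConnection();
const sender = pc.addTrack(silence());

async function enableMicrophone() {
    const mic = await navigator.mediaDevices.getUserMedia({ audio: true });
    await sender.replaceTrack(mic.getAudioTracks()[0]);
}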
@@ -89,51 +89,27 @@ export const H264_NAL_TYPE_FU_B = 29;
export const H264_NAL_TYPE_MTAP16 = 26;
export const H264_NAL_TYPE_MTAP32 = 27;

export const H265_NAL_TYPE_AGG = 48;
export const H265_NAL_TYPE_VPS = 32;
export const H265_NAL_TYPE_SPS = 33;
export const H265_NAL_TYPE_PPS = 34;
export const H265_NAL_TYPE_BLA_W_LP = 16;
export const H265_NAL_TYPE_BLA_W_RADL = 17;
export const H265_NAL_TYPE_BLA_N_LP = 18;
export const H265_NAL_TYPE_IDR_W_RADL = 19;
export const H265_NAL_TYPE_IDR_N_LP = 20;
export const H265_NAL_TYPE_CRA_NUT = 21;
export const H265_NAL_TYPE_FU = 49;
export const H265_NAL_TYPE_SEI_PREFIX = 39;
export const H265_NAL_TYPE_SEI_SUFFIX = 40;

export function findH264NaluType(streamChunk: StreamChunk, naluType: number) {
    if (streamChunk.type !== 'h264')
        return;
    return findH264NaluTypeInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12), naluType);
}

export function findH265NaluType(streamChunk: StreamChunk, naluType: number) {
    if (streamChunk.type !== 'h265')
        return;
    return findH265NaluTypeInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12), naluType);
}

export function parseH264NaluType(firstNaluByte: number) {
    return firstNaluByte & 0x1f;
}

export function findH264NaluTypeInNalu(nalu: Buffer, naluType: number) {
    const checkNaluType = parseH264NaluType(nalu[0]);
    const checkNaluType = nalu[0] & 0x1f;
    if (checkNaluType === H264_NAL_TYPE_STAP_A) {
        let pos = 1;
        while (pos < nalu.length) {
            const naluLength = nalu.readUInt16BE(pos);
            pos += 2;
            const stapaType = parseH264NaluType(nalu[pos]);
            const stapaType = nalu[pos] & 0x1f;
            if (stapaType === naluType)
                return nalu.subarray(pos, pos + naluLength);
            pos += naluLength;
        }
    }
    else if (checkNaluType === H264_NAL_TYPE_FU_A) {
        const fuaType = parseH264NaluType(nalu[1]);
        const fuaType = nalu[1] & 0x1f;
        const isFuStart = !!(nalu[1] & 0x80);

        if (fuaType === naluType && isFuStart)
@@ -145,52 +121,39 @@ export function findH264NaluTypeInNalu(nalu: Buffer, naluType: number) {
    return;
}

function parseH265NaluType(firstNaluByte: number) {
    return (firstNaluByte & 0b01111110) >> 1;
}

export function findH265NaluTypeInNalu(nalu: Buffer, naluType: number) {
    const checkNaluType = parseH265NaluType(nalu[0]);
    if (checkNaluType === H265_NAL_TYPE_AGG) {
        let pos = 1;
        while (pos < nalu.length) {
            const naluLength = nalu.readUInt16BE(pos);
            pos += 2;
            const stapaType = parseH265NaluType(nalu[pos]);
            if (stapaType === naluType)
                return nalu.subarray(pos, pos + naluLength);
            pos += naluLength;
        }
    }
    else if (checkNaluType === naluType) {
        return nalu;
    }
    return;
}

export function getStartedH264NaluTypes(streamChunk: StreamChunk) {
export function getNaluTypes(streamChunk: StreamChunk) {
    if (streamChunk.type !== 'h264')
        return new Set<number>();
    return getNaluTypesInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12), true)
    return getNaluTypesInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12))
}

export function getNaluFragmentInformation(nalu: Buffer) {
    const naluType = nalu[0] & 0x1f;
    const fua = naluType === H264_NAL_TYPE_FU_A;
    return {
        fua,
        fuaStart: fua && !!(nalu[1] & 0x80),
        fuaEnd: fua && !!(nalu[1] & 0x40),
    }
}

export function getNaluTypesInNalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
    const ret = new Set<number>();
    const naluType = parseH264NaluType(nalu[0]);
    const naluType = nalu[0] & 0x1f;
    if (naluType === H264_NAL_TYPE_STAP_A) {
        ret.add(H264_NAL_TYPE_STAP_A);
        let pos = 1;
        while (pos < nalu.length) {
            const naluLength = nalu.readUInt16BE(pos);
            pos += 2;
            const stapaType = parseH264NaluType(nalu[pos]);
            const stapaType = nalu[pos] & 0x1f;
            ret.add(stapaType);
            pos += naluLength;
        }
    }
    else if (naluType === H264_NAL_TYPE_FU_A) {
        ret.add(H264_NAL_TYPE_FU_A);
        const fuaType = parseH264NaluType(nalu[1]);
        const fuaType = nalu[1] & 0x1f;
        if (fuaRequireStart) {
            const isFuStart = !!(nalu[1] & 0x80);
            if (isFuStart)
@@ -212,70 +175,6 @@ export function getNaluTypesInNalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
    return ret;
}

export function getStartedH265NaluTypes(streamChunk: StreamChunk) {
    if (streamChunk.type !== 'h265')
        return new Set<number>();
    return getNaluTypesInH265Nalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12), true)
}

export function getNaluTypesInH265Nalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
    const ret = new Set<number>();
    const naluType = parseH265NaluType(nalu[0]);
    if (naluType === H265_NAL_TYPE_AGG) {
        ret.add(H265_NAL_TYPE_AGG);
        let pos = 2;
        while (pos < nalu.length) {
            const naluLength = nalu.readUInt16BE(pos);
            pos += 2;
            const stapaType = parseH265NaluType(nalu[pos]);
            ret.add(stapaType);
            pos += naluLength;
        }
    }
    else if (naluType === H265_NAL_TYPE_FU) {
        ret.add(H265_NAL_TYPE_FU);
        const fuaType = nalu[2] & 0x3F; // 6 bits
        if (fuaRequireStart) {
            const isFuStart = !!(nalu[2] & 0x80);
            if (isFuStart)
                ret.add(fuaType);
        }
        else if (fuaRequireEnd) {
            const isFuEnd = !!(nalu[2] & 0x40);
            if (isFuEnd)
                ret.add(fuaType);
        }
        else {
            ret.add(fuaType);
        }
    }
    else {
        ret.add(naluType);
    }

    return ret;
}

export function isH265KeyFrameRelatedInSet(naluTypes: Set<number>, allowCodecInfo = true) {
    if (naluTypes.has(H265_NAL_TYPE_IDR_N_LP)
        || naluTypes.has(H265_NAL_TYPE_IDR_W_RADL)
        || naluTypes.has(H265_NAL_TYPE_CRA_NUT)
        || naluTypes.has(H265_NAL_TYPE_BLA_N_LP)
        || naluTypes.has(H265_NAL_TYPE_BLA_W_LP)
        || naluTypes.has(H265_NAL_TYPE_BLA_W_RADL)) {
        return true;
    }

    if (allowCodecInfo) {
        if (naluTypes.has(H265_NAL_TYPE_VPS)
            || naluTypes.has(H265_NAL_TYPE_SPS)
            || naluTypes.has(H265_NAL_TYPE_PPS))
            return true;
    }
    return false;
}
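The bit masks above mirror the RTP payload headers: H.264 keeps the NALU type in the low 5 bits of the first byte, while H.265 keeps it in bits 1 through 6. A small worked example:

// H.264: 0x67 & 0x1f === 7, the SPS NALU type.
parseH264NaluType(0x67); // 7

// H.265: (0x40 & 0b01111110) >> 1 === 32 (H265_NAL_TYPE_VPS).
// parseH265NaluType is module private above, so the math is shown inline.
(0x40 & 0b01111110) >> 1; // 32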
export function createRtspParser(options?: StreamParserOptions): RtspStreamParser {
|
||||
let resolve: any;
|
||||
|
||||
@@ -291,25 +190,17 @@ export function createRtspParser(options?: StreamParserOptions): RtspStreamParse
|
||||
'tcp',
|
||||
...(options?.vcodec || []),
|
||||
...(options?.acodec || []),
|
||||
// linux and windows seem to support 64000 but darwin is 32000?
|
||||
'-pkt_size', '32000',
|
||||
'-f', 'rtsp',
|
||||
],
|
||||
findSyncFrame(streamChunks: StreamChunk[]) {
|
||||
for (let prebufferIndex = 0; prebufferIndex < streamChunks.length; prebufferIndex++) {
|
||||
const streamChunk = streamChunks[prebufferIndex];
|
||||
if (streamChunk.type === 'h264') {
|
||||
const naluTypes = getStartedH264NaluTypes(streamChunk);
|
||||
if (naluTypes.has(H264_NAL_TYPE_SPS) || naluTypes.has(H264_NAL_TYPE_IDR)) {
|
||||
return streamChunks.slice(prebufferIndex);
|
||||
}
|
||||
if (streamChunk.type !== 'h264') {
|
||||
continue;
|
||||
}
|
||||
else if (streamChunk.type === 'h265') {
|
||||
const naluTypes = getStartedH265NaluTypes(streamChunk);
|
||||
|
||||
if (isH265KeyFrameRelatedInSet(naluTypes)) {
|
||||
return streamChunks.slice(prebufferIndex);
|
||||
}
|
||||
if (findH264NaluType(streamChunk, H264_NAL_TYPE_SPS) || findH264NaluType(streamChunk, H264_NAL_TYPE_IDR)) {
|
||||
return streamChunks.slice(prebufferIndex);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -435,7 +326,7 @@ export class RtspClient extends RtspBase {
|
||||
hasGetParameter = true;
|
||||
contentBase: string;
|
||||
|
||||
constructor(public readonly url: string) {
|
||||
constructor(public url: string) {
|
||||
super();
|
||||
const u = new URL(url);
|
||||
const port = parseInt(u.port) || 554;
|
||||
@@ -547,47 +438,11 @@ export class RtspClient extends RtspBase {
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
this.client.destroy(e as Error);
|
||||
this.client.destroy(e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
    async *handleStream(): AsyncGenerator<{
        rtcp: boolean,
        header: Buffer,
        packet: Buffer,
        channel: number,
    }> {
        while (true) {
            const header = await readLength(this.client, 4);
            // can this even happen? since the RTSP request method isn't a fixed
            // value like the "RTSP" in the RTSP response, I don't think so?
            if (header[0] !== RTSP_FRAME_MAGIC) {
                if (header.toString() !== 'RTSP')
                    throw this.createBadHeader(header);

                this.client.unshift(header);

                // do what with this?
                const message = await super.readMessage();
                const body = await this.readBody(parseHeaders(message));

                continue;
            }

            const length = header.readUInt16BE(2);
            const packet = await readLength(this.client, length);
            const id = header.readUInt8(1);

            yield {
                channel: id,
                rtcp: id % 2 === 1,
                header,
                packet,
            }
        }
    }
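handleStream demultiplexes RTSP interleaved binary frames (RFC 2326 section 10.12): a 4 byte header of the '$' magic, a channel id, and a 16-bit big-endian payload length, followed by the payload. An illustrative header, matching the reads above:

const header = Buffer.from([0x24, 0x00, 0x05, 0xc8]); // '$', channel 0, length 1480
const channel = header.readUInt8(1);   // 0; odd channels carry RTCP per the convention above
const length = header.readUInt16BE(2); // 0x05c8 === 1480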
    async readLoop() {
        const deferred = new Deferred<void>();

@@ -649,8 +504,7 @@ export class RtspClient extends RtspBase {
            }
        }
        catch (e) {
            if (!deferred.finished)
                deferred.reject(e as Error);
            deferred.reject(e);
            this.client.destroy();
        }
    };

@@ -686,15 +540,10 @@ export class RtspClient extends RtspBase {
            throw new Error('no WWW-Authenticate found');

        const { BASIC } = await import('http-auth-utils');
        // @ts-ignore
        const { parseHTTPHeadersQuotedKeyValueSet } = await import('http-auth-utils/dist/utils');

        const authedUrl = new URL(this.url);
        const username = decodeURIComponent(authedUrl.username);
        const password = decodeURIComponent(authedUrl.password);

        if (this.wwwAuthenticate.includes('Basic')) {
            const hash = BASIC.computeHash({ username, password });
            const hash = BASIC.computeHash(url);
            return `Basic ${hash}`;
        }

@@ -714,6 +563,10 @@ export class RtspClient extends RtspBase {
            REQUIRED_WWW_AUTHENTICATE_KEYS,
        ) as DigestWWWAuthenticateData;

        const authedUrl = new URL(this.url);
        const username = decodeURIComponent(authedUrl.username);
        const password = decodeURIComponent(authedUrl.password);

        const strippedUrl = new URL(url.toString());
        strippedUrl.username = '';
        strippedUrl.password = '';
@@ -803,10 +656,7 @@ export class RtspClient extends RtspBase {
            Accept: 'application/sdp',
        });

        this.contentBase = response.headers['content-base'] || response.headers['content-location'];
        // content base may be a relative path? seems odd.
        if (this.contentBase)
            this.contentBase = new URL(this.contentBase, this.url).toString();
        this.contentBase = response.headers['content-base'] || response.headers['content-location'];;
        return response;
    }

@@ -1204,7 +1054,7 @@ export class RtspServer {
}

export async function listenSingleRtspClient<T extends RtspServer>(options?: {
    hostname: string,
    hostname?: string,
    pathToken?: string,
    createServer?(duplex: Duplex): T,
}) {
@@ -175,8 +175,6 @@ export type RTPMap = ReturnType<typeof parseRtpMap>;
export function parseRtpMap(mline: ReturnType<typeof parseMLine>, rtpmap: string) {
    const mlineType = mline.type;
    const match = rtpmap?.match(/a=rtpmap:([\d]+) (.*?)\/([\d]+)(\/([\d]+))?/);
    let channels = parseInt(match?.[5]) || undefined;
    let payloadType = parseInt(match?.[1]);

    rtpmap = rtpmap?.toLowerCase();

@@ -224,20 +222,10 @@ export function parseRtpMap(mline: ReturnType<typeof parseMLine>, rtpmap: string) {
        if (mline.payloadTypes?.includes(0)) {
            codec = 'pcm_mulaw';
            ffmpegEncoder = 'pcm_mulaw';
            payloadType = 0;
            channels = 1;
        }
        else if (mline.payloadTypes?.includes(8)) {
            codec = 'pcm_alaw';
            ffmpegEncoder = 'pcm_alaw';
            payloadType = 8;
            channels = 1;
        }
        else if (mline.payloadTypes?.includes(14)) {
            codec = 'mp3';
            ffmpegEncoder = 'mp3';
            payloadType = 14;
            channels = 2;
        }
        else {
            // ffmpeg seems to omit the rtpmap type for pcm alaw when creating sdp?
@@ -247,29 +235,17 @@ export function parseRtpMap(mline: ReturnType<typeof parseMLine>, rtpmap: string) {
            // https://en.wikipedia.org/wiki/RTP_payload_formats
            codec = 'pcm_alaw';
            ffmpegEncoder = 'pcm_alaw';
            payloadType = 8;
            channels = 1;
        }
    }

    // assigned payload types do not need to provide a clock, there is a default.
    let clock = parseInt(match?.[3]);
    if (!clock) {
        clock = undefined;
        if (codec === 'pcm_mulaw' || codec === 'pcm_alaw')
            clock = 8000;
        else if (codec === 'pcm_s16be')
            clock = 16000;
    }

    return {
        line: rtpmap,
        codec,
        ffmpegEncoder,
        rawCodec: match?.[2],
        clock,
        channels,
        payloadType,
        clock: parseInt(match?.[3]),
        channels: parseInt(match?.[5]) || undefined,
        payloadType: parseInt(match?.[1]),
    }
}

@@ -379,33 +355,3 @@ export function getSpsPps(
        pps: Buffer.from(pps, 'base64'),
    }
}

export function getSpsPpsVps(
    section: {
        fmtp: {
            payloadType: number;
            parameters: {
                [key: string]: string;
            };
        }[]
    }
) {
    const parameters = section?.fmtp?.[0]?.parameters;
    if (!parameters) {
        return {
            sps: undefined,
            pps: undefined,
            vps: undefined,
        };
    }

    const sps = parameters['sprop-sps'];
    const pps = parameters['sprop-pps'];
    const vps = parameters['sprop-vps'];

    return {
        sps: sps ? Buffer.from(sps, 'base64') : undefined,
        pps: pps ? Buffer.from(pps, 'base64') : undefined,
        vps: vps ? Buffer.from(vps, 'base64') : undefined,
    }
}
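For a typical SDP line, the rtpmap regex near the top of parseRtpMap splits payload type, codec name, clock, and optional channel count; a hedged illustration:

// 'a=rtpmap:96 H264/90000' -> payload type 96, raw codec 'H264', clock 90000.
// 'a=rtpmap:97 MPEG4-GENERIC/48000/2' would additionally yield channels 2.
const m = 'a=rtpmap:96 H264/90000'.match(/a=rtpmap:([\d]+) (.*?)\/([\d]+)(\/([\d]+))?/);
console.log(m?.[1], m?.[2], m?.[3]); // '96' 'H264' '90000'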
@@ -1 +1 @@
export { sleep } from "../../server/src/sleep";
export * from "@scrypted/server/src/sleep"

@@ -2,6 +2,7 @@ import { Socket as DatagramSocket } from "dgram";
import { once } from "events";
import { Duplex } from "stream";
import { FFMPEG_FRAGMENTED_MP4_OUTPUT_ARGS, MP4Atom, parseFragmentedMP4 } from "./ffmpeg-mp4-parser-session";
import { readLength } from "./read-stream";

export interface StreamParser {
    container: string;
@@ -24,11 +25,59 @@ export interface StreamParserOptions {
export interface StreamChunk {
    startStream?: Buffer;
    chunks: Buffer[];
    type: string;
    type?: string;
    width?: number;
    height?: number;
}
// function checkTsPacket(pkt: Buffer) {
//     const pid = ((pkt[1] & 0x1F) << 8) | pkt[2];
//     if (pid == 256) {
//         // found video stream
//         if ((pkt[3] & 0x20) && (pkt[4] > 0)) {
//             // have AF
//             if (pkt[5] & 0x40) {
//                 // found keyframe
//                 console.log('keyframe');
//             }
//         }
//     }
// }

function createLengthParser(length: number, verify?: (concat: Buffer) => void) {
    async function* parse(socket: Duplex): AsyncGenerator<StreamChunk> {
        let pending: Buffer[] = [];
        let pendingSize = 0;
        while (true) {
            const data: Buffer = socket.read();
            if (!data) {
                await once(socket, 'readable');
                continue;
            }
            pending.push(data);
            pendingSize += data.length;
            if (pendingSize < length)
                continue;

            const concat = Buffer.concat(pending);

            verify?.(concat);

            const remaining = concat.length % length;
            const left = concat.slice(0, concat.length - remaining);
            const right = concat.slice(concat.length - remaining);
            pending = [right];
            pendingSize = right.length;

            yield {
                chunks: [left],
            };
        }
    }

    return parse;
}
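createLengthParser realigns arbitrary socket reads to fixed-size records with a modulo; a worked illustration of the re-chunking above:

// 450 buffered bytes at record size 188: 450 % 188 === 74, so 376 bytes
// (two whole packets) are yielded and 74 bytes roll into the next batch.
const concat = Buffer.alloc(450);
const remaining = concat.length % 188;                    // 74
const left = concat.slice(0, concat.length - remaining);  // 376 bytes, yielded
const right = concat.slice(concat.length - remaining);    // 74 bytes, carried over
console.log(left.length, right.length); // 376 74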
export function createDgramParser() {
    async function* parse(socket: DatagramSocket, width: number, height: number, type: string) {
        while (true) {
@@ -42,6 +91,65 @@ export function createDgramParser() {
    return parse;
}

export function createMpegTsParser(options?: StreamParserOptions): StreamParser {
    return {
        container: 'mpegts',
        outputArguments: [
            ...(options?.vcodec || []),
            ...(options?.acodec || []),
            '-f', 'mpegts',
        ],
        parse: createLengthParser(188, concat => {
            if (concat[0] != 0x47) {
                throw new Error('Invalid sync byte in mpeg-ts packet. Terminating stream.')
            }
        }),
        findSyncFrame(streamChunks): StreamChunk[] {
            for (let prebufferIndex = 0; prebufferIndex < streamChunks.length; prebufferIndex++) {
                const streamChunk = streamChunks[prebufferIndex];

                for (let chunkIndex = 0; chunkIndex < streamChunk.chunks.length; chunkIndex++) {
                    const chunk = streamChunk.chunks[chunkIndex];

                    let offset = 0;
                    while (offset + 188 < chunk.length) {
                        const pkt = chunk.subarray(offset, offset + 188);
                        const pid = ((pkt[1] & 0x1F) << 8) | pkt[2];
                        if (pid == 256) {
                            // found video stream
                            if ((pkt[3] & 0x20) && (pkt[4] > 0)) {
                                // have AF
                                if (pkt[5] & 0x40) {
                                    // we found the sync frame, but also need to send the pat and pmt
                                    // which might be at the start of this chunk before the keyframe.
                                    // yolo!
                                    return streamChunks.slice(prebufferIndex);
                                    // const chunks = streamChunk.chunks.slice(chunkIndex + 1);
                                    // const take = chunk.subarray(offset);
                                    // chunks.unshift(take);

                                    // const remainingChunks = streamChunks.slice(prebufferIndex + 1);
                                    // const ret = Object.assign({}, streamChunk);
                                    // ret.chunks = chunks;
                                    // return [
                                    //     ret,
                                    //     ...remainingChunks
                                    // ];
                                }
                            }
                        }

                        offset += 188;
                    }
                }
            }

            return findSyncFrame(streamChunks);
        }
    }
}
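createMpegTsParser leans on two MPEG-TS invariants: packets are exactly 188 bytes starting with the 0x47 sync byte, and the 13-bit PID spans the low 5 bits of byte 1 plus all of byte 2. A worked check of the header fields findSyncFrame inspects; treating PID 256 as the video stream is this pipeline's own assumption:

const pkt = Buffer.alloc(188);
pkt[0] = 0x47; // sync byte, verified by createLengthParser above
pkt[1] = 0x41; // payload unit start flag set, PID high bits 0x01
pkt[2] = 0x00; // PID low byte
const pid = ((pkt[1] & 0x1f) << 8) | pkt[2];
console.log(pid); // 256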
export async function* parseMp4StreamChunks(parser: AsyncGenerator<MP4Atom>) {
    let ftyp: MP4Atom;
    let moov: MP4Atom;
@@ -105,3 +213,54 @@ export const PIXEL_FORMAT_RGB24: RawVideoPixelFormat = {
    name: 'rgb24',
    computeLength: (width, height) => width * height * 3,
}

export function createRawVideoParser(options: RawVideoParserOptions): StreamParser {
    const pixelFormat = options?.pixelFormat || PIXEL_FORMAT_YUV420P;
    let filter: string;
    const { size, everyNFrames } = options;
    if (size) {
        filter = `scale=${size.width}:${size.height}`;
    }
    if (everyNFrames && everyNFrames > 1) {
        if (filter)
            filter += ',';
        else
            filter = '';
        filter = filter + `select=not(mod(n\\,${everyNFrames}))`
    }

    const inputArguments: string[] = [];
    if (options.size)
        inputArguments.push('-s', `${options.size.width}x${options.size.height}`);

    inputArguments.push('-pix_fmt', pixelFormat.name);
    return {
        inputArguments,
        container: 'rawvideo',
        outputArguments: [
            '-s', `${options.size.width}x${options.size.height}`,
            '-an',
            '-vcodec', 'rawvideo',
            '-pix_fmt', pixelFormat.name,
            '-f', 'rawvideo',
        ],
        async *parse(socket: Duplex, width: number, height: number): AsyncGenerator<StreamChunk> {
            width = size?.width || width;
            height = size?.height || height

            if (!width || !height)
                throw new Error("error parsing rawvideo, unknown width and height");

            const toRead = pixelFormat.computeLength(width, height);
            while (true) {
                const buffer = await readLength(socket, toRead);
                yield {
                    chunks: [buffer],
                    width,
                    height,
                }
            }
        },
        findSyncFrame,
    }
}
@@ -1,98 +0,0 @@
export abstract class AsyncUsingHolderBase<T> {
    constructor(private _value: T) {
    }

    get value(): T {
        return this._value;
    }

    async [Symbol.asyncDispose]() {
        await this.release();
    }

    abstract asyncDispose(value: T): Promise<void>;

    detach() {
        const value = this._value;
        this._value = undefined;
        return value;
    }

    async replace(value: T) {
        this.release();
        this._value = value;
    }

    async release() {
        const released = this.detach();
        if (released)
            await this.asyncDispose(released);
    }
}

export abstract class UsingHolderBase<T> {
    constructor(private _value: T) {
    }

    get value(): T {
        return this._value;
    }

    [Symbol.dispose]() {
        this.release();
    }

    abstract dispose(value: T): void;

    detach() {
        const value = this._value;
        this._value = undefined;
        return value;
    }

    replace(value: T) {
        this.release();
        this._value = value;
    }

    release() {
        const released = this.detach();
        if (released)
            this.dispose(released);
    }
}

export class UsingHolder<T extends Disposable> extends UsingHolderBase<T> {
    dispose(value: T) {
        value?.[Symbol.dispose]();
    }

    transferClosure<V>(closure: (value: UsingHolder<T>) => Promise<V>) {
        return (async () => {
            using attached = new UsingHolder(this.detach());
            return await closure(attached);
        })();
    }
}

export class AsyncUsingHolder<T extends AsyncDisposable> extends AsyncUsingHolderBase<T> {
    async asyncDispose(value: T) {
        value?.[Symbol.asyncDispose]();
    }

    transferClosure<V>(closure: (value: AsyncUsingHolder<T>) => Promise<V>) {
        return (async () => {
            await using attached = new AsyncUsingHolder(this.detach());
            return await closure(attached);
        })();
    }
}

export class DisposableHolder<T> extends UsingHolderBase<T> {
    constructor(value: T, private _dispose: (value: T) => void) {
        super(value);
    }

    dispose(value: T) {
        this._dispose(value);
    }
}
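The holder classes above wrap the TC39 explicit resource management protocol (Symbol.dispose and using declarations). A hedged sketch with DisposableHolder, assuming a TypeScript target with the disposable library enabled:

import net from 'net';

// Hypothetical usage: the socket is destroyed when the block exits,
// unless detach() or transferClosure hands ownership elsewhere first.
function probe(host: string, port: number) {
    using holder = new DisposableHolder(net.connect(port, host), s => s.destroy());
    holder.value.write('ping');
    // Symbol.dispose fires here, invoking the destroy callback.
}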
@@ -1,63 +1,25 @@
import sdk, { ForkOptions, PluginFork } from '@scrypted/sdk';
import sdk, { PluginFork } from '@scrypted/sdk';
import worker_threads from 'worker_threads';
import { createAsyncQueue } from './async-queue';
import os from 'os';

export type Zygote<T> = () => PluginFork<T>;

export function createService<T, V>(options: ForkOptions, create: (t: Promise<T>) => Promise<V>): {
    getResult: () => Promise<V>,
    terminate: () => void,
} {
    let killed = false;
    let currentResult: Promise<V>;
    let currentFork: ReturnType<typeof sdk.fork<T>>;
export function createZygote<T>(): Zygote<T> {
    if (!worker_threads.isMainThread)
        return;

    return {
        getResult() {
            if (killed)
                throw new Error('service terminated');

            if (currentResult)
                return currentResult;

            currentFork = sdk.fork<T>(options);
            currentFork.worker.on('exit', () => currentResult = undefined);
            currentResult = create(currentFork.result);
            currentResult.catch(() => {
                currentResult = undefined;
            });
            return currentResult;
        },

        terminate() {
            if (killed)
                return;

            killed = true;
            currentFork.worker.terminate();
            currentFork = undefined;
            currentResult = undefined;
        }
    }
}

export function createZygote<T>(options?: ForkOptions): Zygote<T> {
    let zygote = sdk.fork<T>(options);
    let zygote = sdk.fork<T>();
    function* next() {
        while (true) {
            const cur = zygote;
            zygote = sdk.fork<T>(options);
            zygote = sdk.fork<T>();
            yield cur;
        }
    }

    const gen = next();
    return () => {
        const ret = gen.next();
        if (ret.done || !ret.value)
            throw new Error('Zygote exhausted');
        return ret.value;
    };
    return () => gen.next().value as PluginFork<T>;
}
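A hedged usage sketch of createZygote: every call hands out the pre-warmed fork and immediately starts warming the next one. The DetectionWorker contract is hypothetical; only the Zygote and PluginFork shapes come from the code above:

interface DetectionWorker {
    detect(image: Buffer): Promise<string[]>;
}

const nextWorker = createZygote<DetectionWorker>();

async function detectOnce(image: Buffer) {
    const fork = nextWorker();
    try {
        const worker = await fork.result;
        return await worker.detect(image);
    }
    finally {
        fork.worker.terminate();
    }
}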
@@ -1,7 +1,6 @@
{
    "compilerOptions": {
        "module": "Node16",
        "moduleResolution": "Node16",
        "module": "commonjs",
        "target": "esnext",
        "noImplicitAny": true,
        "outDir": "./dist",
BIN docs/plugins/core/ui/favicon.ico (Normal file, 4.2 KiB)
BIN docs/plugins/core/ui/images/cameraloading.jpg (Normal file, 5.6 KiB)
BIN docs/plugins/core/ui/img/icons/apple-touch-icon-152x152.png (Normal file, 5.9 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-128x128.png (Executable file, 14 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-144x144.png (Executable file, 17 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-152x152.png (Executable file, 18 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-16x16.png (Normal file, 1.5 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-192x192.png (Executable file, 26 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-32x32.png (Normal file, 2.5 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-384x384.png (Executable file, 75 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-512x512.png (Executable file, 91 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-72x72.png (Executable file, 6.3 KiB)
BIN docs/plugins/core/ui/img/icons/favicon-96x96.png (Executable file, 9.8 KiB)
BIN docs/plugins/core/ui/img/icons/icon-128x128.png (Executable file, 14 KiB)
BIN docs/plugins/core/ui/img/icons/icon-144x144.png (Executable file, 17 KiB)
BIN docs/plugins/core/ui/img/icons/icon-152x152.png (Executable file, 18 KiB)
BIN docs/plugins/core/ui/img/icons/icon-192x192.png (Executable file, 26 KiB)
BIN docs/plugins/core/ui/img/icons/icon-384x384.png (Executable file, 75 KiB)
BIN docs/plugins/core/ui/img/icons/icon-512x512.png (Executable file, 91 KiB)
BIN docs/plugins/core/ui/img/icons/icon-72x72.png (Executable file, 6.3 KiB)
BIN docs/plugins/core/ui/img/icons/icon-96x96.png (Executable file, 9.8 KiB)
BIN docs/plugins/core/ui/img/icons/msapplication-icon-144x144.png (Normal file, 5.6 KiB)

1 docs/plugins/core/ui/img/icons/safari-pinned-tab.svg Normal file (667 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-cpu"><rect x="4" y="4" width="16" height="16" rx="2" ry="2"></rect><rect x="9" y="9" width="6" height="6"></rect><line x1="9" y1="1" x2="9" y2="4"></line><line x1="15" y1="1" x2="15" y2="4"></line><line x1="9" y1="20" x2="9" y2="23"></line><line x1="15" y1="20" x2="15" y2="23"></line><line x1="20" y1="9" x2="23" y2="9"></line><line x1="20" y1="14" x2="23" y2="14"></line><line x1="1" y1="9" x2="4" y2="9"></line><line x1="1" y1="14" x2="4" y2="14"></line></svg>
26 docs/plugins/core/ui/index.html Normal file
@@ -0,0 +1,26 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="utf-8">
    <meta http-equiv="X-UA-Compatible" content="IE=edge">
    <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
    <link rel="icon" href="<%= BASE_URL %>favicon.ico">
    <title>Scrypted Management Console</title>

    <link rel="preconnect" href="https://fonts.googleapis.com">
    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>

    <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Material+Icons">

    <link href="https://fonts.googleapis.com/css2?family=Lato:wght@300;400;700&display=swap" rel="stylesheet">

    <link href="https://fonts.googleapis.com/css2?family=Quicksand:wght@300;400;500;600;700&display=swap" rel="stylesheet">
</head>
<body>
    <noscript>
        <strong>We're sorry but web doesn't work properly without JavaScript enabled. Please enable it to continue.</strong>
    </noscript>
    <div id="app"></div>
    <!-- built files will be auto injected -->
</body>
</html>

45 docs/plugins/core/ui/manifest.json Normal file
@@ -0,0 +1,45 @@
{
    "name": "Scrypted Management Console",
    "short_name": "Scrypted",
    "icons": [
        {
            "src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-72x72.png",
            "sizes": "72x72",
            "type": "image/png"
        },
        {
            "src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-96x96.png",
            "sizes": "96x96",
            "type": "image/png"
        },
        {
            "src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-144x144.png",
            "sizes": "144x144",
            "type": "image/png"
        },
        {
            "src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-152x152.png",
            "sizes": "152x152",
            "type": "image/png"
        },
        {
            "src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-192x192.png",
            "sizes": "192x192",
            "type": "image/png"
        },
        {
            "src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-384x384.png",
            "sizes": "384x384",
            "type": "image/png"
        },
        {
            "src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-512x512.png",
            "sizes": "512x512",
            "type": "image/png"
        }
    ],
    "start_url": "./index.html",
    "display": "standalone",
    "background_color": "#000000",
    "theme_color": "#424242"
}

2 docs/plugins/core/ui/robots.txt Normal file
@@ -0,0 +1,2 @@
User-agent: *
Disallow:

2 external/ring-client-api (vendored)
2 external/unifi-protect (vendored)
2 external/werift (vendored)
@@ -1,12 +1,13 @@
# Home Assistant Addon Configuration
name: Scrypted
version: "v0.143.0-noble-full"
version: "18-jammy-full.s6-v0.93.0"
slug: scrypted
description: Scrypted is a high performance home video integration and automation platform
url: "https://github.com/koush/scrypted"
arch:
  - amd64
  - aarch64
  - armv7
init: false
ingress: true
ingress_port: 11080

@@ -1,4 +1,4 @@
ARG BASE="noble-full"
ARG BASE="20-jammy-full"
FROM ghcr.io/koush/scrypted-common:${BASE}

WORKDIR /
@@ -16,6 +16,6 @@ ENV NODE_OPTIONS="--dns-result-order=ipv4first"

# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION="20250101"
ENV SCRYPTED_BASE_VERSION="20240321"

CMD ["/bin/sh", "-c", "ulimit -c 0; exec npm --prefix /server exec scrypted-serve"]
CMD npm --prefix /server exec scrypted-serve

@@ -1,4 +1,4 @@
ARG BASE="noble-full"
ARG BASE="16-jammy"
FROM ghcr.io/koush/scrypted-common:${BASE}

WORKDIR /
@@ -8,4 +8,4 @@ WORKDIR /scrypted/server
RUN npm install
RUN npm run build

CMD ["npm", "run", "serve-no-build"]
CMD npm run serve-no-build

@@ -1,7 +0,0 @@
ARG BASE="ghcr.io/koush/scrypted-common:noble-amd"
FROM $BASE

ENV SCRYPTED_DOCKER_FLAVOR="amd"

# amd opencl
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-amd-graphics.sh | bash
@@ -6,20 +6,25 @@
# This common file will be used by both Docker and the linux
# install script.
################################################################
ARG BASE="noble"
FROM ubuntu:${BASE} AS header
ARG BASE="jammy"
FROM ubuntu:${BASE} as header

ENV DEBIAN_FRONTEND=noninteractive

# base tools and development stuff
RUN apt-get update && apt-get -y install \
    curl software-properties-common apt-utils \
    build-essential \
    cmake \
    ffmpeg \
    gcc \
    libcairo2-dev \
    libgirepository1.0-dev \
    pkg-config && \
    apt-get -y update && \
    apt-get -y upgrade

ARG NODE_VERSION=22
ARG NODE_VERSION=20
RUN apt-get install -y ca-certificates curl gnupg
RUN mkdir -p /etc/apt/keyrings
RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor --yes -o /etc/apt/keyrings/nodesource.gpg
@@ -35,12 +40,28 @@ RUN apt-get -y install \
    python3-setuptools \
    python3-wheel

# these are necessary for pillow-simd, additional on disk size is small
# but could consider removing this.
RUN echo "Installing pillow-simd dependencies."
RUN apt-get -y install \
    libjpeg-dev zlib1g-dev

# gstreamer native https://gstreamer.freedesktop.org/documentation/installing/on-linux.html?gi-language=c#install-gstreamer-on-ubuntu-or-debian
RUN echo "Installing gstreamer."
RUN apt-get -y install \
    gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav gstreamer1.0-alsa \
    gstreamer1.0-vaapi

# python3 gstreamer bindings
RUN echo "Installing gstreamer bindings."
RUN apt-get -y install \
    python3-gst-1.0

# allow pip to install to system
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED

# ERROR: Cannot uninstall pip 24.0, RECORD file not found. Hint: The package was installed by debian.
# RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install debugpy
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install debugpy typing_extensions psutil

################################################################
# End section generated from template/Dockerfile.full.header
@@ -48,17 +69,11 @@ RUN python3 -m pip install debugpy
################################################################
# Begin section generated from template/Dockerfile.full.footer
################################################################
FROM header AS base
FROM header as base

# vulkan
RUN apt -y install libvulkan1

# intel opencl for openvino
# intel opencl gpu for openvino
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-graphics.sh | bash

# intel NPU
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-npu.sh | bash

# python 3.9 from ppa.
# 3.9 is the version with prebuilt support for tensorflow lite
RUN add-apt-repository -y ppa:deadsnakes/ppa && \
@@ -70,29 +85,25 @@ RUN add-apt-repository -y ppa:deadsnakes/ppa && \
# allow pip to install to system
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED

# RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install debugpy
RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install debugpy typing_extensions psutil

# Coral Edge TPU
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" | tee /etc/apt/sources.list.d/coral-edgetpu.list
RUN curl -fsSL https://packages.cloud.google.com/apt/doc/apt-key.gpg | gpg --dearmor -o /etc/apt/trusted.gpg.d/coral-edgetpu.gpg
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
RUN apt-get -y update && apt-get -y install libedgetpu1-std

# set default shell to bash
RUN chsh -s /bin/bash
ENV SHELL="/bin/bash"

ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"

RUN test -f "/usr/bin/ffmpeg" && test -f "/usr/bin/python3" && test -f "/usr/bin/python3.9" && test -f "/usr/bin/python3.12"
RUN test -f "/usr/bin/ffmpeg" && test -f "/usr/bin/python3" && test -f "/usr/bin/python3.9" && test -f "/usr/bin/python3.10"
ENV SCRYPTED_FFMPEG_PATH="/usr/bin/ffmpeg"
ENV SCRYPTED_PYTHON_PATH="/usr/bin/python3"
ENV SCRYPTED_PYTHON39_PATH="/usr/bin/python3.9"
ENV SCRYPTED_PYTHON312_PATH="/usr/bin/python3.12"
ENV SCRYPTED_PYTHON310_PATH="/usr/bin/python3.10"

ENV SCRYPTED_DOCKER_FLAVOR="full"
@@ -1,7 +1,5 @@
ARG BASE="noble-lite"
FROM ubuntu:${BASE} AS header

ENV SCRYPTED_DOCKER_FLAVOR="lite"
ARG BASE="jammy"
FROM ubuntu:${BASE} as header

ENV DEBIAN_FRONTEND=noninteractive

@@ -12,19 +10,23 @@ RUN apt-get update && apt-get -y install \
    apt-get -y update && \
    apt-get -y upgrade

ARG NODE_VERSION=22
ARG NODE_VERSION=20
RUN apt-get install -y ca-certificates curl gnupg
RUN mkdir -p /etc/apt/keyrings
RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor --yes -o /etc/apt/keyrings/nodesource.gpg
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_"$NODE_VERSION".x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
RUN apt-get update && apt-get install -y nodejs

# intel opencl gpu for openvino
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-graphics.sh | bash

ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
ENV SHELL="/bin/bash"

RUN test -f "/usr/bin/python3" && test -f "/usr/bin/python3.12"
RUN test -f "/usr/bin/python3" && test -f "/usr/bin/python3.10"
ENV SCRYPTED_PYTHON_PATH="/usr/bin/python3"
ENV SCRYPTED_PYTHON312_PATH="/usr/bin/python3.12"
ENV SCRYPTED_PYTHON310_PATH="/usr/bin/python3.10"

ENV SCRYPTED_DOCKER_FLAVOR="lite"
@@ -1,11 +1,14 @@
ARG BASE="ghcr.io/koush/scrypted-common:noble-nvidia"
FROM $BASE
FROM ghcr.io/koush/scrypted:20-jammy-full.s6

ENV SCRYPTED_DOCKER_FLAVOR="nvidia"
WORKDIR /

ENV NVIDIA_DRIVER_CAPABILITIES=all
ENV NVIDIA_VISIBLE_DEVICES=all
# Install miniconda
ENV CONDA_DIR /opt/conda
RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \
    /bin/bash ~/miniconda.sh -b -p /opt/conda
# Put conda in path so we can use conda activate
ENV PATH=$CONDA_DIR/bin:$PATH

# nvidia cudnn/libcublas etc.
# for some reason this is not provided by the nvidia container toolkit
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-nvidia-graphics.sh | bash
RUN conda install -c conda-forge cudatoolkit=11.2.2 cudnn=8.1.0
ENV CONDA_PREFIX=/opt/conda
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CONDA_PREFIX/lib/

@@ -1,11 +0,0 @@
ARG BASE="ghcr.io/koush/scrypted-common:noble-nvidia-legacy"
FROM $BASE

ENV SCRYPTED_DOCKER_FLAVOR="nvidia"

ENV NVIDIA_DRIVER_CAPABILITIES=all
ENV NVIDIA_VISIBLE_DEVICES=all

# nvidia cudnn/libcublas etc.
# for some reason this is not provided by the nvidia container toolkit
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-nvidia-graphics-legacy.sh | bash
@@ -1,60 +0,0 @@
ARG BASE="noble-lite"
FROM ghcr.io/koush/scrypted-common:${BASE}

ENV SCRYPTED_DOCKER_FLAVOR="router"

# tools
RUN apt -y update && apt -y install nano net-tools dnsutils dnsmasq vlan bridge-utils netplan.io nftables isc-dhcp-client cron
RUN rm -f /etc/systemd/system/multi-user.target.wants/dnsmasq.service
RUN rm -f /etc/systemd/system/sysinit.target.wants/systemd-resolved.service

# go + caddy
RUN GO_VERSION=1.25.1 && ARCH=$(dpkg --print-architecture) && \
    if [ "$ARCH" = "amd64" ]; then GOARCH="amd64"; \
    elif [ "$ARCH" = "arm64" ]; then GOARCH="arm64"; \
    elif [ "$ARCH" = "armhf" ]; then GOARCH="armv6l"; \
    else echo "Unsupported architecture: $ARCH" && exit 1; fi && \
    curl -LO "https://go.dev/dl/go${GO_VERSION}.linux-${GOARCH}.tar.gz" && \
    tar -C /usr/local -xzf "go${GO_VERSION}.linux-${GOARCH}.tar.gz" && \
    rm "go${GO_VERSION}.linux-${GOARCH}.tar.gz"
ENV PATH=$PATH:/usr/local/go/bin
RUN apt install -y debian-keyring debian-archive-keyring apt-transport-https
RUN curl -1sLf 'https://dl.cloudsmith.io/public/caddy/xcaddy/gpg.key' | gpg --dearmor -o /usr/share/keyrings/caddy-xcaddy-archive-keyring.gpg
RUN curl -1sLf 'https://dl.cloudsmith.io/public/caddy/xcaddy/debian.deb.txt' | tee /etc/apt/sources.list.d/caddy-xcaddy.list
RUN apt -y update
RUN apt -y install xcaddy
RUN xcaddy build --with github.com/caddy-dns/cloudflare --output /usr/local/bin/caddy

# nftables
COPY ./router/scrypted-nftables.service /etc/systemd/system
RUN systemctl enable scrypted-nftables
RUN bash -c 'echo include \"/etc/nftables.d/*.conf\"\; > /etc/nftables.conf'
RUN mkdir -p /etc/nftables.d
COPY ./router/01-scrypted.conf /etc/nftables.d

# ipv6 forwarding
COPY ./router/scrypted-ip-forwarding.service /etc/systemd/system
RUN systemctl enable scrypted-ip-forwarding

# install turn server, but disable it to set it up on a per interface basis.
RUN apt -y update && apt -y install coturn && systemctl disable coturn && rm /usr/lib/systemd/system/coturn.service

# install usbmuxd for iphone tethering
# ensure the pairing info stays in persistent storage
RUN apt -y update && apt -y install usbmuxd && rm /usr/lib/systemd/system/usbmuxd.service && ln -sf /server/volume/plugins/\@scrypted/router/usbmuxd /var/lib/lockdown

WORKDIR /
# cache bust
ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
ARG SCRYPTED_INSTALL_VERSION="latest"
RUN test -n "$SCRYPTED_INSTALL_VERSION"
RUN npx -y scrypted@latest install-server ${SCRYPTED_INSTALL_VERSION}

COPY ./router/scrypted-dhcp-watcher.service /etc/systemd/system/scrypted-dhcp-watcher.service
RUN systemctl enable scrypted-dhcp-watcher

COPY ./router/scrypted.service /etc/systemd/system/scrypted.service
RUN systemctl enable scrypted

WORKDIR /
CMD ["/sbin/init"]
@@ -1,4 +1,4 @@
ARG BASE="noble-full"
ARG BASE="20-jammy-full"
FROM ghcr.io/koush/scrypted-common:${BASE}

# avahi advertiser support
@@ -8,9 +8,6 @@ RUN apt-get update && apt-get -y install \
    libavahi-compat-libdnssd-dev \
    xz-utils

# killall
RUN apt -y install psmisc

# copy configurations and scripts
COPY fs /

@@ -49,6 +46,6 @@ ENV NODE_OPTIONS="--dns-result-order=ipv4first"

# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION="20250101"
ENV SCRYPTED_BASE_VERSION="20240321"

CMD ["/bin/sh", "-c", "ulimit -c 0; exec npm --prefix /server exec scrypted-serve"]
CMD npm --prefix /server exec scrypted-serve

@@ -1,3 +1,3 @@
./docker-build.sh

docker build -t ghcr.io/koush/scrypted:nvidia -f Dockerfile.nvidia .
docker build -t ghcr.io/koush/scrypted:20-jammy-full.nvidia -f Dockerfile.nvidia .
@@ -2,9 +2,9 @@

set -x

NODE_VERSION=22
NODE_VERSION=20
SCRYPTED_INSTALL_VERSION=beta
IMAGE_BASE=noble
IMAGE_BASE=jammy
FLAVOR=full
BASE=$NODE_VERSION-$IMAGE_BASE-$FLAVOR
echo $BASE
@@ -1,45 +0,0 @@
import os
from ruamel.yaml import YAML

# Define the devices to check for
devices_to_check = [
    "/dev/dri",
    "/dev/accel",
    "/dev/apex_0",
    "/dev/apex_1",
    "/dev/kfd",
    "/dev/bus/usb"
]

# Use ruamel.yaml with better formatting preservation
yaml = YAML()
yaml.preserve_quotes = True
# Explicitly set roundtrip mode for comment preservation
yaml.typ = 'rt'
# Match the original formatting - 4 space indentation
yaml.indent = 4
# No special block sequence indentation
yaml.block_seq_indent = 0
# Don't wrap lines
yaml.width = None
# Preserve unicode
yaml.allow_unicode = True

# Read the docker-compose.yml file
with open('docker-compose.yml', 'r') as file:
    compose_data = yaml.load(file)

# Get a direct reference to the devices key
scrypted_service = compose_data['services']['scrypted']
devices = scrypted_service.setdefault('devices', [])

# Check for devices and add them if they exist
for device_path in devices_to_check:
    if os.path.exists(device_path):
        device_mapping = f"{device_path}:{device_path}"
        if device_mapping not in devices:
            devices.append(device_mapping)

# Write the modified docker-compose.yml file (preserving comments and formatting)
with open('docker-compose.yml', 'w') as file:
    yaml.dump(compose_data, file)
@@ -1,9 +0,0 @@
#!/usr/bin/env bash
# run as privileged so all the devices can be detected and only the necessary ones passed through.

docker run --rm \
    --privileged \
    -v "$(pwd):/app" \
    -w /app \
    python:3.12-slim \
    sh -c "pip install -q --root-user-action=ignore ruamel.yaml && python docker-compose-setup.py"
@@ -1,3 +1,5 @@
|
||||
version: "3.5"
|
||||
|
||||
# The Scrypted docker-compose.yml file typically resides at:
|
||||
# ~/.scrypted/docker-compose.yml
|
||||
|
||||
@@ -19,9 +21,6 @@
|
||||
|
||||
services:
|
||||
scrypted:
|
||||
# LXC usage only
|
||||
# lxc privileged: true
|
||||
|
||||
environment:
|
||||
# Scrypted NVR Storage (Part 2 of 3)
|
||||
|
||||
@@ -32,34 +31,27 @@ services:
|
||||
# section below.
|
||||
# - SCRYPTED_NVR_VOLUME=/nvr
|
||||
|
||||
- SCRYPTED_WEBHOOK_UPDATE_AUTHORIZATION=Bearer ${WATCHTOWER_HTTP_API_TOKEN:-env_missing_fallback}
|
||||
- SCRYPTED_WEBHOOK_UPDATE_AUTHORIZATION=Bearer SET_THIS_TO_SOME_RANDOM_TEXT
|
||||
- SCRYPTED_WEBHOOK_UPDATE=http://localhost:10444/v1/update
|
||||
|
||||
# LXC usage only
|
||||
# lxc - SCRYPTED_INSTALL_ENVIRONMENT=lxc-docker
|
||||
|
||||
# Avahi can be used for network discovery by passing in the host daemon
|
||||
# or running the daemon inside the container. Choose one or the other.
|
||||
# Uncomment next line to run avahi-daemon inside the container.
|
||||
# See volumes and security_opt section below to use the host daemon.
|
||||
# See volumes section below to use the host daemon.
|
||||
# - SCRYPTED_DOCKER_AVAHI=true
|
||||
|
||||
# NVIDIA (Part 1 of 2)
|
||||
# nvidia runtime: nvidia
|
||||
|
||||
# NVIDIA (Part 2 of 2) - Use NVIDIA image, and remove subsequent default image.
|
||||
# Valid images:
|
||||
# ghcr.io/koush/scrypted
|
||||
# ghcr.io/koush/scrypted:nvidia
|
||||
# ghcr.io/koush/scrypted:intel
|
||||
# ghcr.io/koush/scrypted:lite
|
||||
image: ghcr.io/koush/scrypted
|
||||
|
||||
# Uncomment next 3 lines for Nvidia GPU support.
|
||||
# - NVIDIA_VISIBLE_DEVICES=all
|
||||
# - NVIDIA_DRIVER_CAPABILITIES=all
|
||||
# runtime: nvidia
|
||||
# Necessary to communicate with host dbus for avahi-daemon.
|
||||
security_opt:
|
||||
- apparmor:unconfined
|
||||
volumes:
|
||||
# Scrypted NVR Storage (Part 3 of 3)
|
||||
|
||||
# Modify to add the additional volume for Scrypted NVR.
|
||||
# The following example would mount the /mnt/media/video path on the host
|
||||
# The following example would mount the /mnt/sda/video path on the host
|
||||
# to the /nvr path inside the docker container.
|
||||
# - /mnt/media/video:/nvr
|
||||
|
||||
@@ -74,25 +66,11 @@ services:
            # Ensure Avahi is running on the host machine:
            # It can be installed with: sudo apt-get install avahi-daemon
            # This is not compatible with running avahi inside the container (see above).
            # Also, uncomment the lines under security_opt
            # - /var/run/dbus:/var/run/dbus
            # - /var/run/avahi-daemon/socket:/var/run/avahi-daemon/socket

            # Default volume for the Scrypted database. Typically should not be changed.
            # The volume will be placed relative to this docker-compose.yml.
            - ./volume:/server/volume

            # LXC usage only
            # lxc - /var/run/docker.sock:/var/run/docker.sock
            # lxc - /root/.scrypted/docker-compose.yml:/root/.scrypted/docker-compose.yml
            # lxc - /root/.scrypted/docker-compose.sh:/root/.scrypted/docker-compose.sh
            # lxc - /root/.scrypted/.env:/root/.scrypted/.env
            # lxc - /mnt:/mnt

        # Uncomment the following lines to use Avahi daemon from the host
        # Without this, AppArmor will block the container's attempt to talk to Avahi via dbus
        # security_opt:
        #     - apparmor:unconfined
            - ~/.scrypted/volume:/server/volume
        devices: [
            # uncomment the common system devices to pass
            # them through to docker.
@@ -103,9 +81,6 @@ services:
            # hardware accelerated video decoding, opencl, etc.
            # "/dev/dri:/dev/dri",

            # AMD GPU
            # "/dev/kfd:/dev/kfd",

            # uncomment below as necessary.
            # zwave usb serial device

@@ -119,33 +94,28 @@ services:
        container_name: scrypted
        restart: unless-stopped
        network_mode: host
        image: ghcr.io/koush/scrypted

        # logging is noisy and will unnecessarily wear on flash storage.
        # scrypted has per device in memory logging that is preferred.
        # enable the log file if enhanced debugging is necessary.
        logging:
            driver: "none"
            # driver: "json-file"
            # options:
            #     max-size: "10m"
            #     max-file: "10"
            driver: "json-file"
            options:
                max-size: "10m"
                max-file: "10"
        labels:
            - "com.centurylinklabs.watchtower.scope=scrypted"

        # Use global DNS servers to avoid issues with some local DNS servers,
        # particularly with npm registry, etc.
        dns:
            - ${SCRYPTED_DNS_SERVER_0:-1.1.1.1}
            - ${SCRYPTED_DNS_SERVER_1:-8.8.8.8}

    # watchtower manages updates for Scrypted.
    watchtower:
        environment:
            - WATCHTOWER_HTTP_API_TOKEN=${WATCHTOWER_HTTP_API_TOKEN:-env_missing_fallback}
            - WATCHTOWER_HTTP_API_TOKEN=SET_THIS_TO_SOME_RANDOM_TEXT
            - WATCHTOWER_HTTP_API_UPDATE=true
            - WATCHTOWER_SCOPE=scrypted
            - WATCHTOWER_HTTP_API_PERIODIC_POLLS=${WATCHTOWER_HTTP_API_PERIODIC_POLLS:-true}
        image: nickfedor/watchtower
        # remove the following line to never allow docker to auto update.
        # this is not recommended.
            - WATCHTOWER_HTTP_API_PERIODIC_POLLS=true
        image: containrrr/watchtower
        container_name: scrypted-watchtower
        restart: unless-stopped
        volumes:
@@ -158,11 +128,3 @@ services:
            - 10444:8080
        # check for updates once an hour (interval is in seconds)
        command: --interval 3600 --cleanup --scope scrypted

        # Use global DNS servers to avoid issues with some local DNS servers,
        # particularly with npm registry, etc.
        dns:
            - ${SCRYPTED_DNS_SERVER_0:-1.1.1.1}
            - ${SCRYPTED_DNS_SERVER_1:-8.8.8.8}
        # LXC usage only
        # lxc profiles: ["disabled"]

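With the ${VAR:-default} interpolation used above, the token and the optional DNS overrides live in a .env file beside docker-compose.yml, which docker compose reads automatically. A minimal sketch of generating that file with a random token rather than hand-typed text (the DNS lines are optional; the compose file already falls back to 1.1.1.1 and 8.8.8.8):

    cd ~/.scrypted
    cat > .env <<EOF
    WATCHTOWER_HTTP_API_TOKEN=$(openssl rand -hex 24)
    SCRYPTED_DNS_SERVER_0=1.1.1.1
    SCRYPTED_DNS_SERVER_1=8.8.8.8
    EOF
    docker compose up -d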
@@ -1,9 +1,5 @@
#!/bin/bash

# disable core dumps.
# this doesn't disable core dumps on the scrypted service itself, only stuff run by init.
ulimit -c 0

if [[ "${SCRYPTED_DOCKER_AVAHI}" != "true" ]]; then
    echo "SCRYPTED_DOCKER_AVAHI != true, won't manage dbus nor avahi-daemon" >/dev/stderr
    exit 0

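When SCRYPTED_DOCKER_AVAHI is left unset, the script above exits early and the container expects the host's daemon to be passed through instead, per the compose comments. A quick host-side sanity check before choosing that route, assuming a systemd host:

    systemctl is-active avahi-daemon       # should print "active"
    ls -l /var/run/avahi-daemon/socket     # the socket the container would mount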
@@ -1,55 +0,0 @@
if [ "$(uname -m)" != "x86_64" ]
then
    echo "AMD graphics will not be installed on this architecture."
    exit 0
fi

UBUNTU_22_04=$(lsb_release -r | grep "22.04")
UBUNTU_24_04=$(lsb_release -r | grep "24.04")

# needs either ubuntu 22.04 or 24.04
if [ -z "$UBUNTU_22_04" ] && [ -z "$UBUNTU_24_04" ]
then
    echo "AMD graphics package cannot be installed. Ubuntu version could not be detected when checking lsb-release and /etc/os-release."
    exit 1
fi

if [ -n "$UBUNTU_22_04" ]
then
    distro="jammy"
else
    distro="noble"
fi

apt -y update
apt -y install rsync gpg
# the deb no longer seems to install a key?
gpg --keyserver keyserver.ubuntu.com --recv-keys 9386B48A1A693C5C
gpg --export --armor 9386B48A1A693C5C | tee /etc/apt/trusted.gpg.d/amdgpu.asc

# https://amdgpu-install.readthedocs.io/en/latest/install-prereq.html#installing-the-installer-package

# AMD keeps breaking these links. Use hard-coded links instead.

# FILENAME=$(curl -s -L https://repo.radeon.com/amdgpu-install/latest/ubuntu/$distro/ | grep -o 'amdgpu-install_[^ ]*' | cut -d'"' -f1)
# if [ -z "$FILENAME" ]
# then
#     echo "AMD graphics package cannot be installed. Could not find the package name."
#     exit 1
# fi

set -e
mkdir -p /tmp/amd
cd /tmp/amd
# curl -O -L https://repo.radeon.com/amdgpu-install/latest/ubuntu/$distro/$FILENAME

FILENAME=amdgpu-install_7.0.1.70001-1_all.deb
curl -O -L https://repo.radeon.com/amdgpu-install/7.0.1/ubuntu/$distro/$FILENAME

dpkg -i $FILENAME
apt -y update

amdgpu-install --usecase=opencl --no-dkms -y --accept-eula
cd /tmp
rm -rf /tmp/amd

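For reference, a quick way to verify an install performed by this (now removed) script: after amdgpu-install completes, the OpenCL runtime should enumerate the GPU, assuming the clinfo utility is installed:

    apt -y install clinfo
    clinfo -l    # should list an AMD platform with at least one device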
@@ -1,120 +1,16 @@
if [ "$(uname -m)" != "x86_64" ]
then
    echo "Intel graphics will not be installed on this architecture."
    exit 0
fi


UBUNTU_22_04=$(lsb_release -r | grep "22.04")
UBUNTU_24_04=$(lsb_release -r | grep "24.04")

# needs either ubuntu 22.04 or 24.04
if [ -z "$UBUNTU_22_04" ] && [ -z "$UBUNTU_24_04" ]
then
    echo "Intel graphics package cannot be installed. Ubuntu version could not be detected when checking lsb-release and /etc/os-release."
    exit 1
fi

if [ -n "$UBUNTU_22_04" ]
then
    distro="jammy"

else
    distro="noble"
fi

# no errors beyond this point
set -e

# the intel provided script is disabled since it does not work with the 6.8 kernel in Ubuntu 24.04 or Proxmox 8.2.
# manual installation of the Intel graphics stack is required.

# echo "Installing Intel graphics packages."
# apt-get update && apt-get install -y gpg-agent &&
#     rm -f /usr/share/keyrings/intel-graphics.gpg &&
#     curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --yes --output /usr/share/keyrings/intel-graphics.gpg &&
#     echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list &&
#     apt-get -y update &&
#     apt-get -y install intel-opencl-icd &&
#     apt-get -y dist-upgrade;

# need intel-media-va-driver-non-free, but all the other intel packages are installed from Intel github.
echo "Installing Intel graphics packages."

if [ "$distro" == "jammy" ]
if [ "$(uname -m)" = "x86_64" ]
then
    echo "Installing Intel graphics packages."
    apt-get update && apt-get install -y gpg-agent &&
        rm -f /usr/share/keyrings/intel-graphics.gpg &&
        curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --yes --output /usr/share/keyrings/intel-graphics.gpg &&
        echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu $distro arc" | tee /etc/apt/sources.list.d/intel.gpu.$distro.list &&
        echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list &&
        apt-get -y update &&
        apt-get -y install intel-media-va-driver-non-free &&
        apt-get -y install intel-opencl-icd intel-media-va-driver-non-free &&
        apt-get -y dist-upgrade;
    exit $?
else
    apt-get update && apt-get install -y gpg-agent &&
        rm -f /usr/share/keyrings/intel-graphics.gpg &&
        curl -L https://repositories.intel.com/gpu/intel-graphics.key | gpg --dearmor --yes --output /usr/share/keyrings/intel-graphics.gpg &&
        echo "deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu $distro unified" | tee /etc/apt/sources.list.d/intel-gpu-$distro.list &&
        apt-get -y update &&
        apt-get -y install intel-media-va-driver-non-free &&
        apt-get -y dist-upgrade;
    echo "Intel graphics will not be installed on this architecture."
fi


rm -rf /tmp/gpu && mkdir -p /tmp/gpu && cd /tmp/gpu

apt-get install -y ocl-icd-libopencl1

# very stupid legacy + current install process conflict.
# install 24.35.30872.22 for legacy support, then install latest.
# https://github.com/intel/compute-runtime/issues/770#issuecomment-2515166915

# original legacy packages
# # https://github.com/intel/compute-runtime/releases/tag/24.35.30872.22
# curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17537.20/intel-igc-core_1.0.17537.20_amd64.deb
# curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17537.20/intel-igc-opencl_1.0.17537.20_amd64.deb
# # curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-level-zero-gpu-dbgsym_1.3.30872.22_amd64.ddeb
# # curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-level-zero-gpu-legacy1-dbgsym_1.3.30872.22_amd64.ddeb
# curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-level-zero-gpu-legacy1_1.3.30872.22_amd64.deb
# curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-level-zero-gpu_1.3.30872.22_amd64.deb
# # curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-opencl-icd-dbgsym_24.35.30872.22_amd64.ddeb
# # curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-opencl-icd-legacy1-dbgsym_24.35.30872.22_amd64.ddeb
# curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-opencl-icd-legacy1_24.35.30872.22_amd64.deb
# curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-opencl-icd_24.35.30872.22_amd64.deb
# curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/libigdgmm12_22.5.0_amd64.deb

# new legacy packages
# https://github.com/intel/compute-runtime/releases/tag/24.35.30872.36
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17537.24/intel-igc-core_1.0.17537.24_amd64.deb
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17537.24/intel-igc-opencl_1.0.17537.24_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.36/intel-level-zero-gpu-legacy1-dbgsym_1.5.30872.36_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.36/intel-level-zero-gpu-legacy1_1.5.30872.36_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.36/intel-opencl-icd-legacy1-dbgsym_24.35.30872.36_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.36/intel-opencl-icd-legacy1_24.35.30872.36_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.36/libigdgmm12_22.5.0_amd64.deb

dpkg -i *.deb
rm -f *.deb

# https://github.com/intel/compute-runtime/releases
# note that at time of commit, IGC supports ubuntu 24.04 only, possibly due to their builder being on 24.04.
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/v2.18.5/intel-igc-core-2_2.18.5+19820_amd64.deb
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/v2.18.5/intel-igc-opencl-2_2.18.5+19820_amd64.deb
# curl -O -L https://github.com/intel/compute-runtime/releases/download/25.35.35096.9/intel-ocloc-dbgsym_25.35.35096.9-0_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/25.35.35096.9/intel-ocloc_25.35.35096.9-0_amd64.deb
# curl -O -L https://github.com/intel/compute-runtime/releases/download/25.35.35096.9/intel-opencl-icd-dbgsym_25.35.35096.9-0_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/25.35.35096.9/intel-opencl-icd_25.35.35096.9-0_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/25.35.35096.9/libigdgmm12_22.8.1_amd64.deb
# curl -O -L https://github.com/intel/compute-runtime/releases/download/25.35.35096.9/libze-intel-gpu1-dbgsym_25.35.35096.9-0_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/25.35.35096.9/libze-intel-gpu1_25.35.35096.9-0_amd64.deb

set +e
dpkg -i *.deb
set -e
# the legacy + latest process says this may be necessary but it does not seem to be in a clean environment.
apt-get -y install --fix-broken


cd /tmp && rm -rf /tmp/gpu

apt-get -y dist-upgrade
exit 0

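As with the AMD script, a follow-up sanity check is worthwhile here; a sketch, assuming the vainfo and clinfo utilities are installed:

    apt -y install vainfo clinfo
    vainfo       # should report the iHD driver provided by intel-media-va-driver-non-free
    clinfo -l    # should list the Intel OpenCL platform installed above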
@@ -1,82 +0,0 @@
if [ "$(uname -m)" != "x86_64" ]
then
    echo "Intel NPU will not be installed on this architecture."
    exit 0
fi

UBUNTU_22_04=$(lsb_release -r | grep "22.04")
UBUNTU_24_04=$(lsb_release -r | grep "24.04")

if [ -z "$UBUNTU_22_04" ] && [ -z "$UBUNTU_24_04" ]
then
    # proxmox is compatible with intel's ubuntu builds, check for /etc/pve directory
    # then determine debian version
    version=$(cat /etc/debian_version 2>/dev/null)

    # Determine if it's Debian 12 or 13
    if [[ "$version" == 12* ]]; then
        UBUNTU_22_04=true
    elif [[ "$version" == 13* ]]; then
        UBUNTU_24_04=true
    fi

fi

# needs either ubuntu 22.04 or 24.04
if [ -z "$UBUNTU_22_04" ] && [ -z "$UBUNTU_24_04" ]
then
    echo "Intel NPU will not be installed. Ubuntu version could not be detected when checking lsb-release and /etc/os-release."
    exit 0
fi

if [ -n "$UBUNTU_22_04" ]
then
    ubuntu_distro=ubuntu2204
    distro="22.04_amd64"
else
    ubuntu_distro=ubuntu2404
    distro="24.04_amd64"
fi

dpkg --purge --force-remove-reinstreq intel-driver-compiler-npu intel-fw-npu intel-level-zero-npu

# no errors beyond this point
set -e

rm -rf /tmp/npu && mkdir -p /tmp/npu && cd /tmp/npu

# level zero must also be installed
LEVEL_ZERO_VERSION=1.24.2
# https://github.com/oneapi-src/level-zero
curl -O -L https://github.com/oneapi-src/level-zero/releases/download/v"$LEVEL_ZERO_VERSION"/level-zero_"$LEVEL_ZERO_VERSION"+u$distro.deb
curl -O -L https://github.com/oneapi-src/level-zero/releases/download/v"$LEVEL_ZERO_VERSION"/level-zero-devel_"$LEVEL_ZERO_VERSION"+u$distro.deb

# npu driver
# https://github.com/intel/linux-npu-driver
NPU_VERSION=1.23.0
NPU_VERSION_DATE=20250827-17270089246
NPU_TAR_FILENAME=linux-npu-driver-v"$NPU_VERSION"."$NPU_VERSION_DATE"-$ubuntu_distro.tar.gz
curl -O -L https://github.com/intel/linux-npu-driver/releases/download/v"$NPU_VERSION"/"$NPU_TAR_FILENAME"
tar xzvf "$NPU_TAR_FILENAME"

# firmware can only be installed on the host; it will cause problems inside a container.
# when INTEL_FW_NPU is unset, drop the firmware package before installing the rest.
if [ -z "$INTEL_FW_NPU" ]
then
    rm *fw-npu*
fi

apt -y update
apt -y install libtbb12
dpkg -i *.deb

cd /tmp && rm -rf /tmp/npu

apt-get -y dist-upgrade

if [ -n "$INTEL_FW_NPU" ]
then
    echo
    echo "###############################################################################"
    echo "Intel NPU firmware was installed. Reboot the host to complete the installation."
    echo "###############################################################################"
fi
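Not part of the script above, but once it finishes (and after a host reboot, if the firmware was installed), the NPU should surface through the kernel's accel subsystem; a hedged check:

    ls -l /dev/accel/accel0           # device node created by the intel_vpu driver
    sudo dmesg | grep -i intel_vpu    # driver initialization messages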