Compare commits


1 Commit

Author SHA1 Message Date
Koushik Dutta fe165295fb postrelease 2024-03-23 12:34:03 -07:00
743 changed files with 59716 additions and 42374 deletions

View File

@@ -13,11 +13,11 @@ Before opening an issue, view the device's Console logs in the Scrypted Manageme
**DO NOT OPEN ISSUES FOR ANY OF THE FOLLOWING:**
* Server or hardware setup assistance. Use Discord, Reddit, or Github Discussions.
* Server setup assistance. Use Discord, Reddit, or Github Discussions.
* Hardware setup assistance. Use Discord, Reddit, or Github Discussions.
* Feature Requests. Use Discord, Reddit, or Github Discussions.
* Packet loss in your camera logs. This is wifi/network congestion.
* HomeKit weirdness. See HomeKit troubleshooting guide.
* Release schedules or timelines. Releases are rolled out unevenly across the different server platforms.
However, if something **was working**, and is now **no longer working**, you may create a Github issue.
Created issues that do not meet these requirements or are improperly filled out will be immediately closed.
@@ -27,11 +27,6 @@ Created issues that do not meet these requirements or are improperly filled out
1. Delete this section and everything above it.
2. Fill out the sections below.
**Before You Submit**
- [ ] I checked that my issue isn't already filed: [Search open issues](https://github.com/koush/scrypted/issues).
- [ ] I checked the relevant camera/device and/or plugin `Log` in the `Management Console` for errors or warnings that may help identify and resolve the issue myself.
**Describe the bug**
A clear and concise description of what the bug is. The issue tracker is only for reporting bugs in Scrypted; for general support, check Discord. Hardware support or assistance requests will be immediately closed.
@@ -48,9 +43,6 @@ A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Logs**
Include a `Log` from the device/camera in the management console (and if applicable, the affected plugin, like HomeKit).
**Server (please complete the following information):**
- OS: [e.g. Ubuntu]
- Installation Method: [e.g. Desktop App, Docker, Local]

View File

@@ -1,11 +1,11 @@
name: Build changed plugins
on:
# push:
# branches: ["main"]
# paths: ["plugins/**"]
# pull_request:
# paths: ["plugins/**"]
push:
branches: ["main"]
paths: ["plugins/**"]
pull_request:
paths: ["plugins/**"]
workflow_dispatch:
jobs:

View File

@@ -7,7 +7,7 @@ on:
pull_request:
paths: ["sdk/**"]
workflow_dispatch:
jobs:
build:
name: Build
@@ -15,11 +15,11 @@ jobs:
defaults:
run:
working-directory: ./sdk
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: 22.4.1
node-version: 18
- run: npm ci
- run: npm run build

View File

@@ -7,11 +7,14 @@ jobs:
build:
name: Push Docker image to Docker Hub
runs-on: self-hosted
env:
NODE_VERSION: '20'
# runs-on: ubuntu-latest
strategy:
matrix:
BASE: ["noble"]
NODE_VERSION: [
# "18",
"20"
]
BASE: ["jammy"]
FLAVOR: ["full", "lite"]
steps:
- name: Check out the repo
@@ -20,26 +23,12 @@ jobs:
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_AMD64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_ARM64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_AMD64 }}
platforms: linux/amd64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
@@ -65,84 +54,14 @@ jobs:
uses: docker/build-push-action@v4
with:
build-args: |
NODE_VERSION=${{ env.NODE_VERSION }}
NODE_VERSION=${{ matrix.NODE_VERSION }}
BASE=${{ matrix.BASE }}
context: install/docker/
file: install/docker/Dockerfile.${{ matrix.FLAVOR }}
platforms: linux/amd64,linux/arm64
push: true
tags: |
koush/scrypted-common:${{ matrix.BASE }}-${{ matrix.FLAVOR }}
ghcr.io/koush/scrypted-common:${{ matrix.BASE }}-${{ matrix.FLAVOR }}
cache-from: type=gha
cache-to: type=gha,mode=max
build-nvidia:
name: Push NVIDIA Docker image to Docker Hub
needs: build
runs-on: self-hosted
strategy:
matrix:
BASE: ["noble"]
steps:
- name: Check out the repo
uses: actions/checkout@v3
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_AMD64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_ARM64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_AMD64 }}
platforms: linux/amd64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/arm64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_ARM64 }}
platforms: linux/arm64
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Login to Github Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Docker image (scrypted-common)
uses: docker/build-push-action@v4
with:
build-args: |
BASE=ghcr.io/koush/scrypted-common:${{ matrix.BASE }}-full
context: install/docker/
file: install/docker/Dockerfile.nvidia
platforms: linux/amd64,linux/arm64
push: true
tags: |
koush/scrypted-common:${{ matrix.BASE }}-nvidia
ghcr.io/koush/scrypted-common:${{ matrix.BASE }}-nvidia
koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}
ghcr.io/koush/scrypted-common:${{ matrix.NODE_VERSION }}-${{ matrix.BASE }}-${{ matrix.FLAVOR }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -20,10 +20,10 @@ jobs:
strategy:
matrix:
BASE: [
["noble-nvidia", ".s6"],
["noble-full", ".s6"],
["noble-lite", ""],
"20-jammy-full",
"20-jammy-lite",
]
SUPERVISOR: ["", ".s6"]
steps:
- name: Check out the repo
uses: actions/checkout@v3
@@ -42,26 +42,12 @@ jobs:
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_AMD64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up SSH
uses: MrSquaare/ssh-setup-action@v2
with:
host: ${{ secrets.DOCKER_SSH_HOST_ARM64 }}
private-key: ${{ secrets.DOCKER_SSH_PRIVATE_KEY }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64
append: |
- endpoint: ssh://${{ secrets.DOCKER_SSH_USER }}@${{ secrets.DOCKER_SSH_HOST_AMD64 }}
platforms: linux/amd64
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
@@ -87,23 +73,23 @@ jobs:
uses: docker/build-push-action@v4
with:
build-args: |
BASE=${{ matrix.BASE[0] }}
BASE=${{ matrix.BASE }}
SCRYPTED_INSTALL_VERSION=${{ steps.package-version.outputs.NPM_VERSION }}
context: install/docker/
file: install/docker/Dockerfile${{ matrix.BASE[1] }}
file: install/docker/Dockerfile${{ matrix.SUPERVISOR }}
platforms: linux/amd64,linux/arm64
push: true
tags: |
${{ format('koush/scrypted:v{1}-{0}', matrix.BASE[0], github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE[0] == 'noble-full' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'noble-nvidia' && 'koush/scrypted:nvidia' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'noble-full' && 'koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'noble-lite' && 'koush/scrypted:lite' || '' }}
${{ format('koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '.s6' && format('koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '' && 'koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '' && 'koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'koush/scrypted:lite-s6' || '' }}
${{ format('ghcr.io/koush/scrypted:v{1}-{0}', matrix.BASE[0], github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE[0] == 'noble-full' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'noble-nvidia' && 'ghcr.io/koush/scrypted:nvidia' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'noble-full' && 'ghcr.io/koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE[0] == 'noble-lite' && 'ghcr.io/koush/scrypted:lite' || '' }}
${{ format('ghcr.io/koush/scrypted:{0}{1}-v{2}', matrix.BASE, matrix.SUPERVISOR, github.event.inputs.publish_tag || steps.package-version.outputs.NPM_VERSION) }}
${{ matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '.s6' && format('ghcr.io/koush/scrypted:{0}', github.event.inputs.tag) || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-full' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:full' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '' && 'ghcr.io/koush/scrypted:lite' || '' }}
${{ github.event.inputs.tag == 'latest' && matrix.BASE == '20-jammy-lite' && matrix.SUPERVISOR == '.s6' && 'ghcr.io/koush/scrypted:lite-s6' || '' }}
cache-from: type=gha
cache-to: type=gha,mode=max

View File

@@ -1,44 +0,0 @@
# Simple workflow for deploying static content to GitHub Pages
name: Deploy static content to Pages
on:
# Runs on pushes targeting the default branch
push:
branches: ["main"]
paths: ["sites/static/**", ".github/workflows/static-sites.yml"]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
group: "pages"
cancel-in-progress: false
jobs:
# Single deploy job since we're just deploying
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Pages
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
# Upload the static site directory
path: './sites/static'
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4

View File

@@ -9,28 +9,52 @@ on:
workflow_dispatch:
jobs:
test_local:
name: Test local installation on ${{ matrix.runner }}
runs-on: ${{ matrix.runner }}
strategy:
fail-fast: false
matrix:
runner: [ubuntu-latest, ubuntu-24.04-arm, macos-14, macos-13, windows-latest]
test_linux_local:
name: Test Linux local installation
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Parse latest server release
id: parse_server
shell: bash
- name: Run install script
run: |
VERSION=$(cat ./server/package-lock.json | jq -r '.version')
echo "version=$VERSION" >> "$GITHUB_OUTPUT"
echo "Will test @scrypted/server@$VERSION"
- name: Install scrypted server
uses: scryptedapp/setup-scrypted@v0.0.2
with:
branch: ${{ github.sha }}
version: ${{ steps.parse_server.outputs.version }}
cat ./install/local/install-scrypted-dependencies-linux.sh | sudo SERVICE_USER=$USER bash
- name: Test server is running
run: |
systemctl status scrypted.service
curl -k --retry 20 --retry-all-errors --retry-max-time 600 https://localhost:10443/
test_mac_local:
name: Test Mac local installation
runs-on: macos-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Run install script
run: |
mkdir -p ~/.scrypted
bash ./install/local/install-scrypted-dependencies-mac.sh
- name: Test server is running
run: |
curl -k --retry 20 --retry-all-errors --retry-max-time 600 https://localhost:10443/
test_windows_local:
name: Test Windows local installation
runs-on: windows-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Run install script
run: |
.\install\local\install-scrypted-dependencies-win.ps1
- name: Test server is running
run: |
curl -k --retry 20 --retry-all-errors --retry-max-time 600 https://localhost:10443/

12 .gitmodules vendored
View File

@@ -1,6 +1,9 @@
[submodule "plugins/unifi-protect/src/unifi-protect"]
path = external/unifi-protect
url = ../../koush/unifi-protect.git
[submodule "plugins/myq/src/myq"]
path = plugins/myq/src/myq
url = ../../koush/myq.git
[submodule "external/ring-client-api"]
path = external/ring-client-api
url = ../../koush/ring
@@ -11,6 +14,12 @@
[submodule "external/werift"]
path = external/werift
url = ../../koush/werift-webrtc
[submodule "plugins/zwave/file-stream-rotator"]
path = plugins/zwave/file-stream-rotator
url = ../../koush/file-stream-rotator.git
[submodule "sdk/developer.scrypted.app"]
path = sdk/developer.scrypted.app
url = ../../koush/developer.scrypted.app
[submodule "plugins/sample-cameraprovider"]
path = plugins/sample-cameraprovider
url = ../../koush/scrypted-sample-cameraprovider
@@ -20,3 +29,6 @@
[submodule "plugins/wyze/docker-wyze-bridge"]
path = plugins/wyze/docker-wyze-bridge
url = ../../koush/docker-wyze-bridge.git
[submodule "plugins/onvif/onvif"]
path = plugins/onvif/onvif
url = ../../koush/onvif.git

View File

@@ -1 +0,0 @@
../../../../sdk/dist/src/settings-mixin.d.ts

View File

@@ -1 +0,0 @@
../../../../sdk/dist/src/storage-settings.d.ts

145 common/package-lock.json generated
View File

@@ -10,12 +10,12 @@
"license": "ISC",
"dependencies": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^5.0.1",
"typescript": "^5.5.3"
"typescript": "^5.3.3"
},
"devDependencies": {
"@types/node": "^20.11.0",
"monaco-editor": "^0.50.0",
"ts-node": "^10.9.2"
}
},
@@ -74,7 +74,7 @@
},
"../sdk": {
"name": "@scrypted/sdk",
"version": "0.3.45",
"version": "0.3.4",
"license": "ISC",
"dependencies": {
"@babel/preset-typescript": "^7.18.6",
@@ -111,58 +111,64 @@
},
"../server": {
"name": "@scrypted/server",
"version": "0.115.0",
"extraneous": true,
"hasInstallScript": true,
"version": "0.82.0",
"license": "ISC",
"dependencies": {
"@mapbox/node-pre-gyp": "^1.0.11",
"@scrypted/ffmpeg-static": "^6.1.0-build1",
"@scrypted/node-pty": "^1.0.18",
"@scrypted/types": "^0.3.33",
"adm-zip": "^0.5.14",
"@scrypted/types": "^0.3.4",
"adm-zip": "^0.5.10",
"body-parser": "^1.20.2",
"cookie-parser": "^1.4.6",
"dotenv": "^16.4.5",
"engine.io": "^6.6.0",
"express": "^4.19.2",
"follow-redirects": "^1.15.6",
"debug": "^4.3.4",
"engine.io": "^6.5.4",
"express": "^4.18.2",
"ffmpeg-static": "^5.2.0",
"follow-redirects": "^1.15.4",
"http-auth": "^4.2.0",
"ip": "^2.0.1",
"level": "^8.0.1",
"ip": "^1.1.8",
"level": "^8.0.0",
"linkfs": "^2.1.0",
"lodash": "^4.17.21",
"nan": "^2.20.0",
"memfs": "^4.6.0",
"mime": "^3.0.0",
"nan": "^2.18.0",
"node-dijkstra": "^2.5.0",
"node-forge": "^1.3.1",
"node-gyp": "^10.1.0",
"py": "npm:@bjia56/portable-python@^0.1.54",
"node-gyp": "^10.0.1",
"router": "^1.3.8",
"semver": "^7.6.2",
"sharp": "^0.33.4",
"semver": "^7.5.4",
"sharp": "^0.33.1",
"source-map-support": "^0.5.21",
"tar": "^7.4.0",
"tslib": "^2.6.3",
"typescript": "^5.5.3",
"tar": "^6.2.0",
"tslib": "^2.6.2",
"typescript": "^5.3.3",
"whatwg-mimetype": "^4.0.0",
"ws": "^8.18.0"
"ws": "^8.16.0"
},
"bin": {
"scrypted-serve": "bin/scrypted-serve"
},
"devDependencies": {
"@types/adm-zip": "^0.5.5",
"@types/cookie-parser": "^1.4.7",
"@types/cookie-parser": "^1.4.6",
"@types/debug": "^4.1.12",
"@types/express": "^4.17.21",
"@types/follow-redirects": "^1.14.4",
"@types/http-auth": "^4.1.4",
"@types/ip": "^1.1.3",
"@types/lodash": "^4.17.6",
"@types/lodash": "^4.14.202",
"@types/mime": "^3.0.4",
"@types/node-dijkstra": "^2.5.6",
"@types/node-forge": "^1.3.11",
"@types/semver": "^7.5.8",
"@types/node-forge": "^1.3.10",
"@types/pem": "^1.14.4",
"@types/semver": "^7.5.6",
"@types/source-map-support": "^0.5.10",
"@types/tar": "^6.1.10",
"@types/whatwg-mimetype": "^3.0.2",
"@types/ws": "^8.5.10"
},
"optionalDependencies": {
"node-pty-prebuilt-multiarch": "^0.10.1-pre.5"
}
},
"node_modules/@cspotcode/source-map-support": {
@@ -206,6 +212,10 @@
"resolved": "../sdk",
"link": true
},
"node_modules/@scrypted/server": {
"resolved": "../server",
"link": true
},
"node_modules/@tsconfig/node10": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
@@ -298,12 +308,6 @@
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
"dev": true
},
"node_modules/monaco-editor": {
"version": "0.50.0",
"resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.50.0.tgz",
"integrity": "sha512-8CclLCmrRRh+sul7C08BmPBP3P8wVWfBHomsTcndxg5NRCEPfu/mc2AGU8k37ajjDVXcXFc12ORAMUkmk+lkFA==",
"dev": true
},
"node_modules/ts-node": {
"version": "10.9.2",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
@@ -348,9 +352,9 @@
}
},
"node_modules/typescript": {
"version": "5.5.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
"integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ==",
"version": "5.3.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz",
"integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==",
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
@@ -445,6 +449,59 @@
"webpack-bundle-analyzer": "^4.5.0"
}
},
"@scrypted/server": {
"version": "file:../server",
"requires": {
"@mapbox/node-pre-gyp": "^1.0.11",
"@scrypted/types": "^0.3.4",
"@types/adm-zip": "^0.5.5",
"@types/cookie-parser": "^1.4.6",
"@types/debug": "^4.1.12",
"@types/express": "^4.17.21",
"@types/follow-redirects": "^1.14.4",
"@types/http-auth": "^4.1.4",
"@types/ip": "^1.1.3",
"@types/lodash": "^4.14.202",
"@types/mime": "^3.0.4",
"@types/node-dijkstra": "^2.5.6",
"@types/node-forge": "^1.3.10",
"@types/pem": "^1.14.4",
"@types/semver": "^7.5.6",
"@types/source-map-support": "^0.5.10",
"@types/tar": "^6.1.10",
"@types/whatwg-mimetype": "^3.0.2",
"@types/ws": "^8.5.10",
"adm-zip": "^0.5.10",
"body-parser": "^1.20.2",
"cookie-parser": "^1.4.6",
"debug": "^4.3.4",
"engine.io": "^6.5.4",
"express": "^4.18.2",
"ffmpeg-static": "^5.2.0",
"follow-redirects": "^1.15.4",
"http-auth": "^4.2.0",
"ip": "^1.1.8",
"level": "^8.0.0",
"linkfs": "^2.1.0",
"lodash": "^4.17.21",
"memfs": "^4.6.0",
"mime": "^3.0.0",
"nan": "^2.18.0",
"node-dijkstra": "^2.5.0",
"node-forge": "^1.3.1",
"node-gyp": "^10.0.1",
"node-pty-prebuilt-multiarch": "^0.10.1-pre.5",
"router": "^1.3.8",
"semver": "^7.5.4",
"sharp": "^0.33.1",
"source-map-support": "^0.5.21",
"tar": "^6.2.0",
"tslib": "^2.6.2",
"typescript": "^5.3.3",
"whatwg-mimetype": "^4.0.0",
"ws": "^8.16.0"
}
},
"@tsconfig/node10": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
@@ -522,12 +579,6 @@
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
"dev": true
},
"monaco-editor": {
"version": "0.50.0",
"resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.50.0.tgz",
"integrity": "sha512-8CclLCmrRRh+sul7C08BmPBP3P8wVWfBHomsTcndxg5NRCEPfu/mc2AGU8k37ajjDVXcXFc12ORAMUkmk+lkFA==",
"dev": true
},
"ts-node": {
"version": "10.9.2",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
@@ -550,9 +601,9 @@
}
},
"typescript": {
"version": "5.5.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.3.tgz",
"integrity": "sha512-/hreyEujaB0w76zKo6717l3L0o/qEUtRgdvUBvlkhoWeOVMjMuHNHk0BRBzikzuGDqNmPQbg5ifMEqsHLiIUcQ=="
"version": "5.3.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz",
"integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw=="
},
"undici-types": {
"version": "5.26.5",

View File

@@ -12,12 +12,12 @@
"license": "ISC",
"dependencies": {
"@scrypted/sdk": "file:../sdk",
"@scrypted/server": "file:../server",
"http-auth-utils": "^5.0.1",
"typescript": "^5.5.3"
"typescript": "^5.3.3"
},
"devDependencies": {
"@types/node": "^20.11.0",
"monaco-editor": "^0.50.0",
"ts-node": "^10.9.2"
}
}

View File

@@ -1,28 +0,0 @@
export function createActivityTimeout(timeout: number, timeoutCallback: () => void) {
let dataTimeout: NodeJS.Timeout;
let lastTime = Date.now();
function resetActivityTimer() {
lastTime = Date.now();
}
function clearActivityTimer() {
clearInterval(dataTimeout);
}
if (timeout) {
dataTimeout = setInterval(() => {
if (Date.now() > lastTime + timeout) {
clearInterval(dataTimeout);
dataTimeout = undefined;
timeoutCallback();
}
}, timeout);
}
resetActivityTimer();
return {
resetActivityTimer,
clearActivityTimer,
}
}
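
For reference, a minimal usage sketch of createActivityTimeout (illustrative, not part of this commit; the 60 second timeout and a session object exposing kill() are assumptions):

import { createActivityTimeout } from '@scrypted/common/src/activity-timeout';

// assumed: session is any object exposing kill(error?: Error)
const activity = createActivityTimeout(60000, () => {
    session.kill(new Error('timeout waiting for data'));
});
// call on every received chunk to keep the session alive
activity.resetActivityTimer();
// call when the session is torn down for any other reason
activity.clearActivityTimer();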

View File

@@ -40,7 +40,7 @@ export function createAsyncQueue<T>() {
return false;
if (waiting.length) {
const deferred = waiting.shift()!;
const deferred = waiting.shift();
dequeued?.resolve();
deferred.resolve(item);
return true;
@@ -66,7 +66,7 @@ export function createAsyncQueue<T>() {
dequeued?.reject(new Error('abort'));
};
dequeued?.promise.catch(() => {}).finally(() => signal.removeEventListener('abort', h));
dequeued.promise.catch(() => {}).finally(() => signal.removeEventListener('abort', h));
signal.addEventListener('abort', h);
return true;
@@ -79,7 +79,7 @@ export function createAsyncQueue<T>() {
ended = e || new EndError();
endDeferred.resolve();
while (waiting.length) {
waiting.shift()!.reject(ended);
waiting.shift().reject(ended);
}
return true;
}
@@ -94,7 +94,7 @@ export function createAsyncQueue<T>() {
}
catch (e) {
// the yield above may raise an error, and the queue should be ended.
end(e as Error);
end(e);
if (e instanceof EndError)
return;
throw e;
@@ -155,23 +155,6 @@ export function createAsyncQueue<T>() {
}
}
export function createAsyncQueueFromGenerator<T>(generator: AsyncGenerator<T>) {
const q = createAsyncQueue<T>();
(async() => {
try {
for await (const i of generator) {
await q.enqueue(i);
}
}
catch (e) {
q.end(e as Error);
}
q.end();
})();
return q;
}
// async function testSlowEnqueue() {
// const asyncQueue = createAsyncQueue<number>();

View File

@@ -1,209 +0,0 @@
import sdk, { AudioStreamOptions, MediaStreamConfiguration, MediaStreamDestination, MediaStreamOptions, ScryptedDeviceBase, Setting } from "@scrypted/sdk";
export const automaticallyConfigureSettings: Setting = {
key: 'autoconfigure',
title: 'Automatically Configure Settings',
description: 'Automatically configure and validate the camera codecs and other settings for optimal Scrypted performance. Some settings will require manual configuration via the camera web admin.',
type: 'boolean',
value: true,
};
export const onvifAutoConfigureSettings: Setting = {
key: 'onvif-autoconfigure',
type: 'html',
value: 'ONVIF autoconfiguration will configure the camera codecs. <b>The camera motion sensor must still be <a target="_blank" href="https://docs.scrypted.app/camera-preparation.html#motion-sensor-setup">configured manually</a>.</b>',
};
const MEGABIT = 1024 * 1000;
function getBitrateForResolution(resolution: number) {
if (resolution >= 3840 * 2160)
return 8 * MEGABIT;
if (resolution >= 2688 * 1520)
return 3 * MEGABIT;
if (resolution >= 1920 * 1080)
return 2 * MEGABIT;
if (resolution >= 1280 * 720)
return MEGABIT;
if (resolution >= 640 * 480)
return MEGABIT / 2;
return MEGABIT / 4;
}
export async function checkPluginNeedsAutoConfigure(plugin: ScryptedDeviceBase, extraDevices = 0) {
if (plugin.storage.getItem('autoconfigure') === 'true')
return;
plugin.storage.setItem('autoconfigure', 'true');
if (sdk.deviceManager.getNativeIds().length <= 1 + extraDevices)
return;
plugin.log.a(`${plugin.name} now has support for automatic camera configuration for optimal performance. Cameras can be autoconfigured in their respective settings.`);
}
export async function autoconfigureCodecs(
getCodecs: () => Promise<MediaStreamOptions[]>,
configureCodecs: (options: MediaStreamOptions) => Promise<MediaStreamConfiguration>,
audioOptions?: AudioStreamOptions,
) {
audioOptions ||= {
codec: 'pcm_mulaw',
bitrate: 64000,
sampleRate: 8000,
};
const codecs = await getCodecs();
const configurable: MediaStreamConfiguration[] = [];
for (const codec of codecs) {
const config = await configureCodecs({
id: codec.id,
});
configurable.push(config);
}
const used: MediaStreamConfiguration[] = [];
for (const _ of ['local', 'remote', 'low-resolution'] as MediaStreamDestination[]) {
// find stream with the highest configurable resolution.
let highest: [MediaStreamConfiguration, number] = [undefined, 0];
for (const codec of configurable) {
if (used.includes(codec))
continue;
for (const resolution of codec.video.resolutions) {
if (resolution[0] * resolution[1] > highest[1]) {
highest = [codec, resolution[0] * resolution[1]];
}
}
}
const config = highest[0];
if (!config)
break;
used.push(config);
}
const findResolutionTarget = (config: MediaStreamConfiguration, width: number, height: number) => {
let diff = 999999999;
let ret: [number, number];
const targetArea = width * height;
for (const res of config.video.resolutions) {
const actualArea = res[0] * res[1];
const diffArea = Math.abs(targetArea - actualArea);
if (diffArea < diff) {
diff = diffArea;
ret = res;
}
}
return ret;
}
// find the highest resolution
const l = used[0];
const resolution = findResolutionTarget(l, 8192, 8192);
// get the fps of 20 or highest available
let fps = Math.min(20, Math.max(...l.video.fpsRange));
let errors = '';
const logConfigureCodecs = async (config: MediaStreamConfiguration) => {
try {
await configureCodecs(config);
}
catch (e) {
errors += e;
}
}
await logConfigureCodecs({
id: l.id,
video: {
width: resolution[0],
height: resolution[1],
bitrateControl: 'variable',
codec: 'h264',
bitrate: getBitrateForResolution(resolution[0] * resolution[1]),
fps,
keyframeInterval: fps * 4,
quality: 5,
profile: 'main',
},
audio: audioOptions,
});
if (used.length === 3) {
// find remote and low
const r = used[1];
const l = used[2];
const rResolution = findResolutionTarget(r, 1280, 720);
const lResolution = findResolutionTarget(l, 640, 360);
fps = Math.min(20, Math.max(...r.video.fpsRange));
await logConfigureCodecs({
id: r.id,
video: {
width: rResolution[0],
height: rResolution[1],
bitrateControl: 'variable',
codec: 'h264',
bitrate: 1 * MEGABIT,
fps,
keyframeInterval: fps * 4,
quality: 5,
profile: 'main',
},
audio: audioOptions,
});
fps = Math.min(20, Math.max(...l.video.fpsRange));
await logConfigureCodecs({
id: l.id,
video: {
width: lResolution[0],
height: lResolution[1],
bitrateControl: 'variable',
codec: 'h264',
bitrate: MEGABIT / 2,
fps,
keyframeInterval: fps * 4,
quality: 5,
profile: 'main',
},
audio: audioOptions,
});
}
else if (used.length == 2) {
let target: [number, number];
if (resolution[0] * resolution[1] > 1920 * 1080)
target = [1280, 720];
else
target = [640, 360];
const rResolution = findResolutionTarget(used[1], target[0], target[1]);
const fps = Math.min(20, Math.max(...used[1].video.fpsRange));
await logConfigureCodecs({
id: used[1].id,
video: {
width: rResolution[0],
height: rResolution[1],
bitrateControl: 'variable',
codec: 'h264',
bitrate: getBitrateForResolution(rResolution[0] * rResolution[1]),
fps,
keyframeInterval: fps * 4,
quality: 5,
profile: 'main',
},
audio: audioOptions,
});
}
else if (used.length === 1) {
// no op, nothing more to configure
}
if (errors)
throw new Error(errors);
}

View File

@@ -41,15 +41,11 @@ export abstract class AutoenableMixinProvider extends ScryptedDeviceBase {
return true;
}
checkHasEnabledMixin(device: ScryptedDevice) {
return this.hasEnabledMixin[device.id] === this.autoIncludeToken;
}
async maybeEnableMixin(device: ScryptedDevice) {
if (!device || device.mixins?.includes(this.id))
return;
if (this.checkHasEnabledMixin(device))
if (this.hasEnabledMixin[device.id] === this.autoIncludeToken)
return;
const match = await this.canMixin(device.type, device.interfaces);

View File

@@ -1 +0,0 @@
../../server/src/deferred.ts

17 common/src/deferred.ts Normal file
View File

@@ -0,0 +1,17 @@
export class Deferred<T> {
finished = false;
resolve!: (value: T|PromiseLike<T>) => this;
reject!: (error: Error) => this;
promise: Promise<T> = new Promise((resolve, reject) => {
this.resolve = v => {
this.finished = true;
resolve(v);
return this;
};
this.reject = e => {
this.finished = true;
reject(e);
return this;
};
});
}
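
For reference, a minimal sketch of how the new Deferred helper is used (illustrative only; the child-process shape below is an assumption):

import { Deferred } from '@scrypted/common/src/deferred';

// assumed: cp is anything emitting a one-shot 'exit' event
function waitForExit(cp: { once(event: 'exit', cb: () => void): void }) {
    const exited = new Deferred<void>();
    cp.once('exit', () => exited.resolve(undefined));
    return exited.promise;
}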

View File

@@ -1,96 +0,0 @@
import type * as monacoEditor from 'monaco-editor';
export interface StandardLibs {
'@types/node/globals.d.ts': string,
'@types/node/buffer.d.ts': string,
'@types/node/process.d.ts': string,
'@types/node/events.d.ts': string,
'@types/node/stream.d.ts': string,
'@types/node/fs.d.ts': string,
'@types/node/net.d.ts': string,
'@types/node/child_process.d.ts': string,
}
export interface ScryptedLibs {
'@types/sdk/settings-mixin.d.ts': string,
'@types/sdk/storage-settings.d.ts': string,
'@types/sdk/types.d.ts': string,
'@types/sdk/index.d.ts': string,
}
export function createMonacoEvalDefaultsWithLibs(standardLibs: StandardLibs, scryptedLibs: ScryptedLibs, extraLibs: { [lib: string]: string }) {
// const libs = Object.assign(scryptedLibs, extraLibs);
function monacoEvalDefaultsFunction(monaco: typeof monacoEditor, standardLibs: StandardLibs, scryptedLibs: ScryptedLibs, extraLibs: { [lib: string]: string }) {
monaco.languages.typescript.typescriptDefaults.setDiagnosticsOptions(
Object.assign(
{},
monaco.languages.typescript.typescriptDefaults.getDiagnosticsOptions(),
{
diagnosticCodesToIgnore: [1108, 1375, 1378],
}
)
);
monaco.languages.typescript.typescriptDefaults.setCompilerOptions(
Object.assign(
{},
monaco.languages.typescript.typescriptDefaults.getCompilerOptions(),
{
moduleResolution:
monaco.languages.typescript.ModuleResolutionKind.NodeJs,
}
)
);
const libs: any = {
...scryptedLibs,
...extraLibs,
};
const catLibs = Object.values(libs).join('\n');
const catlibsNoExport = Object.keys(libs)
.map(lib => libs[lib]).map(lib =>
lib.toString().replace(/export /g, '').replace(/import.*?/g, ''))
.join('\n');
monaco.languages.typescript.typescriptDefaults.addExtraLib(`
${catLibs}
declare global {
${catlibsNoExport}
const log: Logger;
const deviceManager: DeviceManager;
const endpointManager: EndpointManager;
const mediaManager: MediaManager;
const systemManager: SystemManager;
const eventSource: ScryptedDevice;
const eventDetails: EventDetails;
const eventData: any;
}
`,
"node_modules/@types/scrypted__sdk/types/index.d.ts"
);
for (const lib of Object.keys(standardLibs)) {
monaco.languages.typescript.typescriptDefaults.addExtraLib(
standardLibs[lib as keyof StandardLibs],
lib,
);
}
}
return `(function() {
const standardLibs = ${JSON.stringify(standardLibs)};
const scryptedLibs = ${JSON.stringify(scryptedLibs)};
const extraLibs = ${JSON.stringify(extraLibs)};
return (monaco) => {
(${monacoEvalDefaultsFunction})(monaco, standardLibs, scryptedLibs, extraLibs);
}
})();
`;
}

View File

@@ -1,5 +1,3 @@
import type { ScryptedDeviceBase } from "@scrypted/sdk";
export interface ScriptDevice {
/**
* @deprecated Use the default export to specify the device handler.
@@ -8,5 +6,3 @@ export interface ScriptDevice {
handle<T>(handler?: T & object): void;
handleTypes(...interfaces: string[]): void;
}
export declare const device: ScryptedDeviceBase & ScriptDevice;

View File

@@ -1,11 +1,9 @@
import sdk, { LockState, MixinDeviceBase, PanTiltZoomMovement, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedInterfaceDescriptors, ScryptedMimeTypes } from "@scrypted/sdk";
import { SettingsMixinDeviceBase } from "@scrypted/sdk/settings-mixin";
import { StorageSettings } from "@scrypted/sdk/storage-settings";
import sdk, { MixinDeviceBase, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedInterfaceDescriptors } from "@scrypted/sdk";
import fs from 'fs';
import type { TranspileOptions } from "typescript";
import vm from "vm";
import { createMonacoEvalDefaultsWithLibs, ScryptedLibs, StandardLibs } from "./monaco-libs";
import { ScriptDevice } from "./monaco/script-device";
import path from 'path';
const { systemManager, deviceManager, mediaManager, endpointManager } = sdk;
@@ -29,18 +27,18 @@ export function readFileAsString(f: string) {
return fs.readFileSync(f).toString();;
}
function getScryptedLibs(): ScryptedLibs {
function getTypeDefs() {
const scryptedTypesDefs = readFileAsString('@types/sdk/types.d.ts');
const scryptedIndexDefs = readFileAsString('@types/sdk/index.d.ts');
return {
"@types/sdk/index.d.ts": readFileAsString('@types/sdk/index.d.ts'),
"@types/sdk/settings-mixin.d.ts": readFileAsString('@types/sdk/settings-mixin.d.ts'),
"@types/sdk/storage-settings.d.ts": readFileAsString('@types/sdk/storage-settings.d.ts'),
"@types/sdk/types.d.ts": readFileAsString('@types/sdk/types.d.ts'),
}
scryptedIndexDefs,
scryptedTypesDefs,
};
}
export async function scryptedEval(device: ScryptedDeviceBase, script: string, extraLibs: { [lib: string]: string }, params: { [name: string]: any }) {
const libs = Object.assign({
types: getScryptedLibs()['@types/sdk/types.d.ts'],
types: getTypeDefs().scryptedTypesDefs,
}, extraLibs);
const allScripts = Object.values(libs).join('\n').toString() + script;
let compiled: string;
@@ -63,9 +61,9 @@ export async function scryptedEval(device: ScryptedDeviceBase, script: string, e
const allParams = Object.assign({}, params, {
sdk,
fs: require('realfs'),
ScryptedDeviceBase,
MixinDeviceBase,
StorageSettings,
systemManager,
deviceManager,
endpointManager,
@@ -75,9 +73,6 @@ export async function scryptedEval(device: ScryptedDeviceBase, script: string, e
localStorage: device.storage,
device,
exports: {} as any,
PanTiltZoomMovement,
SettingsMixinDeviceBase,
ScryptedMimeTypes,
ScryptedInterface,
ScryptedDeviceType,
// @ts-expect-error
@@ -114,18 +109,92 @@ export async function scryptedEval(device: ScryptedDeviceBase, script: string, e
}
export function createMonacoEvalDefaults(extraLibs: { [lib: string]: string }) {
const standardlibs: StandardLibs = {
"@types/node/globals.d.ts": readFileAsString('@types/node/globals.d.ts'),
"@types/node/buffer.d.ts": readFileAsString('@types/node/buffer.d.ts'),
"@types/node/process.d.ts": readFileAsString('@types/node/process.d.ts'),
"@types/node/events.d.ts": readFileAsString('@types/node/events.d.ts'),
"@types/node/stream.d.ts": readFileAsString('@types/node/stream.d.ts'),
"@types/node/fs.d.ts": readFileAsString('@types/node/fs.d.ts'),
"@types/node/net.d.ts": readFileAsString('@types/node/net.d.ts'),
"@types/node/child_process.d.ts": readFileAsString('@types/node/child_process.d.ts'),
};
const safeLibs: any = {};
return createMonacoEvalDefaultsWithLibs(standardlibs, getScryptedLibs(), extraLibs);
for (const safeLib of [
'@types/node/globals.d.ts',
'@types/node/buffer.d.ts',
'@types/node/process.d.ts',
'@types/node/events.d.ts',
'@types/node/stream.d.ts',
'@types/node/fs.d.ts',
'@types/node/net.d.ts',
'@types/node/child_process.d.ts',
]) {
safeLibs[`node_modules/${safeLib}`] = readFileAsString(safeLib)
}
const libs = Object.assign(getTypeDefs(), extraLibs);
function monacoEvalDefaultsFunction(monaco: any, safeLibs: any, libs: any) {
monaco.languages.typescript.typescriptDefaults.setDiagnosticsOptions(
Object.assign(
{},
monaco.languages.typescript.typescriptDefaults.getDiagnosticsOptions(),
{
diagnosticCodesToIgnore: [1108, 1375, 1378],
}
)
);
monaco.languages.typescript.typescriptDefaults.setCompilerOptions(
Object.assign(
{},
monaco.languages.typescript.typescriptDefaults.getCompilerOptions(),
{
moduleResolution:
monaco.languages.typescript.ModuleResolutionKind.NodeJs,
}
)
);
const catLibs = Object.values(libs).join('\n');
const catlibsNoExport = Object.keys(libs).filter(lib => lib !== 'sdk')
.map(lib => libs[lib]).map(lib =>
lib.toString().replace(/export /g, '').replace(/import.*?/g, ''))
.join('\n');
monaco.languages.typescript.typescriptDefaults.addExtraLib(`
${catLibs}
declare global {
${catlibsNoExport}
const log: Logger;
const deviceManager: DeviceManager;
const endpointManager: EndpointManager;
const mediaManager: MediaManager;
const systemManager: SystemManager;
const mqtt: MqttClient;
const device: ScryptedDeviceBase & { pathname : string };
}
`,
"node_modules/@types/scrypted__sdk/types/index.d.ts"
);
monaco.languages.typescript.typescriptDefaults.addExtraLib(
libs['sdk'],
"node_modules/@types/scrypted__sdk/index.d.ts"
);
for (const lib of Object.keys(safeLibs)) {
monaco.languages.typescript.typescriptDefaults.addExtraLib(
safeLibs[lib],
lib,
);
}
}
return `(function() {
const safeLibs = ${JSON.stringify(safeLibs)};
const libs = ${JSON.stringify(libs)};
return (monaco) => {
(${monacoEvalDefaultsFunction})(monaco, safeLibs, libs);
}
})();
`;
}
export interface ScriptDeviceImpl extends ScriptDevice {

View File

@@ -19,7 +19,7 @@ function isPi(model: string) {
export function isRaspberryPi() {
let cpuInfo: string;
try {
cpuInfo = require('fs').readFileSync('/proc/cpuinfo', { encoding: 'utf8' });
cpuInfo = require('realfs').readFileSync('/proc/cpuinfo', { encoding: 'utf8' });
}
catch (e) {
// if this fails, this is probably not a pi
@@ -70,7 +70,11 @@ export function getH264DecoderArgs(): CodecArgs {
],
};
if (os.platform() === 'linux') {
if (isRaspberryPi()) {
ret['Raspberry Pi'] = ['-c:v', 'h264_mmal'];
ret[V4L2] = ['-c:v', 'h264_v4l2m2m'];
}
else if (os.platform() === 'linux') {
ret[V4L2] = ['-c:v', 'h264_v4l2m2m'];
}
else if (os.platform() === 'win32') {

View File

@@ -1,21 +1,29 @@
import { createActivityTimeout } from '@scrypted/common/src/activity-timeout';
import { cloneDeep } from '@scrypted/common/src/clone-deep';
import { Deferred } from "@scrypted/common/src/deferred";
import { listenZeroSingleClient } from '@scrypted/common/src/listen-cluster';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from '@scrypted/common/src/media-helpers';
import { createRtspParser } from "@scrypted/common/src/rtsp-server";
import { StreamChunk, StreamParser } from '@scrypted/common/src/stream-parser';
import sdk, { FFmpegInput, RequestMediaStreamOptions, ResponseMediaStreamOptions } from "@scrypted/sdk";
import child_process, { ChildProcess, StdioOptions } from 'child_process';
import { EventEmitter } from 'events';
import { Server } from 'net';
import { Duplex } from 'stream';
import { cloneDeep } from './clone-deep';
import { Deferred } from "./deferred";
import { listenZeroSingleClient } from './listen-cluster';
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from './media-helpers';
import { createRtspParser } from "./rtsp-server";
import { parseSdp } from "./sdp-utils";
import { StreamChunk, StreamParser } from './stream-parser';
const { mediaManager } = sdk;
export interface ParserSession<T extends string> {
parserSpecific?: any;
sdp: Promise<string>;
sdp: Promise<Buffer[]>;
resetActivityTimer?: () => void,
negotiateMediaStream(requestMediaStream: RequestMediaStreamOptions, inputVideoCodec: string, inputAudioCodec: string): ResponseMediaStreamOptions;
negotiateMediaStream(requestMediaStream: RequestMediaStreamOptions): ResponseMediaStreamOptions;
inputAudioCodec?: string;
inputVideoCodec?: string;
inputVideoResolution?: {
width: number,
height: number,
},
start(): void;
kill(error?: Error): void;
killed: Promise<void>;
@@ -23,7 +31,6 @@ export interface ParserSession<T extends string> {
emit(container: T, chunk: StreamChunk): this;
on(container: T, callback: (chunk: StreamChunk) => void): this;
on(error: 'error', callback: (e: Error) => void): this;
removeListener(event: T | 'killed', callback: any): this;
once(event: T | 'killed', listener: (...args: any[]) => void): this;
}
@@ -95,37 +102,65 @@ export async function parseAudioCodec(cp: ChildProcess) {
export function setupActivityTimer(container: string, kill: (error?: Error) => void, events: {
once(event: 'killed', callback: () => void): void,
}, timeout: number) {
const ret = createActivityTimeout(timeout, () => {
let dataTimeout: NodeJS.Timeout;
function dataKill() {
const str = 'timeout waiting for data, killing parser session';
console.error(str, container);
kill(new Error(str));
});
events.once('killed', () => ret.clearActivityTimer());
return ret;
}
let lastTime = Date.now();
function resetActivityTimer() {
lastTime = Date.now();
}
function clearActivityTimer() {
clearInterval(dataTimeout);
}
if (timeout) {
dataTimeout = setInterval(() => {
if (Date.now() > lastTime + timeout) {
clearInterval(dataTimeout);
dataTimeout = undefined;
dataKill();
}
}, timeout);
}
events.once('killed', () => clearInterval(dataTimeout));
resetActivityTimer();
return {
resetActivityTimer,
clearActivityTimer,
}
}
export async function startParserSession<T extends string>(ffmpegInput: FFmpegInput, options: ParserOptions<T>): Promise<ParserSession<T>> {
const { console } = options;
let isActive = true;
const events = new EventEmitter();
// need this to prevent kill from throwing due to uncaught Error during cleanup
events.on('error', () => {});
events.on('error', e => console.error('rebroadcast error', e));
let inputAudioCodec: string;
let inputVideoCodec: string;
let inputVideoResolution: string[];
let sessionKilled: any;
const killed = new Promise<void>(resolve => {
sessionKilled = resolve;
});
const sdpDeferred = new Deferred<string>();
function kill(error?: Error) {
error ||= new Error('killed');
if (isActive) {
events.emit('killed');
events.emit('error', error);
events.emit('error', error || new Error('killed'));
}
if (!sdpDeferred.finished)
sdpDeferred.reject(error);
isActive = false;
sessionKilled();
safeKillFFmpeg(cp);
@@ -133,7 +168,6 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
const args = ffmpegInput.inputArguments.slice();
const env = ffmpegInput.env ? { ...process.env, ...ffmpegInput.env } : undefined;
const ensureActive = (killed: () => void) => {
if (!isActive) {
@@ -151,7 +185,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
const parser: StreamParser = options.parsers[container as T];
if (parser.tcpProtocol) {
const tcp = await listenZeroSingleClient('127.0.0.1');
const tcp = await listenZeroSingleClient();
const url = new URL(parser.tcpProtocol);
url.port = tcp.port.toString();
args.push(
@@ -166,7 +200,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
try {
ensureActive(() => socket.destroy());
for await (const chunk of parser.parse(socket, undefined, undefined)) {
for await (const chunk of parser.parse(socket, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
events.emit(container, chunk);
resetActivityTimer();
}
@@ -189,9 +223,8 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
// start ffmpeg process with child process pipes
args.unshift('-hide_banner');
safePrintFFmpegArguments(console, args);
const cp = child_process.spawn(ffmpegInput.ffmpegPath || await mediaManager.getFFmpegPath(), args, {
const cp = child_process.spawn(await mediaManager.getFFmpegPath(), args, {
stdio,
env,
});
ffmpegLogInitialOutput(console, cp, undefined, options?.storage);
cp.on('exit', () => kill(new Error('ffmpeg exited')));
@@ -214,7 +247,7 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
try {
const { resetActivityTimer } = setupActivityTimer(container, kill, events, options?.timeout);
for await (const chunk of parser.parse(pipe as any, undefined, undefined)) {
for await (const chunk of parser.parse(pipe as any, parseInt(inputVideoResolution?.[2]), parseInt(inputVideoResolution?.[3]))) {
await deferredStart.promise;
events.emit(container, chunk);
resetActivityTimer();
@@ -229,23 +262,42 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
const rtsp = (options.parsers as any).rtsp as ReturnType<typeof createRtspParser>;
rtsp.sdp.then(sdp => {
console?.log('sdp received from ffmpeg', sdp);
sdpDeferred.resolve(sdp);
const parsed = parseSdp(sdp);
const audio = parsed.msections.find(msection => msection.type === 'audio');
const video = parsed.msections.find(msection => msection.type === 'video');
inputVideoCodec = video?.codec;
inputAudioCodec = audio?.codec;
});
const sdp = new Deferred<Buffer[]>();
rtsp.sdp.then(r => sdp.resolve([Buffer.from(r)]));
killed.then(() => sdp.reject(new Error("ffmpeg killed before sdp could be parsed")));
start();
return {
start() {
deferredStart.resolve();
},
sdp: sdpDeferred.promise,
sdp: sdp.promise,
get inputAudioCodec() {
return inputAudioCodec;
},
get inputVideoCodec() {
return inputVideoCodec;
},
get inputVideoResolution() {
return {
width: parseInt(inputVideoResolution?.[2]),
height: parseInt(inputVideoResolution?.[3]),
}
},
get isActive() { return isActive },
kill(error?: Error) {
kill(error);
},
killed,
negotiateMediaStream: (requestMediaStream: RequestMediaStreamOptions, inputVideoCodec, inputAudioCodec) => {
negotiateMediaStream: () => {
const ret: ResponseMediaStreamOptions = cloneDeep(ffmpegInput.mediaStreamOptions) || {
id: undefined,
name: undefined,
@@ -287,3 +339,64 @@ export async function startParserSession<T extends string>(ffmpegInput: FFmpegIn
}
};
}
export interface Rebroadcaster {
server: Server;
port: number;
url: string;
clients: number;
}
export interface RebroadcastSessionCleanup {
(): void;
}
export interface RebroadcasterConnection {
writeData: (data: StreamChunk) => number;
destroy: () => void;
}
export interface RebroadcasterOptions {
connect?: (connection: RebroadcasterConnection) => RebroadcastSessionCleanup | undefined;
console?: Console;
idle?: {
timeout: number,
callback: () => void,
},
}
export function handleRebroadcasterClient(socket: Duplex, options?: RebroadcasterOptions) {
const firstWriteData = (data: StreamChunk) => {
if (data.startStream) {
socket.write(data.startStream)
}
connection.writeData = writeData;
return writeData(data);
}
const writeData = (data: StreamChunk) => {
for (const chunk of data.chunks) {
socket.write(chunk);
}
return socket.writableLength;
};
const destroy = () => {
const cb = cleanupCallback;
cleanupCallback = undefined;
socket.destroy();
cb?.();
}
const connection: RebroadcasterConnection = {
writeData: firstWriteData,
destroy,
};
let cleanupCallback = options?.connect(connection);
socket.once('close', () => {
destroy();
});
socket.on('error', e => options?.console?.log('client stream ended'));
}
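
For reference, a sketch of wiring handleRebroadcasterClient to a TCP server (illustrative, not part of this commit; the session emitting 'rtsp' StreamChunk events is an assumption):

import net from 'net';

const server = net.createServer(socket => {
    handleRebroadcasterClient(socket, {
        console,
        connect(connection) {
            // assumed: session is a ParserSession<'rtsp'> emitting StreamChunk events
            const listener = (chunk: StreamChunk) => connection.writeData(chunk);
            session.on('rtsp', listener);
            // the returned cleanup runs when this client disconnects
            return () => session.removeListener('rtsp', listener);
        },
    });
});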

View File

@@ -79,4 +79,4 @@ export async function bind(server: dgram.Socket, port: number) {
}
}
export { ListenZeroSingleClientTimeoutError, listenZero, listenZeroSingleClient } from "../../server/src/listen-zero";
export { ListenZeroSingleClientTimeoutError, listenZero, listenZeroSingleClient } from "@scrypted/server/src/listen-zero";

View File

@@ -1 +1 @@
export { safeKillFFmpeg, ffmpegLogInitialOutput, safePrintFFmpegArguments } from '../../server/src/media-helpers';
export * from '@scrypted/server/src/media-helpers';

View File

@@ -54,18 +54,18 @@ export async function read16BELengthLoop(readable: Readable, options: {
readable.on('readable', read);
await once(readable, 'end');
throw new StreamEndError('read16BELengthLoop');
throw new Error('stream ended');
}
export class StreamEndError extends Error {
constructor(where: string) {
super(`stream ended: ${where}`);
constructor() {
super('stream ended');
}
}
export async function readLength(readable: Readable, length: number): Promise<Buffer> {
if (readable.readableEnded || readable.destroyed)
throw new StreamEndError('readLength start');
throw new StreamEndError();
if (!length) {
return Buffer.alloc(0);
@@ -88,12 +88,12 @@ export async function readLength(readable: Readable, length: number): Promise<Bu
}
if (readable.readableEnded || readable.destroyed)
reject(new StreamEndError('readLength readable'));
reject(new Error("stream ended during read"));
};
const e = () => {
cleanup();
reject(new StreamEndError('readLength end'));
reject(new StreamEndError())
};
const cleanup = () => {
@@ -136,17 +136,12 @@ export async function readLine(readable: Readable) {
}
export async function readString(readable: Readable | Promise<Readable>) {
const buffer = await readBuffer(readable);
return buffer.toString();
}
export async function readBuffer(readable: Readable | Promise<Readable>) {
const buffers: Buffer[] = [];
let data = '';
readable = await readable;
readable.on('data', buffer => {
buffers.push(buffer);
data += buffer.toString();
});
readable.resume();
await once(readable, 'end')
return Buffer.concat(buffers);
return data;
}

View File

@@ -1,5 +1,5 @@
import { RpcPeer } from "../../server/src/rpc";
import { createRpcSerializer } from "../../server/src/rpc-serializer";
import { RpcPeer } from "@scrypted/server/src/rpc";
import { createRpcSerializer } from "@scrypted/server/src/rpc-serializer";
import type { RTCSignalingSession } from "@scrypted/sdk";
export async function createBrowserSignalingSession(ws: WebSocket, localName: string, remoteName: string) {

View File

@@ -41,15 +41,15 @@ export function isPeerConnectionClosed(pc: RTCPeerConnection) {
|| pc.iceConnectionState === 'closed';
}
// function silence() {
// let ctx = new AudioContext(), oscillator = ctx.createOscillator();
// const dest = ctx.createMediaStreamDestination();
// oscillator.connect(dest);
// oscillator.start();
// const ret = dest.stream.getAudioTracks()[0];
// ret.enabled = false;
// return ret;
// }
function silence() {
let ctx = new AudioContext(), oscillator = ctx.createOscillator();
const dest = ctx.createMediaStreamDestination();
oscillator.connect(dest);
oscillator.start();
const ret = dest.stream.getAudioTracks()[0];
ret.enabled = false;
return ret;
}
function createOptions() {
const options: RTCSignalingOptions = {

View File

@@ -89,44 +89,27 @@ export const H264_NAL_TYPE_FU_B = 29;
export const H264_NAL_TYPE_MTAP16 = 26;
export const H264_NAL_TYPE_MTAP32 = 27;
export const H265_NAL_TYPE_AGG = 48;
export const H265_NAL_TYPE_VPS = 32;
export const H265_NAL_TYPE_SPS = 33;
export const H265_NAL_TYPE_PPS = 34;
export const H265_NAL_TYPE_IDR_N = 19;
export const H265_NAL_TYPE_IDR_W = 20;
export function findH264NaluType(streamChunk: StreamChunk, naluType: number) {
if (streamChunk.type !== 'h264')
return;
return findH264NaluTypeInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12), naluType);
}
export function findH265NaluType(streamChunk: StreamChunk, naluType: number) {
if (streamChunk.type !== 'h265')
return;
return findH265NaluTypeInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12), naluType);
}
export function parseH264NaluType(firstNaluByte: number) {
return firstNaluByte & 0x1f;
}
export function findH264NaluTypeInNalu(nalu: Buffer, naluType: number) {
const checkNaluType = parseH264NaluType(nalu[0]);
const checkNaluType = nalu[0] & 0x1f;
if (checkNaluType === H264_NAL_TYPE_STAP_A) {
let pos = 1;
while (pos < nalu.length) {
const naluLength = nalu.readUInt16BE(pos);
pos += 2;
const stapaType = parseH264NaluType(nalu[pos]);
const stapaType = nalu[pos] & 0x1f;
if (stapaType === naluType)
return nalu.subarray(pos, pos + naluLength);
pos += naluLength;
}
}
else if (checkNaluType === H264_NAL_TYPE_FU_A) {
const fuaType = parseH264NaluType(nalu[1]);
const fuaType = nalu[1] & 0x1f;
const isFuStart = !!(nalu[1] & 0x80);
if (fuaType === naluType && isFuStart)
@@ -138,52 +121,39 @@ export function findH264NaluTypeInNalu(nalu: Buffer, naluType: number) {
return;
}
function parseH265NaluType(firstNaluByte: number) {
return (firstNaluByte & 0b01111110) >> 1;
}
export function findH265NaluTypeInNalu(nalu: Buffer, naluType: number) {
const checkNaluType = parseH265NaluType(nalu[0]);
if (checkNaluType === H265_NAL_TYPE_AGG) {
let pos = 1;
while (pos < nalu.length) {
const naluLength = nalu.readUInt16BE(pos);
pos += 2;
const stapaType = parseH265NaluType(nalu[pos]);
if (stapaType === naluType)
return nalu.subarray(pos, pos + naluLength);
pos += naluLength;
}
}
else if (checkNaluType === naluType) {
return nalu;
}
return;
}
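
A quick sanity check of the NAL header bit layouts used in this module (illustrative only): H.264 keeps the type in the low 5 bits of the first header byte, H.265 in bits 1 through 6.

// H.264 SPS NAL units conventionally start with 0x67: 0x67 & 0x1f === 7 (H264_NAL_TYPE_SPS)
console.log(parseH264NaluType(0x67)); // 7
// H.265 VPS NAL units start with 0x40: (0x40 & 0b01111110) >> 1 === 32 (H265_NAL_TYPE_VPS)
console.log(parseH265NaluType(0x40)); // 32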
export function getNaluTypes(streamChunk: StreamChunk) {
if (streamChunk.type !== 'h264')
return new Set<number>();
return getNaluTypesInNalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12))
}
export function getNaluFragmentInformation(nalu: Buffer) {
const naluType = nalu[0] & 0x1f;
const fua = naluType === H264_NAL_TYPE_FU_A;
return {
fua,
fuaStart: fua && !!(nalu[1] & 0x80),
fuaEnd: fua && !!(nalu[1] & 0x40),
}
}
export function getNaluTypesInNalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
const ret = new Set<number>();
const naluType = parseH264NaluType(nalu[0]);
const naluType = nalu[0] & 0x1f;
if (naluType === H264_NAL_TYPE_STAP_A) {
ret.add(H264_NAL_TYPE_STAP_A);
let pos = 1;
while (pos < nalu.length) {
const naluLength = nalu.readUInt16BE(pos);
pos += 2;
const stapaType = parseH264NaluType(nalu[pos]);
const stapaType = nalu[pos] & 0x1f;
ret.add(stapaType);
pos += naluLength;
}
}
else if (naluType === H264_NAL_TYPE_FU_A) {
ret.add(H264_NAL_TYPE_FU_A);
const fuaType = parseH264NaluType(nalu[1]);
const fuaType = nalu[1] & 0x1f;
if (fuaRequireStart) {
const isFuStart = !!(nalu[1] & 0x80);
if (isFuStart)
@@ -205,33 +175,6 @@ export function getNaluTypesInNalu(nalu: Buffer, fuaRequireStart = false, fuaReq
return ret;
}
export function getH265NaluTypes(streamChunk: StreamChunk) {
if (streamChunk.type !== 'h265')
return new Set<number>();
return getNaluTypesInH265Nalu(streamChunk.chunks[streamChunk.chunks.length - 1].subarray(12))
}
export function getNaluTypesInH265Nalu(nalu: Buffer, fuaRequireStart = false, fuaRequireEnd = false) {
const ret = new Set<number>();
const naluType = parseH265NaluType(nalu[0]);
if (naluType === H265_NAL_TYPE_AGG) {
ret.add(H265_NAL_TYPE_AGG);
let pos = 1;
while (pos < nalu.length) {
const naluLength = nalu.readUInt16BE(pos);
pos += 2;
const stapaType = parseH265NaluType(nalu[pos]);
ret.add(stapaType);
pos += naluLength;
}
}
else {
ret.add(naluType);
}
return ret;
}
export function createRtspParser(options?: StreamParserOptions): RtspStreamParser {
let resolve: any;
@@ -247,30 +190,17 @@ export function createRtspParser(options?: StreamParserOptions): RtspStreamParse
'tcp',
...(options?.vcodec || []),
...(options?.acodec || []),
// linux and windows seem to support 64000 but darwin is 32000?
'-pkt_size', '32000',
'-f', 'rtsp',
],
findSyncFrame(streamChunks: StreamChunk[]) {
for (let prebufferIndex = 0; prebufferIndex < streamChunks.length; prebufferIndex++) {
const streamChunk = streamChunks[prebufferIndex];
if (streamChunk.type === 'h264') {
const naluTypes = getNaluTypes(streamChunk);
if (naluTypes.has(H264_NAL_TYPE_SPS) || naluTypes.has(H264_NAL_TYPE_IDR)) {
return streamChunks.slice(prebufferIndex);
}
if (streamChunk.type !== 'h264') {
continue;
}
else if (streamChunk.type === 'h265') {
const naluTypes = getH265NaluTypes(streamChunk);
if (naluTypes.has(H265_NAL_TYPE_VPS)
|| naluTypes.has(H265_NAL_TYPE_SPS)
|| naluTypes.has(H265_NAL_TYPE_PPS)
|| naluTypes.has(H265_NAL_TYPE_IDR_N)
|| naluTypes.has(H265_NAL_TYPE_IDR_W)
) {
return streamChunks.slice(prebufferIndex);
}
if (findH264NaluType(streamChunk, H264_NAL_TYPE_SPS) || findH264NaluType(streamChunk, H264_NAL_TYPE_IDR)) {
return streamChunks.slice(prebufferIndex);
}
}
@@ -396,7 +326,7 @@ export class RtspClient extends RtspBase {
hasGetParameter = true;
contentBase: string;
constructor(public readonly url: string) {
constructor(public url: string) {
super();
const u = new URL(url);
const port = parseInt(u.port) || 554;
@@ -508,47 +438,11 @@ export class RtspClient extends RtspBase {
}
}
catch (e) {
this.client.destroy(e as Error);
this.client.destroy(e);
throw e;
}
}
async *handleStream(): AsyncGenerator<{
rtcp: boolean,
header: Buffer,
packet: Buffer,
channel: number,
}> {
while (true) {
const header = await readLength(this.client, 4);
// can this even happen? since the RTSP request method isn't a fixed
// value like the "RTSP" in the RTSP response, I don't think so?
if (header[0] !== RTSP_FRAME_MAGIC) {
if (header.toString() !== 'RTSP')
throw this.createBadHeader(header);
this.client.unshift(header);
// do what with this?
const message = await super.readMessage();
const body = await this.readBody(parseHeaders(message));
continue;
}
const length = header.readUInt16BE(2);
const packet = await readLength(this.client, length);
const id = header.readUInt8(1);
yield {
channel: id,
rtcp: id % 2 === 1,
header,
packet,
}
}
}
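// Note (illustrative, not part of this diff): handleStream parses RTP/RTCP
// interleaved on the RTSP TCP connection (RFC 2326 section 10.12). Each frame is
// a 4 byte header followed by the payload, assuming RTSP_FRAME_MAGIC is the
// interleaved '$' (0x24) marker:
//   const header = Buffer.from([0x24, 0 /* channel */, 0x05, 0xdc /* length */]);
//   header.readUInt8(1);    // channel id from the SETUP interleaved pair
//   header.readUInt16BE(2); // 1500 payload bytes follow
// Even channels carry RTP and odd channels carry RTCP, hence rtcp: id % 2 === 1.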
async readLoop() {
const deferred = new Deferred<void>();
@@ -610,8 +504,7 @@ export class RtspClient extends RtspBase {
}
}
catch (e) {
if (!deferred.finished)
deferred.reject(e as Error);
deferred.reject(e);
this.client.destroy();
}
};
@@ -647,12 +540,10 @@ export class RtspClient extends RtspBase {
throw new Error('no WWW-Authenticate found');
const { BASIC } = await import('http-auth-utils');
// @ts-ignore
const { parseHTTPHeadersQuotedKeyValueSet } = await import('http-auth-utils/dist/utils');
if (this.wwwAuthenticate.includes('Basic')) {
const parsedUrl = new URL(this.url);
const hash = BASIC.computeHash({ username: parsedUrl.username, password: parsedUrl.password });
const hash = BASIC.computeHash(url);
return `Basic ${hash}`;
}
@@ -765,10 +656,7 @@ export class RtspClient extends RtspBase {
Accept: 'application/sdp',
});
this.contentBase = response.headers['content-base'] || response.headers['content-location'];
// content base may be a relative path? seems odd.
if (this.contentBase)
this.contentBase = new URL(this.contentBase, this.url).toString();
this.contentBase = response.headers['content-base'] || response.headers['content-location'];;
return response;
}
@@ -1166,7 +1054,7 @@ export class RtspServer {
}
export async function listenSingleRtspClient<T extends RtspServer>(options?: {
hostname: string,
hostname?: string,
pathToken?: string,
createServer?(duplex: Duplex): T,
}) {

View File

@@ -227,10 +227,6 @@ export function parseRtpMap(mline: ReturnType<typeof parseMLine>, rtpmap: string
codec = 'pcm_alaw';
ffmpegEncoder = 'pcm_alaw';
}
else if (mline.payloadTypes?.includes(14)) {
codec = 'mp3';
ffmpegEncoder = 'mp3';
}
else {
// ffmpeg seems to omit the rtpmap type for pcm alaw when creating sdp?
// is this the default?

View File

@@ -1 +1 @@
export { sleep } from "../../server/src/sleep";
export * from "@scrypted/server/src/sleep"

View File

@@ -1,50 +1,19 @@
import sdk, { ForkOptions, PluginFork } from '@scrypted/sdk';
import sdk, { PluginFork } from '@scrypted/sdk';
import worker_threads from 'worker_threads';
import { createAsyncQueue } from './async-queue';
import os from 'os';
export type Zygote<T> = () => PluginFork<T>;
export function createService<T, V>(options: ForkOptions, create: (t: Promise<T>) => Promise<V>): {
getResult: () => Promise<V>,
terminate: () => void,
} {
let killed = false;
let currentResult: Promise<V>;
let currentFork: ReturnType<typeof sdk.fork<T>>;
export function createZygote<T>(): Zygote<T> {
if (!worker_threads.isMainThread)
return;
return {
getResult() {
if (killed)
throw new Error('service terminated');
if (currentResult)
return currentResult;
currentFork = sdk.fork<T>(options);
currentFork.worker.on('exit', () => currentResult = undefined);
currentResult = create(currentFork.result);
currentResult.catch(() => currentResult = undefined);
return currentResult;
},
terminate() {
if (killed)
return;
killed = true;
currentFork.worker.terminate();
currentFork = undefined;
currentResult = undefined;
}
}
}
export function createZygote<T>(options?: ForkOptions): Zygote<T> {
let zygote = sdk.fork<T>(options);
let zygote = sdk.fork<T>();
function* next() {
while (true) {
const cur = zygote;
zygote = sdk.fork<T>(options);
zygote = sdk.fork<T>();
yield cur;
}
}
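// Hedged usage sketch (assumes PluginFork exposes worker and result as used by
// createService above; the fork type name is illustrative):
//   const zygote = createZygote<SomeForkApi>();
//   const fork = zygote();           // hands out the pre-forked worker
//   const api = await fork.result;   // remote interface from the fork
//   fork.worker.on('exit', () => { /* refork or clean up as needed */ });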

View File

@@ -1,7 +1,6 @@
{
"compilerOptions": {
"module": "commonjs",
"moduleResolution": "Node16",
"target": "esnext",
"noImplicitAny": true,
"outDir": "./dist",

Binary files not shown (22 image assets; previews omitted).
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="feather feather-cpu"><rect x="4" y="4" width="16" height="16" rx="2" ry="2"></rect><rect x="9" y="9" width="6" height="6"></rect><line x1="9" y1="1" x2="9" y2="4"></line><line x1="15" y1="1" x2="15" y2="4"></line><line x1="9" y1="20" x2="9" y2="23"></line><line x1="15" y1="20" x2="15" y2="23"></line><line x1="20" y1="9" x2="23" y2="9"></line><line x1="20" y1="14" x2="23" y2="14"></line><line x1="1" y1="9" x2="4" y2="9"></line><line x1="1" y1="14" x2="4" y2="14"></line></svg>


View File

@@ -0,0 +1,26 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
<link rel="icon" href="<%= BASE_URL %>favicon.ico">
<title>Scrypted Management Console</title>
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Material+Icons">
<link href="https://fonts.googleapis.com/css2?family=Lato:wght@300;400;700&display=swap" rel="stylesheet">
<link href="https://fonts.googleapis.com/css2?family=Quicksand:wght@300;400;500;600;700&display=swap" rel="stylesheet">
</head>
<body>
<noscript>
<strong>We're sorry but web doesn't work properly without JavaScript enabled. Please enable it to continue.</strong>
</noscript>
<div id="app"></div>
<!-- built files will be auto injected -->
</body>
</html>

View File

@@ -0,0 +1,45 @@
{
"name": "Scrypted Management Console",
"short_name": "Scrypted",
"icons": [
{
"src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-72x72.png",
"sizes": "72x72",
"type": "image/png"
},
{
"src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-96x96.png",
"sizes": "96x96",
"type": "image/png"
},
{
"src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-144x144.png",
"sizes": "144x144",
"type": "image/png"
},
{
"src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-152x152.png",
"sizes": "152x152",
"type": "image/png"
},
{
"src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-192x192.png",
"sizes": "192x192",
"type": "image/png"
},
{
"src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-384x384.png",
"sizes": "384x384",
"type": "image/png"
},
{
"src": "https://koush.github.io/scrypted/plugins/core/ui/img/icons/icon-512x512.png",
"sizes": "512x512",
"type": "image/png"
}
],
"start_url": "./index.html",
"display": "standalone",
"background_color": "#000000",
"theme_color": "#424242"
}

View File

@@ -0,0 +1,2 @@
User-agent: *
Disallow:

View File

@@ -1,12 +1,13 @@
# Home Assistant Addon Configuration
name: Scrypted
version: "v0.120.0-jammy-full"
version: "18-jammy-full.s6-v0.93.0"
slug: scrypted
description: Scrypted is a high performance home video integration and automation platform
url: "https://github.com/koush/scrypted"
arch:
- amd64
- aarch64
- armv7
init: false
ingress: true
ingress_port: 11080

View File

@@ -16,6 +16,6 @@ ENV NODE_OPTIONS="--dns-result-order=ipv4first"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION="20250101"
ENV SCRYPTED_BASE_VERSION="20240321"
CMD ["/bin/sh", "-c", "ulimit -c 0; exec npm --prefix /server exec scrypted-serve"]
CMD npm --prefix /server exec scrypted-serve

View File

@@ -14,7 +14,12 @@ ENV DEBIAN_FRONTEND=noninteractive
# base tools and development stuff
RUN apt-get update && apt-get -y install \
curl software-properties-common apt-utils \
build-essential \
cmake \
ffmpeg \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
pkg-config && \
apt-get -y update && \
apt-get -y upgrade
@@ -35,12 +40,16 @@ RUN apt-get -y install \
python3-setuptools \
python3-wheel
# these are necessary for pillow-simd, additional on disk size is small
# but could consider removing this.
RUN echo "Installing pillow-simd dependencies."
RUN apt-get -y install \
libjpeg-dev zlib1g-dev
# gstreamer native https://gstreamer.freedesktop.org/documentation/installing/on-linux.html?gi-language=c#install-gstreamer-on-ubuntu-or-debian
RUN echo "Installing gstreamer."
# python-codecs pygobject dependencies
RUN apt-get -y install libcairo2-dev libgirepository1.0-dev
RUN apt-get -y install \
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav \
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav gstreamer1.0-alsa \
gstreamer1.0-vaapi
# python3 gstreamer bindings
@@ -51,9 +60,8 @@ RUN apt-get -y install \
# allow pip to install to system
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
# ERROR: Cannot uninstall pip 24.0, RECORD file not found. Hint: The package was installed by debian.
# RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install debugpy
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install debugpy typing_extensions psutil
################################################################
# End section generated from template/Dockerfile.full.header
@@ -63,18 +71,9 @@ RUN python3 -m pip install debugpy
################################################################
FROM header as base
# vulkan
RUN apt -y install libvulkan1
# intel opencl for openvino
# intel opencl gpu for openvino
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-graphics.sh | bash
# NPU driver will SIGILL on openvino prior to 2024.5.0
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-npu.sh | bash
# amd opencl
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-amd-graphics.sh | bash
# python 3.9 from ppa.
# 3.9 is the version with prebuilt support for tensorflow lite
RUN add-apt-repository -y ppa:deadsnakes/ppa && \
@@ -86,8 +85,8 @@ RUN add-apt-repository -y ppa:deadsnakes/ppa && \
# allow pip to install to system
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
# RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install debugpy
RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install debugpy typing_extensions psutil
# Coral Edge TPU
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
@@ -95,20 +94,16 @@ RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" |
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
RUN apt-get -y update && apt-get -y install libedgetpu1-std
# set default shell to bash
RUN chsh -s /bin/bash
ENV SHELL="/bin/bash"
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
RUN test -f "/usr/bin/ffmpeg" && test -f "/usr/bin/python3" && test -f "/usr/bin/python3.9" && test -f "/usr/bin/python3.12"
RUN test -f "/usr/bin/ffmpeg" && test -f "/usr/bin/python3" && test -f "/usr/bin/python3.9" && test -f "/usr/bin/python3.10"
ENV SCRYPTED_FFMPEG_PATH="/usr/bin/ffmpeg"
ENV SCRYPTED_PYTHON_PATH="/usr/bin/python3"
ENV SCRYPTED_PYTHON39_PATH="/usr/bin/python3.9"
ENV SCRYPTED_PYTHON312_PATH="/usr/bin/python3.12"
ENV SCRYPTED_PYTHON310_PATH="/usr/bin/python3.10"
ENV SCRYPTED_DOCKER_FLAVOR="full"

View File

@@ -17,13 +17,16 @@ RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg -
RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_"$NODE_VERSION".x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list
RUN apt-get update && apt-get install -y nodejs
# intel opencl gpu for openvino
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-graphics.sh | bash
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
RUN test -f "/usr/bin/python3" && test -f "/usr/bin/python3.12"
RUN test -f "/usr/bin/python3" && test -f "/usr/bin/python3.10"
ENV SCRYPTED_PYTHON_PATH="/usr/bin/python3"
ENV SCRYPTED_PYTHON312_PATH="/usr/bin/python3.12"
ENV SCRYPTED_PYTHON310_PATH="/usr/bin/python3.10"
ENV SCRYPTED_DOCKER_FLAVOR="lite"

View File

@@ -1,9 +1,14 @@
ARG BASE="ghcr.io/koush/scrypted-common:20-jammy-full"
FROM $BASE
FROM ghcr.io/koush/scrypted:20-jammy-full.s6
ENV NVIDIA_DRIVER_CAPABILITIES=all
ENV NVIDIA_VISIBLE_DEVICES=all
WORKDIR /
# nvidia cudnn/libcublas etc.
# for some reason this is not provided by the nvidia container toolkit
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-nvidia-graphics.sh | bash
# Install miniconda
ENV CONDA_DIR /opt/conda
RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh && \
/bin/bash ~/miniconda.sh -b -p /opt/conda
# Put conda in path so we can use conda activate
ENV PATH=$CONDA_DIR/bin:$PATH
RUN conda install -c conda-forge cudatoolkit=11.2.2 cudnn=8.1.0
ENV CONDA_PREFIX=/opt/conda
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$CONDA_PREFIX/lib/

View File

@@ -46,6 +46,6 @@ ENV NODE_OPTIONS="--dns-result-order=ipv4first"
# changing this forces pip and npm to perform reinstalls.
# if this base image changes, this version must be updated.
ENV SCRYPTED_BASE_VERSION="20250101"
ENV SCRYPTED_BASE_VERSION="20240321"
CMD ["/bin/sh", "-c", "ulimit -c 0; exec npm --prefix /server exec scrypted-serve"]
CMD npm --prefix /server exec scrypted-serve

View File

@@ -1,3 +1,5 @@
version: "3.5"
# The Scrypted docker-compose.yml file typically resides at:
# ~/.scrypted/docker-compose.yml
@@ -19,9 +21,6 @@
services:
scrypted:
# LXC usage only
# lxc privileged: true
environment:
# Scrypted NVR Storage (Part 2 of 3)
@@ -32,30 +31,27 @@ services:
# section below.
# - SCRYPTED_NVR_VOLUME=/nvr
- SCRYPTED_WEBHOOK_UPDATE_AUTHORIZATION=Bearer ${WATCHTOWER_HTTP_API_TOKEN:-env_missing_fallback}
- SCRYPTED_WEBHOOK_UPDATE_AUTHORIZATION=Bearer SET_THIS_TO_SOME_RANDOM_TEXT
- SCRYPTED_WEBHOOK_UPDATE=http://localhost:10444/v1/update
# LXC usage only
# lxc - SCRYPTED_INSTALL_ENVIRONMENT=lxc-docker
# Avahi can be used for network discovery by passing in the host daemon
# or running the daemon inside the container. Choose one or the other.
# Uncomment next line to run avahi-daemon inside the container.
# See volumes and security_opt section below to use the host daemon.
# See volumes section below to use the host daemon.
# - SCRYPTED_DOCKER_AVAHI=true
# NVIDIA (Part 1 of 2)
# Uncomment next 3 lines for Nvidia GPU support.
# - NVIDIA_VISIBLE_DEVICES=all
# - NVIDIA_DRIVER_CAPABILITIES=all
# runtime: nvidia
# NVIDIA (Part 2 of 2) - Use NVIDIA image, and remove subsequent default image.
# image: ghcr.io/koush/scrypted:nvidia
image: ghcr.io/koush/scrypted
# Necessary to communicate with host dbus for avahi-daemon.
security_opt:
- apparmor:unconfined
volumes:
# Scrypted NVR Storage (Part 3 of 3)
# Modify to add the additional volume for Scrypted NVR.
# The following example would mount the /mnt/media/video path on the host
# The following example would mount the /mnt/sda/video path on the host
# to the /nvr path inside the docker container.
# - /mnt/media/video:/nvr
@@ -70,25 +66,11 @@ services:
# Ensure Avahi is running on the host machine:
# It can be installed with: sudo apt-get install avahi-daemon
# This is not compatible with running avahi inside the container (see above).
# Also, uncomment the lines under security_opt
# - /var/run/dbus:/var/run/dbus
# - /var/run/avahi-daemon/socket:/var/run/avahi-daemon/socket
# Default volume for the Scrypted database. Typically should not be changed.
# The volume will be placed relative to this docker-compose.yml.
- ./volume:/server/volume
# LXC usage only
# lxc - /var/run/docker.sock:/var/run/docker.sock
# lxc - /root/.scrypted/docker-compose.yml:/root/.scrypted/docker-compose.yml
# lxc - /root/.scrypted/docker-compose.sh:/root/.scrypted/docker-compose.sh
# lxc - /root/.scrypted/.env:/root/.scrypted/.env
# lxc - /mnt:/mnt
# Uncomment the following lines to use Avahi daemon from the host
# Without this, AppArmor will block the container's attempt to talk to Avahi via dbus
# security_opt:
# - apparmor:unconfined
- ~/.scrypted/volume:/server/volume
devices: [
# uncomment the common systems devices to pass
# them through to docker.
@@ -99,9 +81,6 @@ services:
# hardware accelerated video decoding, opencl, etc.
# "/dev/dri:/dev/dri",
# AMD GPU
# "/dev/kfd:/dev/kfd",
# uncomment below as necessary.
# zwave usb serial device
@@ -115,26 +94,27 @@ services:
container_name: scrypted
restart: unless-stopped
network_mode: host
image: ghcr.io/koush/scrypted
# logging is noisy and will unnecessarily wear on flash storage.
# scrypted has per device in memory logging that is preferred.
# enable the log file if enhanced debugging is necessary.
logging:
driver: "none"
# driver: "json-file"
# options:
# max-size: "10m"
# max-file: "10"
driver: "json-file"
options:
max-size: "10m"
max-file: "10"
labels:
- "com.centurylinklabs.watchtower.scope=scrypted"
# watchtower manages updates for Scrypted.
watchtower:
environment:
- WATCHTOWER_HTTP_API_TOKEN=${WATCHTOWER_HTTP_API_TOKEN:-env_missing_fallback}
- WATCHTOWER_HTTP_API_TOKEN=SET_THIS_TO_SOME_RANDOM_TEXT
- WATCHTOWER_HTTP_API_UPDATE=true
- WATCHTOWER_SCOPE=scrypted
- WATCHTOWER_HTTP_API_PERIODIC_POLLS=${WATCHTOWER_HTTP_API_PERIODIC_POLLS:-true}
# remove the following line to never allow docker to auto update.
# this is not recommended.
- WATCHTOWER_HTTP_API_PERIODIC_POLLS=true
image: containrrr/watchtower
container_name: scrypted-watchtower
restart: unless-stopped
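# Illustrative only (not part of this compose file): the install script writes a
# .env next to this docker-compose.yml so the WATCHTOWER_HTTP_API_TOKEN
# substitutions above resolve; roughly:
#   WATCHTOWER_HTTP_API_TOKEN=<random 32 character token>
#   WATCHTOWER_HTTP_API_PERIODIC_POLLS=true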

View File

@@ -1,9 +1,5 @@
#!/bin/bash
# disable core dumps.
# this doesn't disable core dumps on the scrypted service itself, only stuff run by init.
ulimit -c 0
if [[ "${SCRYPTED_DOCKER_AVAHI}" != "true" ]]; then
echo "SCRYPTED_DOCKER_AVAHI != true, won't manage dbus nor avahi-daemon" >/dev/stderr
exit 0

View File

@@ -1,42 +0,0 @@
if [ "$(uname -m)" != "x86_64" ]
then
echo "AMD graphics will not be installed on this architecture."
exit 0
fi
UBUNTU_22_04=$(lsb_release -r | grep "22.04")
UBUNTU_24_04=$(lsb_release -r | grep "24.04")
# needs either ubuntu 22.04 or 24.04
if [ -z "$UBUNTU_22_04" ] && [ -z "$UBUNTU_24_04" ]
then
echo "AMD graphics package can not be installed. Ubuntu version could not be detected when checking lsb-release and /etc/os-release."
exit 1
fi
if [ -n "$UBUNTU_22_04" ]
then
distro="jammy"
else
distro="noble"
fi
# https://amdgpu-install.readthedocs.io/en/latest/install-prereq.html#installing-the-installer-package
FILENAME=$(curl -s -L https://repo.radeon.com/amdgpu-install/latest/ubuntu/$distro/ | grep -o 'amdgpu-install_[^ ]*' | cut -d'"' -f1)
if [ -z "$FILENAME" ]
then
echo "AMD graphics package can not be installed. Could not find the package name."
exit 1
fi
set -e
mkdir -p /tmp/amd
cd /tmp/amd
curl -O -L http://repo.radeon.com/amdgpu-install/latest/ubuntu/$distro/$FILENAME
apt -y install rsync
dpkg -i $FILENAME
amdgpu-install --usecase=opencl --no-dkms -y --accept-eula
cd /tmp
rm -rf /tmp/amd

View File

@@ -1,79 +1,16 @@
if [ "$(uname -m)" != "x86_64" ]
if [ "$(uname -m)" = "x86_64" ]
then
echo "Installing Intel graphics packages."
apt-get update && apt-get install -y gpg-agent &&
rm -f /usr/share/keyrings/intel-graphics.gpg &&
curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --yes --output /usr/share/keyrings/intel-graphics.gpg &&
echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list &&
apt-get -y update &&
apt-get -y install intel-opencl-icd intel-media-va-driver-non-free &&
apt-get -y dist-upgrade;
exit $?
else
echo "Intel graphics will not be installed on this architecture."
exit 0
fi
# no errors beyond this point
set -e
# the intel provided script is disabled since it does not work with the 6.8 kernel in Ubuntu 24.04 or Proxmox 8.2.
# manual installation of the Intel graphics stuff is required.
# echo "Installing Intel graphics packages."
# apt-get update && apt-get install -y gpg-agent &&
# rm -f /usr/share/keyrings/intel-graphics.gpg &&
# curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --yes --output /usr/share/keyrings/intel-graphics.gpg &&
# echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list &&
# apt-get -y update &&
# apt-get -y install intel-opencl-icd &&
# apt-get -y dist-upgrade;
# need intel-media-va-driver-non-free, but all the other intel packages are installed from Intel github.
echo "Installing Intel graphics packages."
apt-get update && apt-get install -y gpg-agent &&
rm -f /usr/share/keyrings/intel-graphics.gpg &&
curl -L https://repositories.intel.com/graphics/intel-graphics.key | gpg --dearmor --yes --output /usr/share/keyrings/intel-graphics.gpg &&
echo 'deb [arch=amd64,i386 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/graphics/ubuntu jammy arc' | tee /etc/apt/sources.list.d/intel.gpu.jammy.list &&
apt-get -y update &&
apt-get -y install intel-media-va-driver-non-free &&
apt-get -y dist-upgrade;
rm -rf /tmp/gpu && mkdir -p /tmp/gpu && cd /tmp/gpu
apt-get install -y ocl-icd-libopencl1
# very stupid legacy + current install process conflict.
# install 24.35.30872.22 for legacy support. Then install latest.
# https://github.com/intel/compute-runtime/issues/770#issuecomment-2515166915
# https://github.com/intel/compute-runtime/releases/tag/24.35.30872.22
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17537.20/intel-igc-core_1.0.17537.20_amd64.deb
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/igc-1.0.17537.20/intel-igc-opencl_1.0.17537.20_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-level-zero-gpu-dbgsym_1.3.30872.22_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-level-zero-gpu-legacy1-dbgsym_1.3.30872.22_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-level-zero-gpu-legacy1_1.3.30872.22_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-level-zero-gpu_1.3.30872.22_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-opencl-icd-dbgsym_24.35.30872.22_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-opencl-icd-legacy1-dbgsym_24.35.30872.22_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-opencl-icd-legacy1_24.35.30872.22_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/intel-opencl-icd_24.35.30872.22_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/24.35.30872.22/libigdgmm12_22.5.0_amd64.deb
dpkg -i *.deb
rm -f *.deb
# https://github.com/intel/compute-runtime/releases/tag/24.45.31740.9
# note that at time of commit, IGC supports ubuntu 24.04 only possibly due to their builder being on 24.04.
IGC_VERSION=2_2.1.12+18087_amd64
COMPUTE_VERSION=24.45.31740.9
ZERO_GPU_VERSION=1.6.31740.9_amd64
LIBIGDGMM_VERSION=22.5.2_amd64
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/v2.1.12/intel-igc-core-$IGC_VERSION.deb
curl -O -L https://github.com/intel/intel-graphics-compiler/releases/download/v2.1.12/intel-igc-opencl-$IGC_VERSION.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/$COMPUTE_VERSION/intel-level-zero-gpu-dbgsym_$ZERO_GPU_VERSION.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/$COMPUTE_VERSION/intel-level-zero-gpu_$ZERO_GPU_VERSION.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/$COMPUTE_VERSION/intel-opencl-icd-dbgsym_"$COMPUTE_VERSION"_amd64.ddeb
curl -O -L https://github.com/intel/compute-runtime/releases/download/$COMPUTE_VERSION/intel-opencl-icd_"$COMPUTE_VERSION"_amd64.deb
curl -O -L https://github.com/intel/compute-runtime/releases/download/$COMPUTE_VERSION/libigdgmm12_$LIBIGDGMM_VERSION.deb
set +e
dpkg -i *.deb
set -e
# the legacy + latest process says this may be necessary, but it does not seem to be needed in a clean environment.
apt-get install --fix-broken
cd /tmp && rm -rf /tmp/gpu
apt-get -y dist-upgrade
exit 0

View File

@@ -1,72 +0,0 @@
if [ "$(uname -m)" != "x86_64" ]
then
echo "Intel NPU will not be installed on this architecture."
exit 0
fi
UBUNTU_22_04=$(lsb_release -r | grep "22.04")
UBUNTU_24_04=$(lsb_release -r | grep "24.04")
if [ -z "$UBUNTU_22_04" ] && [ -z "$UBUNTU_24_04" ]
then
# proxmox is compatible with ubuntu 22.04, check for /etc/pve directory
if [ -d "/etc/pve" ]
then
UBUNTU_22_04=true
fi
fi
# needs either ubuntu 22.04 or 24.04
if [ -z "$UBUNTU_22_04" ] && [ -z "$UBUNTU_24_04" ]
then
echo "Intel NPU will not be installed. Ubuntu version could not be detected when checking lsb-release and /etc/os-release."
exit 0
fi
if [ -n "$UBUNTU_22_04" ]
then
distro="22.04_amd64"
else
distro="24.04_amd64"
fi
dpkg --purge --force-remove-reinstreq intel-driver-compiler-npu intel-fw-npu intel-level-zero-npu
# no errors beyond this point
set -e
rm -rf /tmp/npu && mkdir -p /tmp/npu && cd /tmp/npu
# level zero must also be installed
LEVEL_ZERO_VERSION=1.19.2
# https://github.com/oneapi-src/level-zero
curl -O -L https://github.com/oneapi-src/level-zero/releases/download/v"$LEVEL_ZERO_VERSION"/level-zero_"$LEVEL_ZERO_VERSION"+u$distro.deb
curl -O -L https://github.com/oneapi-src/level-zero/releases/download/v"$LEVEL_ZERO_VERSION"/level-zero-devel_"$LEVEL_ZERO_VERSION"+u$distro.deb
# npu driver
# https://github.com/intel/linux-npu-driver
NPU_VERSION=1.10.0
NPU_VERSION_DATE=20241107-11729849322
curl -O -L https://github.com/intel/linux-npu-driver/releases/download/v"$NPU_VERSION"/intel-driver-compiler-npu_$NPU_VERSION."$NPU_VERSION_DATE"_ubuntu$distro.deb
# firmware can only be installed on host. will cause problems inside container.
if [ -n "$INTEL_FW_NPU" ]
then
curl -O -L https://github.com/intel/linux-npu-driver/releases/download/v"$NPU_VERSION"/intel-fw-npu_$NPU_VERSION."$NPU_VERSION_DATE"_ubuntu$distro.deb
fi
curl -O -L https://github.com/intel/linux-npu-driver/releases/download/v"$NPU_VERSION"/intel-level-zero-npu_$NPU_VERSION."$NPU_VERSION_DATE"_ubuntu$distro.deb
apt -y update
apt -y install libtbb12
dpkg -i *.deb
cd /tmp && rm -rf /tmp/npu
apt-get -y dist-upgrade
if [ -n "$INTEL_FW_NPU" ]
then
echo
echo "###############################################################################"
echo "Intel NPU firmware was installed. Reboot the host to complete the installation."
echo "###############################################################################"
fi

View File

@@ -1,43 +0,0 @@
UBUNTU_22_04=$(lsb_release -r | grep "22.04")
UBUNTU_24_04=$(lsb_release -r | grep "24.04")
set -e
# Install CUDA for 22.04
# https://developer.nvidia.com/cuda-downloads?target_os=Linux&target_arch=x86_64&Distribution=Ubuntu&target_version=24.04&target_type=deb_network
# Install CUDA for 24.04
# https://developer.nvidia.com/cuda-downloads?target_os=Linux&target_arch=x86_64&Distribution=Ubuntu&target_version=24.04&target_type=deb_network
# Do not apt install nvidia-open, must use cuda-drivers.
if [ -z "$UBUNTU_22_04" ] && [ -z "$UBUNTU_24_04" ]
then
echo "NVIDIA container toolkit can not be installed. Ubuntu version could not be detected when checking lsb-release and /etc/os-release."
exit 1
fi
if [ -n "$UBUNTU_22_04" ]
then
distro="ubuntu2204"
else
distro="ubuntu2404"
fi
apt update -q \
&& apt install -y wget \
&& wget -qO /cuda-keyring.deb https://developer.download.nvidia.com/compute/cuda/repos/$distro/$(uname -m)/cuda-keyring_1.1-1_all.deb \
&& dpkg -i /cuda-keyring.deb;
# https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html
apt -y update
apt -y install gpg
curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --yes --dearmor -o /usr/share/keyrings/nvidia-container-toolkit-keyring.gpg \
&& curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list | \
sed 's#deb https://#deb [signed-by=/usr/share/keyrings/nvidia-container-toolkit-keyring.gpg] https://#g' | \
tee /etc/apt/sources.list.d/nvidia-container-toolkit.list
apt -y update
# is there a way to get a versioned package automatically?
apt -y install cuda-drivers
apt -y install nvidia-container-toolkit
nvidia-ctk runtime configure --runtime=docker
systemctl restart docker

View File

@@ -1,54 +0,0 @@
if [ "$(uname -m)" = "x86_64" ]
then
UBUNTU_22_04=$(lsb_release -r | grep "22.04")
UBUNTU_24_04=$(lsb_release -r | grep "24.04")
# needs either ubuntu 22.04 or 24.04
if [ -z "$UBUNTU_22_04" ] && [ -z "$UBUNTU_24_04" ]
then
echo "NVIDIA graphics package can not be installed. Ubuntu version could not be detected when checking lsb-release and /etc/os-release."
exit 1
fi
if [ -n "$UBUNTU_22_04" ]
then
distro="ubuntu2204"
else
distro="ubuntu2404"
fi
echo "Installing NVIDIA graphics packages."
apt update -q \
&& apt install -y wget \
&& wget -qO /cuda-keyring.deb https://developer.download.nvidia.com/compute/cuda/repos/$distro/$(uname -m)/cuda-keyring_1.1-1_all.deb \
&& dpkg -i /cuda-keyring.deb \
&& apt update -q \
&& apt install -y cuda-nvcc-12-6 libcublas-12-6 libcudnn9-cuda-12 cuda-libraries-12-6;
if [ "$?" != "0" ]
then
echo "Error: NVIDIA graphics packages failed to install."
exit 1
fi
# Update: the libnvidia-opencl.so.1 file is not present in the container image, it is
# mounted via the nvidia container runtime. This is why the following check is commented out.
# this file is present but for some reason the icd file is not created by nvidia runtime.
# if [ ! -f "/usr/lib/x86_64-linux-gnu/libnvidia-opencl.so.1" ]
# then
# echo "Error: NVIDIA OpenCL library not found."
# exit 1
# fi
# the container runtime doesn't mount this file for some reason. seems to be a bug.
# https://github.com/NVIDIA/nvidia-container-toolkit/issues/682
# but the contents are simply the .so file, which is a symlink the nvidia runtime
# will mount in.
mkdir -p /etc/OpenCL/vendors/
echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd
else
echo "NVIDIA graphics will not be installed on this architecture."
fi
exit 0

View File

@@ -1,11 +1,5 @@
#!/usr/bin/env bash
if [ "$SCRYPTED_LXC" ]
then
export SERVICE_USER="root"
export SCRYPTED_NONINTERACTIVE="true"
fi
if [ -z "$SERVICE_USER" ]
then
echo "Scrypted SERVICE_USER environment variable was not specified. Service will not be installed."
@@ -13,12 +7,6 @@ then
fi
function readyn() {
if [ ! -z "$SCRYPTED_NONINTERACTIVE" ]
then
yn="y"
return
fi
while true; do
read -p "$1 (y/n) " yn
case $yn in
@@ -45,11 +33,6 @@ systemctl disable scrypted.service 2> /dev/null
USER_HOME=$(eval echo ~$SERVICE_USER)
SCRYPTED_HOME=$USER_HOME/.scrypted
mkdir -p $SCRYPTED_HOME
# remove various things from a previous local install.
rm -rf $SCRYPTED_HOME/node_modules
rm -rf $SCRYPTED_HOME/install.json
rm -rf $SCRYPTED_HOME/package.json
rm -rf $SCRYPTED_HOME/package-lock.json
set -e
cd $SCRYPTED_HOME
@@ -63,34 +46,13 @@ then
usermod -aG docker $SERVICE_USER
fi
WATCHTOWER_HTTP_API_TOKEN=$(echo $RANDOM | md5sum | head -c 32)
echo "WATCHTOWER_HTTP_API_TOKEN=$WATCHTOWER_HTTP_API_TOKEN" > $SCRYPTED_HOME/.env
# remove the following line from .env to disable autoupdates.
# this is not recommended.
echo "WATCHTOWER_HTTP_API_PERIODIC_POLLS=true" >> $SCRYPTED_HOME/.env
WATCHTOWER_HTTP_API_TOKEN=$(echo $RANDOM | md5sum)
DOCKER_COMPOSE_YML=$SCRYPTED_HOME/docker-compose.yml
curl -s https://raw.githubusercontent.com/koush/scrypted/main/install/docker/docker-compose.yml > $DOCKER_COMPOSE_YML
echo "Created $DOCKER_COMPOSE_YML"
if [ -z "$SCRYPTED_LXC" ]
curl -s https://raw.githubusercontent.com/koush/scrypted/main/install/docker/docker-compose.yml | sed s/SET_THIS_TO_SOME_RANDOM_TEXT/"$(echo $RANDOM | md5sum | head -c 32)"/g > $DOCKER_COMPOSE_YML
if [ -d /dev/dri ]
then
if [ -e /dev/dri ]
then
sed -i 's/'#' "\/dev\/dri/"\/dev\/dri/g' $DOCKER_COMPOSE_YML
fi
if [ -e /dev/kfd ]
then
sed -i 's/'#' "\/dev\/kfd/"\/dev\/kfd/g' $DOCKER_COMPOSE_YML
fi
else
# uncomment lxc specific stuff
sed -i 's/'#' lxc //g' $DOCKER_COMPOSE_YML
# never restart, systemd will handle it
sed -i 's/restart: unless-stopped/restart: no/g' $DOCKER_COMPOSE_YML
sudo systemctl stop apparmor || true
sudo apt -y purge apparmor || true
sed -i 's/'#' "\/dev\/dri/"\/dev\/dri/g' $DOCKER_COMPOSE_YML
fi
readyn "Install avahi-daemon? This is the recommended for reliable HomeKit discovery and pairing."
@@ -99,12 +61,10 @@ then
sudo apt-get -y install avahi-daemon
sed -i 's/'#' - \/var\/run\/dbus/- \/var\/run\/dbus/g' $DOCKER_COMPOSE_YML
sed -i 's/'#' - \/var\/run\/avahi-daemon/- \/var\/run\/avahi-daemon/g' $DOCKER_COMPOSE_YML
sed -i 's/'#' security_opt:/security_opt:/g' $DOCKER_COMPOSE_YML
sed -i 's/'#' - apparmor:unconfined/ - apparmor:unconfined/g' $DOCKER_COMPOSE_YML
fi
echo "Setting permissions on $SCRYPTED_HOME"
chown -R $SERVICE_USER $SCRYPTED_HOME || true
chown -R $SERVICE_USER $SCRYPTED_HOME
set +e
@@ -117,41 +77,8 @@ set -e
echo "docker compose pull"
sudo -u $SERVICE_USER docker compose pull
if [ -z "$SCRYPTED_LXC" ]
then
echo "docker compose up -d"
sudo -u $SERVICE_USER docker compose up -d
else
export DOCKER_COMPOSE_SH=$SCRYPTED_HOME/docker-compose.sh
curl https://raw.githubusercontent.com/koush/scrypted/main/install/proxmox/docker-compose.sh > $DOCKER_COMPOSE_SH
chmod +x $DOCKER_COMPOSE_SH
cat > /etc/systemd/system/scrypted.service <<EOT
[Unit]
Description=Scrypted service
After=network.target
[Service]
User=root
Group=root
Type=simple
ExecStart=$DOCKER_COMPOSE_SH
Restart=always
RestartSec=3
StandardOutput=null
StandardError=null
[Install]
WantedBy=multi-user.target
EOT
systemctl daemon-reload
systemctl enable scrypted.service
systemctl restart scrypted.service
fi
echo "docker compose up -d"
sudo -u $SERVICE_USER docker compose up -d
echo
echo
@@ -162,5 +89,5 @@ echo "Note that it is https and that you'll be asked to approve/ignore the websi
echo
echo
echo "Optional:"
echo "Scrypted NVR Recording storage directory can be configured with an additional script located at:"
echo "https://docs.scrypted.app/scrypted-nvr/recording-storage.html#docker-volume"
echo "Scrypted NVR Recording storage directory can be configured with an additional script:"
echo "https://docs.scrypted.app/scrypted-nvr/installation.html#docker-volume"

View File

@@ -72,7 +72,6 @@ function removescryptedfstab() {
grep -v "scrypted-nvr" /etc/fstab > /tmp/fstab && cp /tmp/fstab /etc/fstab
# ensure newline
sed -i -e '$a\' /etc/fstab
systemctl daemon-reload
}
BLOCK_DEVICE="/dev/$1"
@@ -96,17 +95,7 @@ then
set +e
sync
PARTITION_DEVICE="$BLOCK_DEVICE"1
if [ ! -e "$PARTITION_DEVICE" ]
then
PARTITION_DEVICE="$BLOCK_DEVICE"p1
if [ ! -e "$PARTITION_DEVICE" ]
then
echo "Unable to determine block device partition from block device: $BLOCK_DEVICE"
exit 1
fi
fi
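# Note (illustrative, not part of this diff): SATA/SCSI disks append the partition
# number directly (/dev/sda -> /dev/sda1) while NVMe and MMC devices insert a "p"
# (/dev/nvme0n1 -> /dev/nvme0n1p1), which is why both names are probed above.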
mkfs -F -t ext4 "$PARTITION_DEVICE"
mkfs -F -t ext4 "$BLOCK_DEVICE"1
sync
# parse/evaluate blkid line as env vars
@@ -128,9 +117,8 @@ then
set -e
removescryptedfstab
mkdir -p /mnt/scrypted-nvr
echo "UUID=$UUID /mnt/scrypted-nvr ext4 defaults,nofail,noatime,x-systemd.automount 0 0" >> /etc/fstab
echo "PARTLABEL=scrypted-nvr /mnt/scrypted-nvr ext4 defaults,nofail 0 0" >> /etc/fstab
mount -a
systemctl daemon-reload
set +e
DIR="/mnt/scrypted-nvr"

View File

@@ -3,18 +3,9 @@
################################################################
FROM header as base
# vulkan
RUN apt -y install libvulkan1
# intel opencl for openvino
# intel opencl gpu for openvino
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-graphics.sh | bash
# NPU driver will SIGILL on openvino prior to 2024.5.0
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-intel-npu.sh | bash
# amd opencl
RUN curl https://raw.githubusercontent.com/koush/scrypted/main/install/docker/install-amd-graphics.sh | bash
# python 3.9 from ppa.
# 3.9 is the version with prebuilt support for tensorflow lite
RUN add-apt-repository -y ppa:deadsnakes/ppa && \
@@ -26,8 +17,8 @@ RUN add-apt-repository -y ppa:deadsnakes/ppa && \
# allow pip to install to system
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
# RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install debugpy
RUN python3.9 -m pip install --upgrade pip
RUN python3.9 -m pip install debugpy typing_extensions psutil
# Coral Edge TPU
# https://coral.ai/docs/accelerator/get-started/#runtime-on-linux
@@ -35,20 +26,16 @@ RUN echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" |
RUN curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
RUN apt-get -y update && apt-get -y install libedgetpu1-std
# set default shell to bash
RUN chsh -s /bin/bash
ENV SHELL="/bin/bash"
ENV SCRYPTED_INSTALL_ENVIRONMENT="docker"
ENV SCRYPTED_CAN_RESTART="true"
ENV SCRYPTED_VOLUME="/server/volume"
ENV SCRYPTED_INSTALL_PATH="/server"
RUN test -f "/usr/bin/ffmpeg" && test -f "/usr/bin/python3" && test -f "/usr/bin/python3.9" && test -f "/usr/bin/python3.12"
RUN test -f "/usr/bin/ffmpeg" && test -f "/usr/bin/python3" && test -f "/usr/bin/python3.9" && test -f "/usr/bin/python3.10"
ENV SCRYPTED_FFMPEG_PATH="/usr/bin/ffmpeg"
ENV SCRYPTED_PYTHON_PATH="/usr/bin/python3"
ENV SCRYPTED_PYTHON39_PATH="/usr/bin/python3.9"
ENV SCRYPTED_PYTHON312_PATH="/usr/bin/python3.12"
ENV SCRYPTED_PYTHON310_PATH="/usr/bin/python3.10"
ENV SCRYPTED_DOCKER_FLAVOR="full"

View File

@@ -11,7 +11,12 @@ ENV DEBIAN_FRONTEND=noninteractive
# base tools and development stuff
RUN apt-get update && apt-get -y install \
curl software-properties-common apt-utils \
build-essential \
cmake \
ffmpeg \
gcc \
libcairo2-dev \
libgirepository1.0-dev \
pkg-config && \
apt-get -y update && \
apt-get -y upgrade
@@ -32,12 +37,16 @@ RUN apt-get -y install \
python3-setuptools \
python3-wheel
# these are necessary for pillow-simd, additional on disk size is small
# but could consider removing this.
RUN echo "Installing pillow-simd dependencies."
RUN apt-get -y install \
libjpeg-dev zlib1g-dev
# gstreamer native https://gstreamer.freedesktop.org/documentation/installing/on-linux.html?gi-language=c#install-gstreamer-on-ubuntu-or-debian
RUN echo "Installing gstreamer."
# python-codecs pygobject dependencies
RUN apt-get -y install libcairo2-dev libgirepository1.0-dev
RUN apt-get -y install \
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav \
gstreamer1.0-tools gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-bad gstreamer1.0-libav gstreamer1.0-alsa \
gstreamer1.0-vaapi
# python3 gstreamer bindings
@@ -48,9 +57,8 @@ RUN apt-get -y install \
# allow pip to install to system
RUN rm -f /usr/lib/python**/EXTERNALLY-MANAGED
# ERROR: Cannot uninstall pip 24.0, RECORD file not found. Hint: The package was installed by debian.
# RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install debugpy
RUN python3 -m pip install --upgrade pip
RUN python3 -m pip install debugpy typing_extensions psutil
################################################################
# End section generated from template/Dockerfile.full.header

View File

@@ -97,7 +97,7 @@ echo "docker compose rm -rf"
sudo -u $SERVICE_USER docker rm -f /scrypted /scrypted-watchtower 2> /dev/null
echo "Installing Scrypted..."
RUN sudo -u $SERVICE_USER npx -y scrypted@latest install-server $SCRYPTED_INSTALL_VERSION
RUN sudo -u $SERVICE_USER npx -y scrypted@latest install-server
cat > /etc/systemd/system/scrypted.service <<EOT
@@ -110,12 +110,10 @@ User=$SERVICE_USER
Group=$SERVICE_USER
Type=simple
ExecStart=/usr/bin/npx -y scrypted serve
Restart=always
Restart=on-failure
RestartSec=3
Environment="NODE_OPTIONS=$NODE_OPTIONS"
Environment="SCRYPTED_INSTALL_ENVIRONMENT=$SCRYPTED_INSTALL_ENVIRONMENT"
StandardOutput=null
StandardError=null
[Install]
WantedBy=multi-user.target

View File

@@ -40,6 +40,8 @@ echo "Installing Scrypted dependencies..."
RUN_IGNORE xcode-select --install
RUN brew update
RUN_IGNORE brew install node@20
# snapshot plugin and others
RUN brew install libvips
# dlib
RUN brew install cmake
@@ -69,14 +71,11 @@ then
fi
RUN python$PYTHON_VERSION -m pip install --upgrade pip
# besides debugpy, none of these dependencies are needed anymore?
# portable python includes typing and does not need typing_extensions.
# opencv-python-headless has wheels for macos.
if [ "$PYTHON_VERSION" != "3.10" ]
then
RUN python$PYTHON_VERSION -m pip install typing
fi
RUN python$PYTHON_VERSION -m pip install debugpy typing_extensions opencv-python
RUN python$PYTHON_VERSION -m pip install debugpy typing_extensions opencv-python psutil
echo "Installing Scrypted Launch Agent..."
@@ -122,7 +121,7 @@ then
fi
echo "Installing Scrypted..."
RUN $NPX_PATH -y scrypted@latest install-server $SCRYPTED_INSTALL_VERSION
RUN $NPX_PATH -y scrypted@latest install-server
cat > ~/Library/LaunchAgents/app.scrypted.server.plist <<EOT
<?xml version="1.0" encoding="UTF-8"?>

View File

@@ -1,5 +1,3 @@
#Requires -RunAsAdministrator
# Set-PSDebug -Trace 1
# stop existing service if any
@@ -10,10 +8,10 @@ sc.exe stop scrypted.exe
iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))
# Install node.js
choco upgrade -y nodejs-lts --version=20.18.0
choco upgrade -y nodejs-lts --version=20.11.1
# Install VC Redist, which is necessary for portable python
choco install -y vcredist140
choco install vcredist140
# TODO: remove python install, and use portable python
# Install Python
@@ -24,24 +22,11 @@ $SCRYPTED_WINDOWS_PYTHON_VERSION="-3.9"
# Refresh environment variables for py and npx to work
$env:Path = [System.Environment]::GetEnvironmentVariable("Path","Machine") + ";" + [System.Environment]::GetEnvironmentVariable("Path","User")
# Workaround Windows Node no longer creating %APPDATA%\npm which causes npx to fail
# Fixed in newer versions of NPM but not the one bundled with Node 20
# https://github.com/nodejs/node/issues/53538
npm i -g npm
py $SCRYPTED_WINDOWS_PYTHON_VERSION -m pip install --upgrade pip
# besides debugpy, none of these dependencies are needed anymore?
# portable python includes typing and does not need typing_extensions.
# opencv-python-headless has wheels for windows.
py $SCRYPTED_WINDOWS_PYTHON_VERSION -m pip install debugpy typing_extensions typing opencv-python
$SCRYPTED_INSTALL_VERSION=[System.Environment]::GetEnvironmentVariable("SCRYPTED_INSTALL_VERSION","User")
if ($SCRYPTED_INSTALL_VERSION -eq $null) {
npx -y scrypted@latest install-server
} else {
npx -y scrypted@latest install-server $SCRYPTED_INSTALL_VERSION
}
npx -y scrypted@latest install-server
$USER_HOME_ESCAPED = $env:USERPROFILE.replace('\', '\\')
$SCRYPTED_HOME = $env:USERPROFILE + '\.scrypted'
@@ -49,10 +34,7 @@ $SCRYPTED_HOME_ESCAPED_PATH = $SCRYPTED_HOME.replace('\', '\\')
npm install --prefix $SCRYPTED_HOME @koush/node-windows --save
$NPX_PATH = (Get-Command npx).Path
# The path needs double quotes to handle spaces in the directory path
$NPX_PATH_ESCAPED = '"' + $NPX_PATH.replace('\', '\\') + '"'
# On newer versions of NPM, the NPX might be a .ps1 file which doesn't work with child_process.spawn, change to .cmd
$NPX_PATH_ESCAPED = $NPX_PATH_ESCAPED.replace('.ps1', '.cmd')
$NPX_PATH_ESCAPED = $NPX_PATH.replace('\', '\\')
$SERVICE_JS = @"
const fs = require('fs');
@@ -62,12 +44,8 @@ try {
catch (e) {
}
const child_process = require('child_process');
child_process.spawn('$NPX_PATH_ESCAPED', ['-y', 'scrypted', 'serve'], {
child_process.spawn('$($NPX_PATH_ESCAPED)', ['-y', 'scrypted', 'serve'], {
stdio: 'inherit',
// allow spawning .cmd https://nodejs.org/en/blog/vulnerability/april-2024-security-releases-2
shell: true,
}).on('error', (err) => {
console.error('Error spawning child process', err);
});
"@
@@ -113,9 +91,6 @@ svc.on("install", () => {
svc.on("start", () => {
console.log("Service started");
});
svc.on("error", (err) => {
console.log("Service error", err);
});
svc.install();
"@

View File

@@ -0,0 +1,84 @@
function readyn() {
while true; do
read -p "$1 (y/n) " yn
case $yn in
[Yy]* ) break;;
[Nn]* ) break;;
* ) echo "Please answer yes or no. (y/n)";;
esac
done
}
cd /tmp
SCRYPTED_VERSION=v0.93.0
SCRYPTED_TAR_ZST=scrypted-$SCRYPTED_VERSION.tar.zst
if [ -z "$VMID" ]
then
VMID=10443
fi
echo "Downloading scrypted container backup."
if [ ! -f "$SCRYPTED_TAR_ZST" ]
then
curl -O -L https://github.com/koush/scrypted/releases/download/$SCRYPTED_VERSION/scrypted.tar.zst
mv scrypted.tar.zst $SCRYPTED_TAR_ZST
fi
echo "Checking for existing container."
pct config $VMID
if [ "$?" == "0" ]
then
echo ""
echo "Existing container $VMID found. Run this script with --force to overwrite the existing container."
echo "This will wipe all existing data. Clone the existing container to retain the data, then reassign the owner of the scrypted volume after installation is complete."
echo ""
echo "bash $0 --force"
echo ""
fi
pct restore $VMID $SCRYPTED_TAR_ZST $@
if [ "$?" != "0" ]
then
echo ""
echo "pct restore failed"
echo ""
echo "This may be caused by the server's 'local' storage not supporting containers."
echo "Try running this script again with a different storage device (local-lvm, local-zfs). For example:"
echo ""
echo "bash $0 --storage local-lvm"
echo ""
exit 1
fi
pct set $VMID -net0 name=eth0,bridge=vmbr0,ip=dhcp,ip6=auto
if [ "$?" != "0" ]
then
echo ""
echo "pct set network failed"
echo ""
echo "Ignoring... Please verify your container's network settings."
fi
CONF=/etc/pve/lxc/$VMID.conf
if [ -f "$CONF" ]
then
echo "onboot: 1" >> $CONF
else
echo "$CONF not found? Start on boot must be enabled manually."
fi
echo "Adding udev rule: /etc/udev/rules.d/65-scrypted.rules"
readyn "Add udev rule for hardware acceleration? This may conflict with existing rules."
if [ "$yn" == "y" ]
then
sh -c "echo 'SUBSYSTEM==\"apex\", MODE=\"0666\"' > /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'KERNEL==\"renderD128\", MODE=\"0666\"' >> /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'KERNEL==\"card0\", MODE=\"0666\"' >> /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'SUBSYSTEM==\"usb\", ATTRS{idVendor}==\"1a6e\", ATTRS{idProduct}==\"089a\", MODE=\"0666\"' >> /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'SUBSYSTEM==\"usb\", ATTRS{idVendor}==\"18d1\", ATTRS{idProduct}==\"9302\", MODE=\"0666\"' >> /etc/udev/rules.d/65-scrypted.rules"
udevadm control --reload-rules && udevadm trigger
fi
echo "Scrypted setup is complete and the container resources can be started."
echo "Scrypted NVR users should provide at least 4 cores and 16GB RAM prior to starting."

View File

@@ -1,18 +0,0 @@
#!/bin/bash
cd /root/.scrypted
# always immediately upgrade everything in case there's a broken update.
# this will also be preferable for troubleshooting via lxc reboot.
export DEBIAN_FRONTEND=noninteractive
yes | dpkg --configure -a
apt -y --fix-broken install && apt -y update && apt -y dist-upgrade
# force a pull to ensure we have the latest images.
# not using --pull always because that fails everything when the network is down
docker compose pull
# do not daemonize, when it exits, systemd will restart it.
# force a recreate as .env may have changed.
# furthermore force recreate gets the container back into a known state
# which is preferable in case the user has made manual changes and then restarts.
WATCHTOWER_HTTP_API_TOKEN=$(echo $RANDOM | md5sum | head -c 32) docker compose up --force-recreate --abort-on-container-exit

View File

@@ -1,302 +0,0 @@
PCT=$(which pct)
if [ -z "$PCT" ]
then
echo "pct command not found. This script must be run on the Proxmox host, not a container."
echo "Installation Documentation: https://docs.scrypted.app/installation.html#proxmox-ve"
exit 1
fi
function readyn() {
while true; do
read -p "$1 (y/n) " yn
case $yn in
[Yy]* ) break;;
[Nn]* ) break;;
* ) echo "Please answer yes or no. (y/n)";;
esac
done
}
cd /tmp
SCRYPTED_VERSION=v0.120.0
SCRYPTED_TAR_ZST=scrypted-$SCRYPTED_VERSION.tar.zst
if [ -z "$VMID" ]
then
VMID=10443
fi
SCRYPTED_BACKUP_VMID=10445
function prepareScryptedRestore() {
pct config $VMID 2>&1 > /dev/null
if [ "$?" != "0" ]
then
echo "VMID $VMID not found."
exit 1
fi
# append existing mac address.
HWADDR=",hwaddr=$(pct config $VMID | grep -oE 'hwaddr=[A-Z0-9:]+' | cut -d '=' -f 2)"
RESTORE_HOSTNAME=$(pct config $VMID | grep -oE 'hostname: [^[:space:]]+' | cut -d ':' -f 2- | tr -d ' ')
pct destroy $SCRYPTED_BACKUP_VMID 2>&1 > /dev/null
RESTORE_VMID=$VMID
VMID=$SCRYPTED_BACKUP_VMID
pct destroy $VMID 2>&1 > /dev/null
}
if [ -n "$SCRYPTED_RESTORE" ]
then
prepareScryptedRestore
fi
echo "Downloading scrypted container backup."
if [ ! -f "$SCRYPTED_TAR_ZST" ]
then
curl -O -L https://github.com/koush/scrypted/releases/download/$SCRYPTED_VERSION/scrypted.tar.zst
mv scrypted.tar.zst $SCRYPTED_TAR_ZST
fi
if [[ "$@" =~ "--force" ]]
then
IGNORE_EXISTING=true
fi
if [ -n "$SCRYPTED_RESTORE" ]
then
IGNORE_EXISTING=true
fi
if [ -z "$IGNORE_EXISTING" ]
then
echo "Checking for existing container."
pct config $VMID
if [ "$?" == "0" ]
then
echo ""
echo "==============================================================="
echo "Existing container $VMID found."
echo "==============================================================="
echo ""
echo "This script can be used ro reinstall Scrypted and reset the container to a factory state."
echo "This preserves existing data. Creating a backup within Scrypted is highly recommended in case the reset fails."
echo "THIS WILL WIPE ADDITIONAL VOLUMES SUCH AS NVR STORAGE. NVR volumes will need to be readded after the restore:"
readyn "Reinstall Scrypted and and retain existing configuration?"
if [ "$yn" != "y" ]
then
echo ""
echo "1. To reinstall and reset Scrypted, run this script with --force to overwrite the existing container."
echo "THIS WILL WIPE THE EXISTING CONFIGURATION:"
echo ""
echo "VMID=$VMID bash $0 --force"
echo ""
echo "2. To reinstall Scrypted and and retain existing configuration, run this script with the environment variable SCRYPTED_RESTORE=true."
echo "This preserves existing data. Creating a backup within Scrypted is highly recommended in case the reset fails."
echo "THIS WILL WIPE ADDITIONAL VOLUMES SUCH AS NVR STORAGE. NVR volumes will need to be readded after the restore:"
echo ""
echo "SCRYPTED_RESTORE=true VMID=$VMID bash $0"
echo ""
echo "3. To install and run multiple Scrypted containers, run this script with the environment variable specifying"
echo "the new VMID=<number>. For example, to create a new LXC with VMID 12345:"
echo ""
echo "VMID=12345 bash $0"
exit 1
fi
SCRYPTED_RESTORE=true
prepareScryptedRestore
fi
fi
if [[ ! "$@" =~ "--storage" ]]
then
HAS_LOCAL_LVM=$(pvesm status | grep local-lvm | grep active)
HAS_LOCAL_ZFS=$(pvesm status | grep local-zfs | grep active)
if [ ! -z "$HAS_LOCAL_LVM" ]
then
RESTORE_STORAGE="--storage local-lvm"
elif [ ! -z "$HAS_LOCAL_ZFS" ]
then
RESTORE_STORAGE="--storage local-zfs"
else
echo "Could not determine a valid storage device. One may need to be specified manually."
fi
fi
pct stop $VMID 2>&1 > /dev/null
pct restore $VMID $SCRYPTED_TAR_ZST $RESTORE_STORAGE $@
if [ "$?" != "0" ]
then
echo ""
echo "The Scrypted container installation failed (pct restore error)."
echo ""
echo "This may be because the server's 'local' storage device is not being a valid"
echo "location for containers."
echo "Try running this script again with a different storage device like"
echo "'local-lvm' or 'local-zfs'."
echo ""
echo "#############################################################################"
echo -e "\033[32mPaste the following command into this shell to install to local-lvm instead:\033[0m"
echo ""
echo "bash $0 --storage local-lvm"
echo "#############################################################################"
echo ""
echo ""
exit 1
fi
pct set $VMID -net0 name=eth0,bridge=vmbr0,ip=dhcp,ip6=auto$HWADDR
if [ "$?" != "0" ]
then
echo ""
echo "pct set network failed"
echo ""
echo "Ignoring... Please verify your container's network settings."
fi
if [ -n "$RESTORE_HOSTNAME" ]
then
pct set $VMID --hostname $RESTORE_HOSTNAME
if [ "$?" != "0" ]
then
echo ""
echo "pct hostname restore failed"
echo ""
echo "Ignoring... Please verify your container's dns settings."
fi
fi
CONF=/etc/pve/lxc/$VMID.conf
if [ -f "$CONF" ]
then
echo "onboot: 1" >> $CONF
else
echo "$CONF not found? Start on boot must be enabled manually."
fi
if [ -n "$SCRYPTED_RESTORE" ]
then
echo ""
echo ""
echo "This script will reset the Scrypted container to a factory state while preserving existing data."
echo "IT IS RECOMMENDED TO CREATE A BACKUP INSIDE SCRYPTED FIRST."
readyn "Are you sure you want to continue?"
if [ "$yn" != "y" ]
then
exit 1
fi
echo "Stopping scrypted..."
pct stop $RESTORE_VMID 2>&1 > /dev/null
echo "Preparing rootfs reset..."
# remove the empty data volume from the downloaded image.
pct set $SCRYPTED_BACKUP_VMID --delete mp0 && pct set $SCRYPTED_BACKUP_VMID --delete unused0
if [ "$?" != "0" ]
then
echo "Failed to remove data volume from image."
exit 1
fi
# create a backup that contains only the root disk.
rm -f *.tar
vzdump $SCRYPTED_BACKUP_VMID --dumpdir /tmp
# this moves the data volume from the current scrypted instance to the backup target to preserve it during
# the restore.
pct move-volume $RESTORE_VMID mp0 --target-vmid $SCRYPTED_BACKUP_VMID --target-volume mp0
if [ "$?" != "0" ]
then
echo "Failed to move data volume to backup."
exit 1
fi
# arguments: from to mp hide-warning
function move_volume() {
HAS_VOLUME=$(pct config $1 | grep $3:)
if [ -n "$HAS_VOLUME" ]
then
echo "Moving $3..."
# this may error and there may be recording loss. bailing at this point is already too late.
pct move-volume $1 $3 --target-vmid $2 --target-volume $3
# volume must be inside /mnt to get into docker container
INSIDE_MNT=$(echo $HAS_VOLUME | grep /mnt)
if [ -z "$INSIDE_MNT" -a -z "$4" ]
then
echo "##################################################################"
echo "The following mount point is not visible to the"
echo "Scrypted docker container within the LXC:"
echo ""
echo "$HAS_VOLUME"
echo ""
echo "This recordings directory will be unavailable."
echo "The mount point must be updated to a path within /mnt."
echo "https://docs.scrypted.app/scrypted-nvr/recording-storage.html#proxmox-ve-mount-point"
echo "##################################################################"
fi
fi
}
# try moving 5 volumes, any more than that seems unlikely
move_volume $RESTORE_VMID $SCRYPTED_BACKUP_VMID mp1 hide-warning
move_volume $RESTORE_VMID $SCRYPTED_BACKUP_VMID mp2 hide-warning
move_volume $RESTORE_VMID $SCRYPTED_BACKUP_VMID mp3 hide-warning
move_volume $RESTORE_VMID $SCRYPTED_BACKUP_VMID mp4 hide-warning
move_volume $RESTORE_VMID $SCRYPTED_BACKUP_VMID mp5 hide-warning
VMID=$RESTORE_VMID
echo "Restoring with reset image..."
pct restore --force 1 $VMID *.tar $RESTORE_STORAGE $@
echo "Restoring volumes..."
move_volume $SCRYPTED_BACKUP_VMID $VMID mp0 hide-warning
move_volume $SCRYPTED_BACKUP_VMID $VMID mp1
move_volume $SCRYPTED_BACKUP_VMID $VMID mp2
move_volume $SCRYPTED_BACKUP_VMID $VMID mp3
move_volume $SCRYPTED_BACKUP_VMID $VMID mp4
move_volume $SCRYPTED_BACKUP_VMID $VMID mp5
pct destroy $SCRYPTED_BACKUP_VMID
fi
echo "Enabling startup on boot..."
pct set $VMID -onboot 1
readyn "Add udev rule for hardware acceleration? This may conflict with existing rules."
if [ "$yn" == "y" ]
then
echo "Adding udev rule: /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'SUBSYSTEM==\"apex\", MODE=\"0666\"' > /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'SUBSYSTEM==\"drm\", MODE=\"0666\"' >> /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'SUBSYSTEM==\"kfd\", MODE=\"0666\"' >> /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'SUBSYSTEM==\"accel\", MODE=\"0666\"' >> /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'SUBSYSTEM==\"usb\", ATTRS{idVendor}==\"1a6e\", ATTRS{idProduct}==\"089a\", MODE=\"0666\"' >> /etc/udev/rules.d/65-scrypted.rules"
sh -c "echo 'SUBSYSTEM==\"usb\", ATTRS{idVendor}==\"18d1\", ATTRS{idProduct}==\"9302\", MODE=\"0666\"' >> /etc/udev/rules.d/65-scrypted.rules"
udevadm control --reload-rules && udevadm trigger
fi
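# A rough sanity check that the rules applied (device nodes vary by hardware
# and may not all exist on a given host):
#   ls -l /dev/dri /dev/apex_0 /dev/kfd 2>/dev/null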
# check if intel
INTEL=$(cat /proc/cpuinfo | grep GenuineIntel)
if [ ! -z "$INTEL" ]
then
readyn "Install intel-microcode package? This will update your CPU and GPU firmware."
if [ "$yn" == "y" ]
then
echo "Installing intel-microcode..."
# remove the extra components first so the next sed can re-add them without duplication
sed -i 's/main contrib non-free-firmware/main/g' /etc/apt/sources.list
sed -i 's/main/main contrib non-free-firmware/g' /etc/apt/sources.list
apt update
apt install -y intel-microcode
echo "#############################"
echo "System Reboot is recommended."
echo "#############################"
fi
fi
echo "Scrypted setup is complete and the container resources can be started."
echo ""
echo "Scrypted NVR servers should run the disk setup script in the documentation to add storage prior to starting the container."


View File

@@ -1,74 +0,0 @@
#!/bin/bash
NVR_STORAGE=$1
NVR_STORAGE_DIRECTORY=$2
DISK_TYPE="large"
if [ ! -z "$FAST_DISK" ]
then
DISK_TYPE="fast"
fi
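# Rough usage sketch: setting FAST_DISK selects the "fast" tier instead of the
# default "large" tier (the directory name is a placeholder):
#   FAST_DISK=1 bash $0 directory-name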
if [ -z "$NVR_STORAGE" ]; then
echo ""
echo "Error: Directory name not provided. Usage:"
echo ""
echo "bash $0 directory-name [/optional/path/to/storage]"
echo ""
exit 1
fi
if [ -z "$VMID" ]
then
VMID="10443"
fi
FILE="/etc/pve/lxc/$VMID.conf"
# validate that the container config file exists
if [ ! -f "$FILE" ]; then
echo "Error: $FILE not found."
echo "If the Scrypted container id is not 10443, please set the VMID environment variable prior to running this script."
exit 1
fi
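# Rough usage sketch for a non-default container id (108 is a hypothetical
# value):
#   VMID=108 bash $0 directory-name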
if [ ! -z "$NVR_STORAGE_DIRECTORY" ]
then
if [ ! -d "$NVR_STORAGE_DIRECTORY" ]
then
echo ""
echo "Error: $NVR_STORAGE_DIRECTORY directory not found."
echo ""
exit 1
fi
else
STORAGE="/mnt/pve/$NVR_STORAGE"
if [ ! -d "$STORAGE" ]
then
echo "Error: $STORAGE not found."
echo "The Proxmox Directory Storage must be created using the UI prior to running this script."
exit 1
fi
# use a subdirectory so it does not conflict with Proxmox storage of backups etc.
NVR_STORAGE_DIRECTORY="$STORAGE/mounts/scrypted-nvr"
fi
# create the hidden folder that can be used as a marker.
mkdir -p $NVR_STORAGE_DIRECTORY/.nvr
chmod 0777 $NVR_STORAGE_DIRECTORY
echo "Stopping Scrypted..."
pct stop "$VMID"
echo "Modifying $FILE."
if [ -z "$ADD_DISK" ]
then
echo "Removing previous $DISK_TYPE lxc.mount.entry."
sed -i "/mnt\/nvr\/$DISK_TYPE/d" "$FILE"
fi
echo "Adding new $DISK_TYPE lxc.mount.entry."
echo "lxc.mount.entry: $NVR_STORAGE_DIRECTORY mnt/nvr/$DISK_TYPE/$NVR_STORAGE none bind,optional,create=dir" >> "$FILE"
echo "Starting Scrypted..."
pct start $VMID

View File

@@ -1,4 +1,10 @@
#!/bin/bash
echo 'if (!process.version.startsWith("v18")) throw new Error("Node 18 is required. Install Node Version Manager (nvm) for versioned node installations. See https://github.com/koush/scrypted/pull/498#issuecomment-1373854020")' | node
if [ "$?" != 0 ]
then
exit
fi
echo "######################################"
echo "Setting up popular plugins."
echo "Additional will need npm install manually."
@@ -9,7 +15,7 @@ cd $(dirname $0)
git submodule init
git submodule update
for directory in sdk server common packages/client packages/auth-fetch
for directory in sdk common server packages/client packages/auth-fetch
do
echo "$directory > npm install"
pushd $directory
@@ -27,7 +33,7 @@ echo "external/werift > npm install"
npm install
popd
for directory in rtsp ffmpeg-camera amcrest onvif hikvision reolink unifi-protect webrtc homekit
for directory in ffmpeg-camera rtsp amcrest onvif hikvision unifi-protect webrtc homekit
do
echo "$directory > npm install"
pushd plugins/$directory

View File

@@ -1,4 +1,4 @@
import { HttpFetchOptions, HttpFetchResponseType, checkStatus, createHeadersArray, fetcher, getFetchMethod, hasHeader, setDefaultHttpFetchAccept, setHeader } from '../../../server/src/fetch';
import { HttpFetchOptions, HttpFetchResponseType, checkStatus, fetcher, getFetchMethod, setDefaultHttpFetchAccept } from '../../../server/src/fetch';
export interface AuthFetchCredentialState {
username: string;
@@ -70,54 +70,36 @@ async function getAuth(options: AuthFetchOptions, url: string | URL, method: str
export function createAuthFetch<B, M>(
h: fetcher<B, M>,
parser: (body: M, responseType: HttpFetchResponseType | undefined) => Promise<any>
parser: (body: M, responseType: HttpFetchResponseType) => Promise<any>
) {
const authHttpFetch = async <T extends HttpFetchOptions<B>>(options: T & AuthFetchOptions): ReturnType<typeof h<T>> => {
const method = getFetchMethod(options);
const headers = createHeadersArray(options.headers);
const headers = new Headers(options.headers);
options.headers = headers;
setDefaultHttpFetchAccept(headers, options.responseType);
const initialHeader = await getAuth(options, options.url, method);
// try to provide an authorization if a session exists, but don't override Authorization if provided already.
// 401 will trigger a proper auth.
if (initialHeader && !hasHeader(headers, 'Authorization'))
setHeader(headers, 'Authorization', initialHeader);
const controller = new AbortController();
options.signal?.addEventListener('abort', () => controller.abort(options.signal?.reason));
if (initialHeader && !headers.has('Authorization'))
headers.set('Authorization', initialHeader);
const initialResponse = await h({
...options,
signal: controller.signal,
// need to intercept the status code to check for 401.
// all other status codes will be handled according to the initial request options.
checkStatusCode(statusCode) {
// can handle a 401 if a credential is provided.
// however, not providing a credential is also valid, and should
// fall through to the normal response handling which may be interested
// in the 401 response.
if (statusCode === 401 && options.credential)
return true;
if (options?.checkStatusCode === undefined || options?.checkStatusCode) {
const checker = typeof options?.checkStatusCode === 'function' ? options.checkStatusCode : checkStatus;
return checker(statusCode);
}
return true;
},
ignoreStatusCode: true,
responseType: 'readable',
});
// if it's not a 401, just return the response.
if (initialResponse.statusCode !== 401) {
if (initialResponse.statusCode !== 401 || !options.credential) {
if (!options?.ignoreStatusCode)
checkStatus(initialResponse.statusCode);
return {
...initialResponse,
body: await parser(initialResponse.body, options.responseType),
};
}
let authenticateHeaders: string | string[] | null = initialResponse.headers.get('www-authenticate');
let authenticateHeaders: string | string[] = initialResponse.headers.get('www-authenticate');
if (!authenticateHeaders)
throw new Error('Did not find WWW-Authenticate header.');
@@ -144,7 +126,7 @@ export function createAuthFetch<B, M>(
const header = await getAuth(options, options.url, method);
if (header)
setHeader(headers, 'Authorization', header);
headers.set('Authorization', header);
return h(options);
}
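// A rough usage sketch of the fetch helper produced by createAuthFetch. The
// authHttpFetch name, URL, and credentials below are hypothetical assumptions
// for illustration, not values taken from this diff:
//
//   const response = await authHttpFetch({
//       url: 'http://camera.local/ISAPI/System/deviceInfo',
//       credential: { username: 'admin', password: 'secret' },
//       responseType: 'text',
//   });
//   console.log(response.statusCode, response.body);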

Some files were not shown because too many files have changed in this diff.