Mirror of https://github.com/koush/scrypted.git (synced 2026-02-08 00:12:13 +00:00)
Compare commits
5 Commits
| SHA1 |
|---|
| 5de67fca86 |
| 98dc0b1b6d |
| a05595ecc7 |
| 87be4648f1 |
| 60e51adb41 |
plugins/objectdetector/package-lock.json (generated, 4 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "@scrypted/objectdetector",
-  "version": "0.0.130",
+  "version": "0.0.132",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@scrypted/objectdetector",
-      "version": "0.0.130",
+      "version": "0.0.132",
       "license": "Apache-2.0",
       "dependencies": {
         "@scrypted/common": "file:../../common",
plugins/objectdetector/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@scrypted/objectdetector",
-  "version": "0.0.130",
+  "version": "0.0.132",
   "description": "Scrypted Video Analysis Plugin. Installed alongside a detection service like OpenCV or TensorFlow.",
   "author": "Scrypted",
   "license": "Apache-2.0",

@@ -35,6 +35,7 @@
       "name": "Video Analysis Plugin",
       "type": "API",
       "interfaces": [
         "DeviceProvider",
         "Settings",
         "MixinProvider"
       ],
plugins/objectdetector/src/ffmpeg-videoframes-no-sharp.ts (new file, 169 lines)
@@ -0,0 +1,169 @@
import { Deferred } from "@scrypted/common/src/deferred";
import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from "@scrypted/common/src/media-helpers";
import { readLength, readLine } from "@scrypted/common/src/read-stream";
import sdk, { FFmpegInput, Image, ImageFormat, ImageOptions, MediaObject, ScryptedDeviceBase, ScryptedMimeTypes, VideoFrame, VideoFrameGenerator, VideoFrameGeneratorOptions } from "@scrypted/sdk";
import child_process from 'child_process';
import { Readable } from 'stream';


interface RawFrame {
    width: number;
    height: number;
    data: Buffer;
}

async function createRawImageMediaObject(image: RawImage): Promise<VideoFrame & MediaObject> {
    const ret = await sdk.mediaManager.createMediaObject(image, ScryptedMimeTypes.Image, {
        format: null,
        timestamp: 0,
        width: image.width,
        height: image.height,
        queued: 0,
        toBuffer: (options: ImageOptions) => image.toBuffer(options),
        toImage: (options: ImageOptions) => image.toImage(options),
        flush: async () => { },
    });

    return ret;
}

class RawImage implements Image, RawFrame {
    constructor(public data: Buffer, public width: number, public height: number, public format: ImageFormat) {
    }

    checkOptions(options: ImageOptions) {
        if (options?.resize || options?.crop)
            throw new Error('resize and crop are not supported');
        if (options?.format && options?.format !== this.format)
            throw new Error('format not supported');
    }

    async toBuffer(options: ImageOptions) {
        this.checkOptions(options);
        return this.data;
    }

    async toImage(options: ImageOptions) {
        this.checkOptions(options);
        return createRawImageMediaObject(this);
    }
}

export class FFmpegVideoFrameGenerator extends ScryptedDeviceBase implements VideoFrameGenerator {
    async *generateVideoFramesInternal(mediaObject: MediaObject, options?: VideoFrameGeneratorOptions, filter?: (videoFrame: VideoFrame & MediaObject) => Promise<boolean>): AsyncGenerator<VideoFrame & MediaObject, any, unknown> {
        const ffmpegInput = await sdk.mediaManager.convertMediaObjectToJSON<FFmpegInput>(mediaObject, ScryptedMimeTypes.FFmpegInput);
        const gray = options?.format === 'gray';
        const channels = gray ? 1 : 3;
        const format: ImageFormat = gray ? 'gray' : 'rgb';
        const vf: string[] = [];
        if (options?.fps)
            vf.push(`fps=${options.fps}`);
        if (options.resize)
            vf.push(`scale=${options.resize.width}:${options.resize.height}`);
        const args = [
            '-hide_banner',
            //'-hwaccel', 'auto',
            ...ffmpegInput.inputArguments,
            '-vcodec', 'pam',
            '-pix_fmt', gray ? 'gray' : 'rgb24',
            ...vf.length ? [
                '-vf',
                vf.join(','),
            ] : [],
            '-f', 'image2pipe',
            'pipe:3',
        ];

        // this seems to reduce latency.
        // addVideoFilterArguments(args, 'fps=10', 'fps');

        const cp = child_process.spawn(await sdk.mediaManager.getFFmpegPath(), args, {
            stdio: ['pipe', 'pipe', 'pipe', 'pipe'],
        });
        const console = mediaObject?.sourceId ? sdk.deviceManager.getMixinConsole(mediaObject.sourceId) : this.console;
        safePrintFFmpegArguments(console, args);
        ffmpegLogInitialOutput(console, cp);

        let finished = false;
        let frameDeferred: Deferred<RawFrame>;

        const reader = async () => {
            try {

                const readable = cp.stdio[3] as Readable;
                const headers = new Map<string, string>();
                while (!finished) {
                    const line = await readLine(readable);
                    if (line !== 'ENDHDR') {
                        const [key, value] = line.split(' ');
                        headers[key] = value;
                        continue;
                    }

                    if (headers['TUPLTYPE'] !== 'RGB' && headers['TUPLTYPE'] !== 'GRAYSCALE')
                        throw new Error(`Unexpected TUPLTYPE in PAM stream: ${headers['TUPLTYPE']}`);

                    const width = parseInt(headers['WIDTH']);
                    const height = parseInt(headers['HEIGHT']);
                    if (!width || !height)
                        throw new Error('Invalid dimensions in PAM stream');

                    const length = width * height * channels;
                    headers.clear();
                    const data = await readLength(readable, length);

                    if (frameDeferred) {
                        const f = frameDeferred;
                        frameDeferred = undefined;
                        f.resolve({
                            width,
                            height,
                            data,
                        });
                    }
                    else {
                        // this.console.warn('skipped frame');
                    }
                }
            }
            catch (e) {
            }
            finally {
                console.log('finished reader');
                finished = true;
                frameDeferred?.reject(new Error('frame generator finished'));
            }
        }

        try {
            reader();
            while (!finished) {
                frameDeferred = new Deferred();
                const raw = await frameDeferred.promise;
                const { width, height, data } = raw;

                const rawImage = new RawImage(data, width, height, format);
                try {
                    const mo = await createRawImageMediaObject(rawImage);
                    yield mo;
                }
                finally {
                    rawImage.data = undefined;
                }
            }
        }
        catch (e) {
        }
        finally {
            console.log('finished generator');
            finished = true;
            safeKillFFmpeg(cp);
        }
    }


    async generateVideoFrames(mediaObject: MediaObject, options?: VideoFrameGeneratorOptions, filter?: (videoFrame: VideoFrame & MediaObject) => Promise<boolean>): Promise<AsyncGenerator<VideoFrame & MediaObject, any, unknown>> {
        return this.generateVideoFramesInternal(mediaObject, options, filter);
    }
}
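A quick consumer sketch (an editor's illustration, not code from these commits): once the plugin registers this generator under the 'ffmpeg' nativeId, any caller holding a VideoFrameGenerator device and a camera MediaObject can iterate frames like this. The option names (fps, format, resize) and the VideoFrame members used here are the ones exercised by the code above; everything else is assumed.

```typescript
import { MediaObject, VideoFrameGenerator } from '@scrypted/sdk';

// Pull a handful of downscaled grayscale frames from a camera stream.
async function sampleFrames(generator: VideoFrameGenerator, stream: MediaObject) {
    const frames = await generator.generateVideoFrames(stream, {
        fps: 2,
        format: 'gray',
        resize: { width: 320, height: 180 },
    });

    let count = 0;
    for await (const frame of frames) {
        // Each yielded value is a VideoFrame & MediaObject backed by RawImage above,
        // so toBuffer returns the raw pixel data (width * height bytes for gray).
        const pixels = await frame.toBuffer({ format: 'gray' });
        console.log(`frame ${count}: ${frame.width}x${frame.height}, ${pixels.length} bytes`);
        if (++count >= 10)
            break; // leaving the loop lets the generator's finally block kill ffmpeg
    }
}
```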
@@ -33,11 +33,13 @@ async function createVipsMediaObject(image: VipsImage): Promise<VideoFrame & Med
         timestamp: 0,
         width: image.width,
         height: image.height,
+        queued: 0,
         toBuffer: (options: ImageOptions) => image.toBuffer(options),
         toImage: async (options: ImageOptions) => {
             const newImage = await image.toVipsImage(options);
             return createVipsMediaObject(newImage);
-        }
+        },
+        flush: async () => {},
     });

     return ret;
@@ -5,9 +5,9 @@ import { StorageSettings } from '@scrypted/sdk/storage-settings';
 import crypto from 'crypto';
 import { AutoenableMixinProvider } from "../../../common/src/autoenable-mixin-provider";
 import { SettingsMixinDeviceBase } from "../../../common/src/settings-mixin";
-// import { FFmpegVideoFrameGenerator, sharpLib } from './ffmpeg-videoframes';
 import { serverSupportsMixinEventMasking } from './server-version';
 import { getAllDevices, safeParseJson } from './util';
+import { FFmpegVideoFrameGenerator } from './ffmpeg-videoframes-no-sharp';

 const polygonOverlap = require('polygon-overlap');
 const insidePolygon = require('point-inside-polygon');

@@ -363,6 +363,7 @@ class ObjectDetectionMixin extends SettingsMixinDeviceBase<VideoCamera & Camera
         });

         frameGenerator = await videoFrameGenerator.generateVideoFrames(stream, {
+            queue: 0,
             resize: this.model?.inputSize ? {
                 width: this.model.inputSize[0],
                 height: this.model.inputSize[1],

@@ -955,25 +956,25 @@ class ObjectDetectionPlugin extends AutoenableMixinProvider implements Settings,
     constructor(nativeId?: ScryptedNativeId) {
         super(nativeId);

-        // process.nextTick(() => {
-        //     sdk.deviceManager.onDevicesChanged({
-        //         devices: [
-        //             {
-        //                 name: 'FFmpeg Frame Generator',
-        //                 type: ScryptedDeviceType.Builtin,
-        //                 interfaces: sharpLib ? [
-        //                     ScryptedInterface.VideoFrameGenerator,
-        //                 ] : [],
-        //                 nativeId: 'ffmpeg',
-        //             }
-        //         ]
-        //     })
-        // })
+        process.nextTick(() => {
+            sdk.deviceManager.onDevicesChanged({
+                devices: [
+                    {
+                        name: 'FFmpeg Frame Generator',
+                        type: ScryptedDeviceType.Builtin,
+                        interfaces: [
+                            ScryptedInterface.VideoFrameGenerator,
+                        ],
+                        nativeId: 'ffmpeg',
+                    }
+                ]
+            })
+        })
     }

     async getDevice(nativeId: string): Promise<any> {
-        // if (nativeId === 'ffmpeg')
-        //     return new FFmpegVideoFrameGenerator('ffmpeg');
+        if (nativeId === 'ffmpeg')
+            return new FFmpegVideoFrameGenerator('ffmpeg');
     }

     async releaseDevice(id: string, nativeId: string): Promise<void> {
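How the two halves of this compare are intended to meet (a hedged sketch by the editor, not code from the commits): ObjectDetectionMixin above requests frames from a VideoFrameGenerator and hands the resulting async generator to an ObjectDetection implementation such as the pam-diff plugin changed below. The generateObjectDetections call and the detected field on each result follow the ObjectDetectionGeneratorSession and ObjectDetectionGeneratorResult types these plugins import, but their exact shape is assumed here.

```typescript
import { MediaObject, ObjectDetection, VideoFrameGenerator } from '@scrypted/sdk';

// Feed generated frames into a generator-based detector and log what comes back.
async function runMotionPipeline(frameGenerator: VideoFrameGenerator, detector: ObjectDetection, stream: MediaObject) {
    // pam-diff converts frames with toBuffer({ format: 'rgb' }), so request default rgb frames.
    const frames = await frameGenerator.generateVideoFrames(stream, {
        queue: 0,
        fps: 2,
    });

    // Assumed result shape: each ObjectDetectionGeneratorResult carries the
    // ObjectsDetected event in `detected` alongside the frame that produced it.
    for await (const result of await detector.generateObjectDetections(frames, { settings: { motionAsObjects: true } })) {
        for (const d of result.detected.detections ?? [])
            console.log(d.className, d.score, d.boundingBox);
    }
}
```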
plugins/pam-diff/package-lock.json (generated, 4 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "@scrypted/pam-diff",
-  "version": "0.0.20",
+  "version": "0.0.21",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "@scrypted/pam-diff",
-      "version": "0.0.20",
+      "version": "0.0.21",
       "hasInstallScript": true,
       "dependencies": {
         "@types/node": "^16.6.1",
plugins/pam-diff/package.json

@@ -43,5 +43,5 @@
   "devDependencies": {
     "@scrypted/sdk": "file:../../sdk"
   },
-  "version": "0.0.20"
+  "version": "0.0.21"
 }
@@ -1,54 +1,15 @@
-import sdk, { FFmpegInput, MediaObject, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionGeneratorResult, ObjectDetectionGeneratorSession, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionSession, ObjectsDetected, ScryptedDeviceBase, ScryptedInterface, ScryptedMimeTypes, VideoFrame } from '@scrypted/sdk';
-import child_process, { ChildProcess } from 'child_process';
-import { ffmpegLogInitialOutput, safeKillFFmpeg, safePrintFFmpegArguments } from "../../../common/src/media-helpers";
+import sdk, { MediaObject, ObjectDetection, ObjectDetectionCallbacks, ObjectDetectionGeneratorResult, ObjectDetectionGeneratorSession, ObjectDetectionModel, ObjectDetectionResult, ObjectDetectionSession, ObjectsDetected, ScryptedDeviceBase, VideoFrame } from '@scrypted/sdk';

 import PD from 'pam-diff';
 import P2P from 'pipe2pam';
 import { PassThrough, Writable } from 'stream';

 const { mediaManager } = sdk;

 const defaultDifference = 9;
 const defaultPercentage = 2;

-interface PamDiffSession {
-    id: string;
-    timeout?: NodeJS.Timeout;
-    cp?: ChildProcess;
-    pamDiff?: any;
-    callbacks: ObjectDetectionCallbacks;
-}
-
 class PamDiff extends ScryptedDeviceBase implements ObjectDetection {
-    sessions = new Map<string, PamDiffSession>();
-
-    endSession(id: string) {
-        const pds = this.sessions.get(id);
-        if (!pds)
-            return;
-        this.sessions.delete(pds.id);
-        const event: ObjectsDetected = {
-            timestamp: Date.now(),
-            running: false,
-            detectionId: pds.id,
-        }
-        clearTimeout(pds.timeout);
-        safeKillFFmpeg(pds.cp);
-        if (pds.callbacks) {
-            pds.callbacks.onDetectionEnded(event);
-        }
-        else {
-            this.onDeviceEvent(ScryptedInterface.ObjectDetection, event);
-        }
-    }
-
-    reschedule(id: string, duration: number,) {
-        const pds = this.sessions.get(id);
-        if (!pds)
-            return;
-        clearTimeout(pds.timeout);
-        pds.timeout = setTimeout(() => this.endSession(id), duration);
-    }

     async * generateObjectDetectionsInternal(videoFrames: AsyncGenerator<VideoFrame, any, unknown>, session: ObjectDetectionGeneratorSession): AsyncGenerator<ObjectDetectionGeneratorResult, any, unknown> {
         videoFrames = await sdk.connectRPCObject(videoFrames);

@@ -92,7 +53,6 @@ class PamDiff extends ScryptedDeviceBase implements ObjectDetection {
             }
             const event: ObjectsDetected = {
                 timestamp: Date.now(),
                 running: true,
                 inputDimensions: [width, height],
                 detections,
             }

@@ -111,10 +71,10 @@ ENDHDR
 `;

             const buffer = await videoFrame.toBuffer({
-                resize: {
+                resize: (videoFrame.width !== width || videoFrame.height !== height) ? {
                     width,
                     height,
-                },
+                } : undefined,
                 format: 'rgb',
             });
             pt.write(Buffer.from(header));
@@ -146,157 +106,7 @@ ENDHDR
     }

     async detectObjects(mediaObject: MediaObject, session?: ObjectDetectionSession, callbacks?: ObjectDetectionCallbacks): Promise<ObjectsDetected> {
-        if (mediaObject && mediaObject.mimeType?.startsWith('image/'))
-            throw new Error('can not run motion detection on image')
-
-        let { detectionId } = session;
-        let pds = this.sessions.get(detectionId);
-        if (pds)
-            pds.callbacks = callbacks;
-
-        if (!session?.duration) {
-            this.endSession(detectionId);
-            return {
-                detectionId,
-                running: false,
-                timestamp: Date.now(),
-            }
-        }
-
-        if (pds) {
-            this.reschedule(detectionId, session.duration);
-            pds.pamDiff.setDifference(session.settings?.difference || defaultDifference).setPercent(session.settings?.percent || defaultPercentage);
-            return {
-                detectionId,
-                running: true,
-                timestamp: Date.now(),
-            };
-        }
-
-        // unable to start/extend this session.
-        if (!mediaObject) {
-            this.endSession(detectionId);
-            return {
-                detectionId,
-                running: false,
-                timestamp: Date.now(),
-            }
-        }
-
-        const ffmpeg = await mediaManager.getFFmpegPath();
-        const ffmpegInput: FFmpegInput = JSON.parse((await mediaManager.convertMediaObjectToBuffer(
-            mediaObject,
-            ScryptedMimeTypes.FFmpegInput
-        )).toString());
-
-        pds = {
-            id: detectionId,
-            callbacks,
-        }
-        this.reschedule(detectionId, session.duration);
-
-        const args = ffmpegInput.inputArguments.slice();
-        args.unshift(
-            '-hide_banner',
-            ...ffmpegInput.videoDecoderArguments || [],
-        )
-        args.push(
-            '-an', '-dn',
-            '-c:v',
-            'pam',
-            '-pix_fmt',
-            'rgb24',
-            '-f',
-            'image2pipe',
-            '-vf',
-            `fps=2,scale=640:360`,
-            'pipe:3',
-        );
-
-        const p2p = new P2P();
-        const pamDiff = new PD({
-            difference: session.settings?.difference || defaultDifference,
-            percent: session.settings?.percent || defaultPercentage,
-            response: session?.settings?.motionAsObjects ? 'blobs' : 'percent',
-        });
-
-        pamDiff.on('diff', async (data: any) => {
-            const trigger = data.trigger[0];
-            // console.log(trigger.blobs.length);
-            const { blobs } = trigger;
-
-            const detections: ObjectDetectionResult[] = [];
-            if (blobs?.length) {
-                for (const blob of blobs) {
-                    detections.push(
-                        {
-                            className: 'motion',
-                            score: 1,
-                            boundingBox: [blob.minX, blob.minY, blob.maxX - blob.minX, blob.maxY - blob.minY],
-                        }
-                    )
-                }
-            }
-            else {
-                detections.push(
-                    {
-                        className: 'motion',
-                        score: 1,
-                    }
-                )
-            }
-            const event: ObjectsDetected = {
-                timestamp: Date.now(),
-                running: true,
-                detectionId: pds.id,
-                inputDimensions: [640, 360],
-                detections,
-            }
-            if (pds.callbacks) {
-                pds.callbacks.onDetection(event);
-            }
-            else {
-                this.onDeviceEvent(ScryptedInterface.ObjectDetection, event);
-            }
-        });
-
-        const console = sdk.deviceManager.getMixinConsole(mediaObject.sourceId, this.nativeId);
-
-        pds.pamDiff = pamDiff;
-        pds.pamDiff
-            .setDifference(session.settings?.difference || defaultDifference)
-            .setPercent(session.settings?.percent || defaultPercentage)
-            .setResponse(session?.settings?.motionAsObjects ? 'blobs' : 'percent');;
-        safePrintFFmpegArguments(console, args);
-        pds.cp = child_process.spawn(ffmpeg, args, {
-            stdio: ['inherit', 'pipe', 'pipe', 'pipe']
-        });
-        let pamTimeout: NodeJS.Timeout;
-        const resetTimeout = () => {
-            clearTimeout(pamTimeout);
-            pamTimeout = setTimeout(() => {
-                const check = this.sessions.get(detectionId);
-                if (check !== pds)
-                    return;
-                console.error('PAM image stream timed out. Ending session.');
-                this.endSession(detectionId);
-            }, 60000);
-        }
-        p2p.on('data', () => {
-            resetTimeout();
-        })
-        resetTimeout();
-        pds.cp.stdio[3].pipe(p2p as any).pipe(pamDiff as any);
-        pds.cp.on('exit', () => this.endSession(detectionId));
-        ffmpegLogInitialOutput(console, pds.cp);
-
-        this.sessions.set(detectionId, pds);
-
-        return {
-            detectionId,
-            running: true,
-            timestamp: Date.now(),
-        }
+        throw new Error('can not run motion detection on image')
     }

     async getDetectionModel(): Promise<ObjectDetectionModel> {
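For reference, a sketch of the PAM header both plugins in this compare rely on (an editor's illustration; the field order follows the Netpbm PAM format that ffmpeg's pam encoder writes). FFmpegVideoFrameGenerator reads lines off pipe:3 until ENDHDR and then reads WIDTH * HEIGHT * channels bytes of pixel data, and the pam-diff generator path above writes an equivalent header (the ENDHDR template visible in the hunk context) in front of each raw RGB frame it forwards to pam-diff.

```typescript
// Build the text header that precedes each raw frame in a PAM (P7) stream.
// For RGB, DEPTH is 3 and the payload is width * height * 3 bytes, which is the
// length FFmpegVideoFrameGenerator reads with readLength after parsing the header.
function pamHeader(width: number, height: number, gray = false): string {
    return [
        'P7',
        `WIDTH ${width}`,
        `HEIGHT ${height}`,
        `DEPTH ${gray ? 1 : 3}`,
        'MAXVAL 255',
        `TUPLTYPE ${gray ? 'GRAYSCALE' : 'RGB'}`,
        'ENDHDR',
        '', // the header is terminated by a newline after ENDHDR
    ].join('\n');
}

// Example: pamHeader(640, 360) is followed by 640 * 360 * 3 = 691200 bytes of pixels.
```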
server/package-lock.json (generated, 4 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "@scrypted/server",
-  "version": "0.7.76",
+  "version": "0.7.81",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@scrypted/server",
-      "version": "0.7.76",
+      "version": "0.7.81",
       "license": "ISC",
       "dependencies": {
         "@mapbox/node-pre-gyp": "^1.0.10",
server/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@scrypted/server",
-  "version": "0.7.76",
+  "version": "0.7.81",
   "description": "",
   "dependencies": {
     "@mapbox/node-pre-gyp": "^1.0.10",
@@ -422,7 +422,7 @@ class PluginRemote:
         peer, peerReadLoop = await rpc_reader.prepare_peer_readloop(self.loop, rpcTransport)
         peer.onProxySerialization = lambda value, proxyId: onProxySerialization(
             value, proxyId, clusterPeerPort)
-        future = asyncio.Future[rpc.RpcPeer]()
+        future: asyncio.Future[rpc.RpcPeer] = asyncio.Future()
         future.set_result(peer)
         clusterPeers[clusterPeerPort] = future

@@ -1,9 +1,9 @@
 async function main() {
     const response = await fetch('https://registry.npmjs.org/@scrypted/server');
     const json = await response.json();
-    console.log(json['dist-tags'][process.argv[2]]);
-    // const packageJson = require('../package.json');
-    // console.log(packageJson.version);
+    // console.log(json['dist-tags'][process.argv[2]]);
+    const packageJson = require('../package.json');
+    console.log(packageJson.version);
 }

 main();