global: remove rtsp_transport flags from everywhere. It will be handled in the rebroadcast plugin.

This commit is contained in:
Koushik Dutta
2022-04-12 00:00:38 -07:00
parent 751cc2a238
commit 8b9778afb9
13 changed files with 127 additions and 118 deletions

View File

@@ -178,6 +178,15 @@ export function parseMSection(msection: string[]) {
codec = 'h264';
}
let direction: string;
for (const checkDirection of ['sendonly' , 'sendrecv', 'recvonly' , 'inactive']) {
const found = msection.find(line => line === 'a=' + checkDirection);
if (found) {
direction = checkDirection;
break;
}
}
return {
...parseMLine(msection[0]),
fmtp: parseFmtp(msection),
@@ -185,6 +194,7 @@ export function parseMSection(msection: string[]) {
contents: msection.join('\r\n'),
control,
codec,
direction,
}
}

View File

@@ -58,7 +58,7 @@ class AmcrestCamera extends RtspSmartCamera implements VideoCameraConfiguration,
const url = `rtsp://${this.getRtspAddress()}/cam/playback?channel=1&starttime=${year}_${month}_${date}_${hours}_${minutes}_${seconds}`;
const authedUrl = this.addRtspCredentials(url);
return this.createFfmpegMediaObject(authedUrl, undefined);
return this.createMediaStreamUrl(authedUrl, undefined);
}
getRecordingStreamOptions(): Promise<MediaStreamOptions[]> {

View File

@@ -1,4 +1,4 @@
import sdk, { DeviceManifest, DeviceProvider, HttpRequest, HttpRequestHandler, HttpResponse, HumiditySensor, MediaObject, MotionSensor, OauthClient, Refresh, ScryptedDeviceType, ScryptedInterface, Setting, Settings, TemperatureSetting, TemperatureUnit, Thermometer, ThermostatMode, VideoCamera, MediaStreamOptions, BinarySensor, DeviceInformation, RTCAVSignalingSetup, Camera, PictureOptions, ObjectsDetected, ObjectDetector, ObjectDetectionTypes, FFmpegInput, RequestMediaStreamOptions, Readme, RTCSignalingChannel, RTCSessionControl, RTCSignalingSession, ResponseMediaStreamOptions, RTCSignalingOptions, RTCSignalingSendIceCandidate } from '@scrypted/sdk';
import sdk, { DeviceManifest, DeviceProvider, HttpRequest, HttpRequestHandler, HttpResponse, HumiditySensor, MediaObject, MotionSensor, OauthClient, Refresh, ScryptedDeviceType, ScryptedInterface, Setting, Settings, TemperatureSetting, TemperatureUnit, Thermometer, ThermostatMode, VideoCamera, MediaStreamOptions, BinarySensor, DeviceInformation, RTCAVSignalingSetup, Camera, PictureOptions, ObjectsDetected, ObjectDetector, ObjectDetectionTypes, FFmpegInput, RequestMediaStreamOptions, Readme, RTCSignalingChannel, RTCSessionControl, RTCSignalingSession, ResponseMediaStreamOptions, RTCSignalingOptions, RTCSignalingSendIceCandidate, ScryptedMimeTypes, MediaStreamUrl } from '@scrypted/sdk';
import { ScryptedDeviceBase } from '@scrypted/sdk';
import qs from 'query-string';
import ClientOAuth2 from 'client-oauth2';
@@ -268,14 +268,12 @@ class NestCamera extends ScryptedDeviceBase implements Readme, Camera, VideoCame
}
createFFmpegMediaObject(trackerId: string, url: string) {
return mediaManager.createFFmpegMediaObject({
const ret: MediaStreamUrl = {
url,
mediaStreamOptions: this.addRefreshOptions(trackerId, getSdmRtspMediaStreamOptions()),
inputArguments: [
"-rtsp_transport", "tcp",
"-i", url,
],
});
};
return this.createMediaObject(ret, ScryptedMimeTypes.MediaStreamUrl);
}
get isWebRtc() {

View File

@@ -1,7 +1,7 @@
import sdk, { MediaObject, Intercom, FFmpegInput, ScryptedMimeTypes } from "@scrypted/sdk";
import { RtspSmartCamera } from "../../rtsp/src/rtsp";
import { parseSemicolonDelimited, RtspClient } from "@scrypted/common/src/rtsp-server";
import { findTrack } from "@scrypted/common/src/sdp-utils";
import { parseSdp } from "@scrypted/common/src/sdp-utils";
import { ffmpegLogInitialOutput, safePrintFFmpegArguments } from "@scrypted/common/src/media-helpers";
import child_process from 'child_process';
@@ -84,7 +84,8 @@ export class OnvifIntercom implements Intercom {
});
this.camera.console.log('ONVIF Backchannel SDP:');
this.camera.console.log(describe.body?.toString());
const audioBackchannel = findTrack(describe.body.toString(), 'audio', ['sendonly']);
const parsedSdp = parseSdp(describe.body.toString());
const audioBackchannel = parsedSdp.msections.find(msection => msection.type === 'audio' && msection.direction === 'sendonly');
if (!audioBackchannel)
throw new Error('ONVIF audio backchannel not found');
@@ -96,7 +97,7 @@ export class OnvifIntercom implements Intercom {
Transport: `RTP/AVP;unicast;client_port=${rtp}-${rtcp}`,
};
const response = await this.intercomClient.request('SETUP', headers, audioBackchannel.trackId);
const response = await this.intercomClient.request('SETUP', headers, audioBackchannel.control);
const transportDict = parseSemicolonDelimited(response.headers.transport);
this.intercomClient.session = response.headers.session.split(';')[0];
@@ -108,7 +109,7 @@ export class OnvifIntercom implements Intercom {
const ffmpegInput = await mediaManager.convertMediaObjectToJSON<FFmpegInput>(media, ScryptedMimeTypes.FFmpegInput);
const availableCodecs = [...parseCodecs(audioBackchannel.section)];
const availableCodecs = [...parseCodecs(audioBackchannel.contents)];
let match: CodecMatch;
let codec: SupportedCodec;
for (const supported of availableCodecs) {

View File

@@ -182,7 +182,7 @@ class PrebufferSession {
getParser(rtspMode: boolean, muxingMp4: boolean, mediaStreamOptions: MediaStreamOptions) {
if (!this.canUseRtspParser(muxingMp4, mediaStreamOptions))
return FFMPEG_PARSER_TCP;
return STRING_DEFAULT;
const defaultValue = rtspMode
&& mediaStreamOptions?.tool === 'scrypted' ?

View File

@@ -127,6 +127,7 @@ export async function startRFC4571Parser(console: Console, socket: Readable, sdp
try {
const parsedSps = spsParse(sps);
inputVideoResolution = getSpsResolution(parsedSps);
console.log(inputVideoResolution);
console.log('parsed bitstream sps', parsedSps);
}
catch (e) {

View File

@@ -2,9 +2,9 @@ import { closeQuiet, createBindZero, listenZeroSingleClient } from '@scrypted/co
import { RefreshPromise } from "@scrypted/common/src/promise-utils";
import { connectRTCSignalingClients } from '@scrypted/common/src/rtc-connect';
import { RtspServer } from '@scrypted/common/src/rtsp-server';
import { addTrackControls, replacePorts } from '@scrypted/common/src/sdp-utils';
import { addTrackControls, parseSdp, replacePorts } from '@scrypted/common/src/sdp-utils';
import { StorageSettings } from '@scrypted/common/src/settings';
import sdk, { BinarySensor, Camera, Device, DeviceDiscovery, DeviceProvider, FFmpegInput, Intercom, MediaObject, MotionSensor, OnOff, PictureOptions, RequestMediaStreamOptions, RequestPictureOptions, ResponseMediaStreamOptions, RTCAVSignalingSetup, RTCSessionControl, RTCSignalingChannel, RTCSignalingSendIceCandidate, RTCSignalingSession, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera } from '@scrypted/sdk';
import sdk, { BinarySensor, Camera, Device, DeviceDiscovery, DeviceProvider, FFmpegInput, Intercom, MediaObject, MediaStreamUrl, MotionSensor, OnOff, PictureOptions, RequestMediaStreamOptions, RequestPictureOptions, ResponseMediaStreamOptions, RTCAVSignalingSetup, RTCSessionControl, RTCSignalingChannel, RTCSignalingSendIceCandidate, RTCSignalingSession, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, Settings, SettingValue, VideoCamera } from '@scrypted/sdk';
import child_process, { ChildProcess } from 'child_process';
import dgram from 'dgram';
import { RtcpReceiverInfo, RtcpRrPacket } from '../../../external/werift/packages/rtp/src/rtcp/rr';
@@ -16,8 +16,7 @@ import { encodeSrtpOptions, getPayloadType, getSequenceNumber, isRtpMessagePaylo
enum CaptureModes {
Default = 'Default',
UDP = 'RTSP+UDP',
TCP = 'RTSP+TCP',
RTSP = 'RTSP',
FFmpeg = 'FFmpeg Direct Capture',
}
@@ -82,7 +81,8 @@ class RingCameraDevice extends ScryptedDeviceBase implements Intercom, Settings,
rtpDescription: RtpDescription;
audioOutForwarder: dgram.Socket;
audioOutProcess: ChildProcess;
ffmpegInput: FFmpegInput;
currentMedia: FFmpegInput|MediaStreamUrl;
currentMediaMimeType: string;
refreshTimeout: NodeJS.Timeout;
picturePromise: RefreshPromise<Buffer>;
@@ -162,14 +162,17 @@ class RingCameraDevice extends ScryptedDeviceBase implements Intercom, Settings,
async getVideoStream(options?: RequestMediaStreamOptions): Promise<MediaObject> {
if (options?.refreshAt) {
if (!this.ffmpegInput?.mediaStreamOptions)
if (options?.metadata?.refreshAt) {
if (!this.currentMedia?.mediaStreamOptions)
throw new Error("no stream to refresh");
const ffmpegInput = this.ffmpegInput;
ffmpegInput.mediaStreamOptions.refreshAt = Date.now() + STREAM_TIMEOUT;
const currentMedia = this.currentMedia;
currentMedia.mediaStreamOptions.refreshAt = Date.now() + STREAM_TIMEOUT;
currentMedia.mediaStreamOptions.metadata = {
refreshAt: currentMedia.mediaStreamOptions.refreshAt
};
this.resetStreamTimeout();
return mediaManager.createMediaObject(Buffer.from(JSON.stringify(ffmpegInput)), ScryptedMimeTypes.FFmpegInput);
return mediaManager.createMediaObject(currentMedia, this.currentMediaMimeType);
}
this.stopSession();
@@ -178,7 +181,6 @@ class RingCameraDevice extends ScryptedDeviceBase implements Intercom, Settings,
const { clientPromise: playbackPromise, port: playbackPort, url: clientUrl } = await listenZeroSingleClient();
const useRtsp = this.storageSettings.values.captureMode !== CaptureModes.FFmpeg;
const useRtspTcp = this.storageSettings.values.captureMode === CaptureModes.TCP;
const playbackUrl = useRtsp ? `rtsp://127.0.0.1:${playbackPort}` : clientUrl;
@@ -225,9 +227,10 @@ class RingCameraDevice extends ScryptedDeviceBase implements Intercom, Settings,
if (useRtsp) {
const rtsp = new RtspServer(client, sdp, udp);
const parsedSdp = parseSdp(rtsp.sdp);
const videoTrack = parsedSdp.msections.find(msection => msection.type === 'video').control;
const audioTrack = parsedSdp.msections.find(msection => msection.type === 'audio').control;
rtsp.console = this.console;
rtsp.audioChannel = 0;
rtsp.videoChannel = 2;
await rtsp.handlePlayback();
sip.videoSplitter.on('message', message => {
@@ -236,7 +239,7 @@ class RingCameraDevice extends ScryptedDeviceBase implements Intercom, Settings,
if (!isRtpMessage)
return;
vseen++;
rtsp.sendVideo(message, !isRtpMessage);
rtsp.sendTrack(videoTrack, message, !isRtpMessage);
const seq = getSequenceNumber(message);
if (seq !== (vseq + 1) % 0x0FFFF)
vlost++;
@@ -245,7 +248,7 @@ class RingCameraDevice extends ScryptedDeviceBase implements Intercom, Settings,
});
sip.videoRtcpSplitter.on('message', message => {
rtsp.sendVideo(message, true);
rtsp.sendTrack(videoTrack, message, true);
});
sip.videoSplitter.once('message', message => {
@@ -294,7 +297,7 @@ class RingCameraDevice extends ScryptedDeviceBase implements Intercom, Settings,
if (!isRtpMessage)
return;
aseen++;
rtsp.sendAudio(message, !isRtpMessage);
rtsp.sendTrack(audioTrack, message, !isRtpMessage);
const seq = getSequenceNumber(message);
if (seq !== (aseq + 1) % 0x0FFFF)
alost++;
@@ -303,7 +306,7 @@ class RingCameraDevice extends ScryptedDeviceBase implements Intercom, Settings,
});
sip.audioRtcpSplitter.on('message', message => {
rtsp.sendAudio(message, true);
rtsp.sendTrack(audioTrack, message, true);
});
sip.requestKeyFrame();
@@ -344,22 +347,34 @@ class RingCameraDevice extends ScryptedDeviceBase implements Intercom, Settings,
}
});
this.resetStreamTimeout();
const mediaStreamOptions = Object.assign(this.getSipMediaStreamOptions(), {
refreshAt: Date.now() + STREAM_TIMEOUT,
});
if (useRtsp) {
const mediaStreamUrl: MediaStreamUrl = {
url: playbackUrl,
mediaStreamOptions,
};
this.currentMedia = mediaStreamUrl;
this.currentMediaMimeType = ScryptedMimeTypes.MediaStreamUrl;
return mediaManager.createMediaObject(mediaStreamUrl, ScryptedMimeTypes.MediaStreamUrl);
}
const ffmpegInput: FFmpegInput = {
url: playbackUrl,
mediaStreamOptions: Object.assign(this.getSipMediaStreamOptions(), {
refreshAt: Date.now() + STREAM_TIMEOUT,
}),
mediaStreamOptions,
inputArguments: [
...(useRtsp
? ['-rtsp_transport', useRtspTcp ? 'tcp' : 'udp']
: ['-f', 'sdp']),
'-f', 'sdp',
'-i', playbackUrl,
],
};
this.ffmpegInput = ffmpegInput;
this.resetStreamTimeout();
this.currentMedia = ffmpegInput;
this.currentMediaMimeType = ScryptedMimeTypes.FFmpegInput;
return mediaManager.createMediaObject(Buffer.from(JSON.stringify(ffmpegInput)), ScryptedMimeTypes.FFmpegInput);
return mediaManager.createFFmpegMediaObject(ffmpegInput);
}
getSipMediaStreamOptions(): ResponseMediaStreamOptions {

View File

@@ -1,4 +1,4 @@
import sdk, { Setting, MediaObject, ScryptedInterface, FFmpegInput, PictureOptions, SettingValue, MediaStreamOptions, ResponseMediaStreamOptions, ScryptedMimeTypes } from "@scrypted/sdk";
import sdk, { Setting, MediaObject, ScryptedInterface, FFmpegInput, PictureOptions, SettingValue, MediaStreamOptions, ResponseMediaStreamOptions, ScryptedMimeTypes, MediaStreamUrl } from "@scrypted/sdk";
import { EventEmitter } from "stream";
import { CameraProviderBase, CameraBase, UrlMediaStreamOptions } from "../../ffmpeg-camera/src/common";
import url from 'url';
@@ -71,17 +71,13 @@ export class RtspCamera extends CameraBase<UrlMediaStreamOptions> {
return stringUrl;
}
createFfmpegMediaObject(stringUrl: string, vso: ResponseMediaStreamOptions) {
const ret: FFmpegInput = {
createMediaStreamUrl(stringUrl: string, vso: ResponseMediaStreamOptions) {
const ret: MediaStreamUrl = {
url: stringUrl,
inputArguments: [
"-rtsp_transport", this.getRtspTransport(),
"-i", stringUrl,
],
mediaStreamOptions: vso,
};
return this.createMediaObject(ret, ScryptedMimeTypes.FFmpegInput);
return this.createMediaObject(ret, ScryptedMimeTypes.MediaStreamUrl);
}
async createVideoStream(vso: UrlMediaStreamOptions): Promise<MediaObject> {
@@ -89,7 +85,7 @@ export class RtspCamera extends CameraBase<UrlMediaStreamOptions> {
throw new Error('video streams not set up or no longer exists.');
const stringUrl = this.addRtspCredentials(vso.url);
return this.createFfmpegMediaObject(stringUrl, vso);
return this.createMediaStreamUrl(stringUrl, vso);
}
// hide the description from CameraBase that indicates it is only used for snapshots
@@ -115,28 +111,8 @@ export class RtspCamera extends CameraBase<UrlMediaStreamOptions> {
];
}
async getRtspTransportSettings(): Promise<Setting[]> {
return [
{
key: 'rtspTransport',
title: 'RTSP Transport',
group: 'Advanced',
description: 'The RTSP Transport to use when streaming video. TCP is the default.',
value: this.getRtspTransport(),
choices: [
'tcp',
'udp',
],
},
]
}
getRtspTransport() {
return this.storage.getItem('rtspTransport') || 'tcp'
}
async getOtherSettings(): Promise<Setting[]> {
return this.getRtspTransportSettings();
return [];
}
async getUrlSettings(): Promise<Setting[]> {

View File

@@ -1,8 +1,8 @@
import sdk, { ScryptedDeviceBase, DeviceProvider, HttpRequest, HttpRequestHandler, HttpResponse, Settings, Setting, ScryptedDeviceType, VideoCamera, MediaObject, Device, MotionSensor, ScryptedInterface, Camera, MediaStreamOptions, PictureOptions, ResponseMediaStreamOptions, ScryptedMimeTypes } from "@scrypted/sdk";
import sdk, { Camera, Device, DeviceProvider, HttpRequest, HttpRequestHandler, HttpResponse, MediaObject, MediaStreamOptions, MediaStreamUrl, MotionSensor, PictureOptions, ResponseMediaStreamOptions, ScryptedDeviceBase, ScryptedDeviceType, ScryptedInterface, ScryptedMimeTypes, Setting, Settings, VideoCamera } from "@scrypted/sdk";
import { createInstanceableProviderPlugin, enableInstanceableProviderMode, isInstanceableProviderModeEnabled } from '../../../common/src/provider-plugin';
import { SynologyApiClient, SynologyCameraStream, SynologyCamera } from "./api/synology-api-client";
import { SynologyApiClient, SynologyCamera, SynologyCameraStream } from "./api/synology-api-client";
const { deviceManager, mediaManager } = sdk;
const { deviceManager } = sdk;
class SynologyCameraDevice extends ScryptedDeviceBase implements Camera, HttpRequestHandler, MotionSensor, Settings, VideoCamera {
private static readonly DefaultSensorTimeoutSecs: number = 30;
@@ -90,14 +90,11 @@ class SynologyCameraDevice extends ScryptedDeviceBase implements Camera, HttpReq
rtspPath = liveViewPaths[0].rtspPath;
}
return this.createMediaObject({
const mediaStreamUrl: MediaStreamUrl = {
url: rtspPath,
inputArguments: [
"-rtsp_transport", "tcp",
"-i", rtspPath,
],
mediaStreamOptions: this.createMediaStreamOptions(rtspChannel),
}, ScryptedMimeTypes.FFmpegInput);
}
return this.createMediaObject(mediaStreamUrl, ScryptedMimeTypes.MediaStreamUrl);
}
private createMediaStreamOptions(stream: SynologyCameraStream) {

View File

@@ -3,7 +3,7 @@ import { closeQuiet, createBindZero, listenZeroSingleClient } from "@scrypted/co
import { safeKillFFmpeg } from "@scrypted/common/src/media-helpers";
import { connectRTCSignalingClients } from "@scrypted/common/src/rtc-connect";
import { RtspServer } from "@scrypted/common/src/rtsp-server";
import { createSdpInput, findFmtp } from "@scrypted/common/src/sdp-utils";
import { createSdpInput, findFmtp, parseSdp } from "@scrypted/common/src/sdp-utils";
import { StorageSettings } from "@scrypted/common/src/settings";
import sdk, { FFmpegInput, Intercom, MediaStreamDestination, RTCAVSignalingSetup, RTCSignalingSession } from "@scrypted/sdk";
import { ChildProcess } from "child_process";
@@ -151,9 +151,13 @@ export async function createRTCPeerConnectionSink(
const rtspServer = new RtspServer(client, sdp, audioOutput.server);
// rtspServer.console = console;
await rtspServer.handlePlayback();
const parsedSdp = parseSdp(rtspServer.sdp);
const audioTrack = parsedSdp.msections.find(msection => msection.type === 'audio').control;
track.onReceiveRtp.subscribe(rtpPacket => {
rtpPacket.header.payloadType = 110;
rtspServer.sendAudio(rtpPacket.serialize(), false);
rtspServer.sendTrack(audioTrack, rtpPacket.serialize(), false);
})
}
catch (e) {

View File

@@ -97,14 +97,14 @@ class WebRTCMixin extends SettingsMixinDeviceBase<VideoCamera & RTCSignalingChan
return this.mixinDevice.getVideoStream(options);
}
const { ffmpegInput } = await createRTCPeerConnectionSource({
const { mediaObject } = await createRTCPeerConnectionSource({
console: this.console,
mediaStreamOptions: this.createVideoStreamOptions(),
channel: this.mixinDevice,
useUdp: this.storageSettings.values.useUdp,
});
return mediaManager.createFFmpegMediaObject(ffmpegInput);
return mediaObject;
}
async getVideoStreamOptions(): Promise<ResponseMediaStreamOptions[]> {
@@ -115,7 +115,6 @@ class WebRTCMixin extends SettingsMixinDeviceBase<VideoCamera & RTCSignalingChan
ret.push(this.createVideoStreamOptions());
return ret;
}
}
class WebRTCPlugin extends AutoenableMixinProvider implements DeviceCreator, DeviceProvider, BufferConverter, MixinProvider, Settings {

View File

@@ -27,7 +27,7 @@ export class WebRTCCamera extends ScryptedDeviceBase implements VideoCamera, RTC
async getVideoStream(options?: RequestMediaStreamOptions): Promise<MediaObject> {
const mediaStreamOptions = getRTCMediaStreamOptions('webrtc', 'WebRTC', true);
const { ffmpegInput, intercom } = await createRTCPeerConnectionSource({
const { mediaObject, intercom } = await createRTCPeerConnectionSource({
console: this.console,
mediaStreamOptions,
channel: this,
@@ -37,7 +37,7 @@ export class WebRTCCamera extends ScryptedDeviceBase implements VideoCamera, RTC
this.intercom?.then(intercom => intercom.stopIntercom());
this.intercom = intercom;
return mediaManager.createFFmpegMediaObject(ffmpegInput);
return mediaObject;
}
async getVideoStreamOptions(): Promise<ResponseMediaStreamOptions[]> {

View File

@@ -3,8 +3,8 @@ import { FullIntraRequest } from "@koush/werift/lib/rtp/src/rtcp/psfb/fullIntraR
import { listenZeroSingleClient } from "@scrypted/common/src/listen-cluster";
import { safeKillFFmpeg } from "@scrypted/common/src/media-helpers";
import { RtspServer } from "@scrypted/common/src/rtsp-server";
import { createSdpInput } from '@scrypted/common/src/sdp-utils';
import sdk, { FFmpegInput, Intercom, MediaObject, ResponseMediaStreamOptions, RTCAVSignalingSetup, RTCSessionControl, RTCSignalingChannel, RTCSignalingOptions, RTCSignalingSendIceCandidate, RTCSignalingSession, ScryptedMimeTypes } from "@scrypted/sdk";
import { createSdpInput, parseSdp } from '@scrypted/common/src/sdp-utils';
import sdk, { FFmpegInput, Intercom, MediaObject, MediaStreamUrl, ResponseMediaStreamOptions, RTCAVSignalingSetup, RTCSessionControl, RTCSignalingChannel, RTCSignalingOptions, RTCSignalingSendIceCandidate, RTCSignalingSession, ScryptedMimeTypes } from "@scrypted/sdk";
import { ChildProcess } from "child_process";
import dgram from 'dgram';
import { Socket } from "net";
@@ -15,7 +15,7 @@ import { createRawResponse, isPeerConnectionAlive } from "./werift-util";
const { mediaManager } = sdk;
export interface RTCPeerConnectionPipe {
ffmpegInput: FFmpegInput;
mediaObject: MediaObject;
intercom: Promise<Intercom>;
}
@@ -55,8 +55,6 @@ export async function createRTCPeerConnectionSource(options: {
socket = client;
const rtspServer = new RtspServer(socket, undefined, udp);
// rtspServer.console = console;
rtspServer.audioChannel = 0;
rtspServer.videoChannel = 2;
const pc = new RTCPeerConnection({
codecs: {
@@ -115,15 +113,16 @@ export async function createRTCPeerConnectionSource(options: {
audioTransceiver = pc.addTransceiver("audio", setup.audio as any);
audioTransceiver.onTrack.subscribe((track) => {
const audioTrack = parseSdp(rtspServer.sdp).msections.find(msection => msection.type === 'audio').control;
if (useUdp) {
track.onReceiveRtp.subscribe(rtp => {
if (!gotAudio) {
gotAudio = true;
console.log('received first audio packet');
}
rtspServer.sendAudio(rtp.serialize(), false);
rtspServer.sendTrack(audioTrack, rtp.serialize(), false);
});
track.onReceiveRtcp.subscribe(rtp => rtspServer.sendAudio(rtp.serialize(), true));
track.onReceiveRtcp.subscribe(rtp => rtspServer.sendTrack(audioTrack, rtp.serialize(), true));
}
else {
const jitter = new JitterBuffer({
@@ -133,7 +132,7 @@ export async function createRTCPeerConnectionSource(options: {
class RtspOutput extends Output {
pushRtcpPackets(packets: RtcpPacket[]): void {
for (const rtcp of packets) {
rtspServer.sendAudio(rtcp.serialize(), true)
rtspServer.sendTrack(audioTrack, rtcp.serialize(), true)
}
}
pushRtpPackets(packets: RtpPacket[]): void {
@@ -142,7 +141,7 @@ export async function createRTCPeerConnectionSource(options: {
console.log('received first audio packet');
}
for (const rtp of packets) {
rtspServer.sendAudio(rtp.serialize(), false);
rtspServer.sendTrack(audioTrack, rtp.serialize(), false);
}
}
}
@@ -152,15 +151,16 @@ export async function createRTCPeerConnectionSource(options: {
const videoTransceiver = pc.addTransceiver("video", setup.video as any);
videoTransceiver.onTrack.subscribe((track) => {
const videoTrack = parseSdp(rtspServer.sdp).msections.find(msection => msection.type === 'video').control;
if (useUdp) {
track.onReceiveRtp.subscribe(rtp => {
if (!gotVideo) {
gotVideo = true;
console.log('received first video packet');
}
rtspServer.sendVideo(rtp.serialize(), false);
rtspServer.sendTrack(videoTrack, rtp.serialize(), false);
});
track.onReceiveRtcp.subscribe(rtp => rtspServer.sendVideo(rtp.serialize(), true));
track.onReceiveRtcp.subscribe(rtp => rtspServer.sendTrack(videoTrack, rtp.serialize(), true));
}
else {
const jitter = new JitterBuffer({
@@ -170,7 +170,7 @@ export async function createRTCPeerConnectionSource(options: {
class RtspOutput extends Output {
pushRtcpPackets(packets: RtcpPacket[]): void {
for (const rtcp of packets) {
rtspServer.sendVideo(rtcp.serialize(), true)
rtspServer.sendTrack(videoTrack, rtcp.serialize(), true)
}
}
pushRtpPackets(packets: RtpPacket[]): void {
@@ -179,7 +179,7 @@ export async function createRTCPeerConnectionSource(options: {
console.log('received first video packet');
}
for (const rtp of packets) {
rtspServer.sendVideo(rtp.serialize(), false);
rtspServer.sendTrack(videoTrack, rtp.serialize(), false);
}
}
}
@@ -222,9 +222,21 @@ export async function createRTCPeerConnectionSource(options: {
console.log('sdp sent', rtspServer.sdp);
if (useUdp) {
rtspServer.udpPorts = {
video: videoPort,
audio: audioPort,
const parsedSdp = parseSdp(rtspServer.sdp);
const videoTrack = parsedSdp.msections.find(msection => msection.type === 'video').control;
const audioTrack = parsedSdp.msections.find(msection => msection.type === 'audio').control;
rtspServer.setupTracks[videoTrack] = {
protocol: 'udp',
destination: videoPort,
codec: undefined,
control: videoTrack,
};
rtspServer.setupTracks[audioTrack] = {
protocol: 'udp',
destination: audioPort,
codec: undefined,
control: audioTrack,
};
rtspServer.client.write(rtspServer.sdp + '\r\n');
rtspServer.client.end();
@@ -330,7 +342,7 @@ export async function createRTCPeerConnectionSource(options: {
const { cp } = await startRtpForwarderProcess(console, ffmpegInput.inputArguments, {
audio: {
outputArguments: getFFmpegRtpAudioOutputArguments(),
outputArguments: getFFmpegRtpAudioOutputArguments(ffmpegInput.mediaStreamOptions?.audio?.codec),
transceiver: audioTransceiver,
},
});
@@ -348,12 +360,11 @@ export async function createRTCPeerConnectionSource(options: {
return ret;
});
let ffmpegInput: FFmpegInput;
if (useUdp) {
const url = `tcp://127.0.0.1:${port}`;
mediaStreamOptions.container = 'sdp';
ffmpegInput = {
const ffmpegInput: FFmpegInput = {
url,
mediaStreamOptions,
inputArguments: [
@@ -379,28 +390,25 @@ export async function createRTCPeerConnectionSource(options: {
'-i', url,
]
};
return {
mediaObject: await mediaManager.createFFmpegMediaObject(ffmpegInput),
intercom,
};
}
else {
const url = `rtsp://127.0.0.1:${port}`;
ffmpegInput = {
const mediaStreamUrl : MediaStreamUrl = {
url,
mediaStreamOptions,
inputArguments: [
"-rtsp_transport", "tcp",
// see above for udp comments.
// unclear what this does in tcp. out of order packets in a tcp
// stream probably breaks things.
// should possibly use the werift jitter buffer in tcp mode to accommodate.
// "-max_delay", "0",
'-i', url,
]
};
return {
mediaObject: await mediaManager.createMediaObject(mediaStreamUrl, ScryptedMimeTypes.MediaStreamUrl),
intercom,
};
}
return {
ffmpegInput,
intercom,
}
}
interface ReceivedRtpPacket extends RtpPacket {