homebridge#CameraStreamingDelegate TypeScript Examples
The following examples show how to use homebridge#CameraStreamingDelegate.
Each example is taken from an open-source Homebridge plugin; the source file, originating project, and license are noted above each listing.
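Before the full plugin listings, here is a minimal sketch of the shape of the interface: the three callbacks a delegate implements and how it is attached to a CameraController. The class name MinimalCameraDelegate, the stubbed-out method bodies, and the single 720p resolution are illustrative assumptions rather than code from any of the plugins below; real delegates reserve RTP ports, spawn FFmpeg, and reply with SRTP parameters, as the examples show.
import {
  API,
  CameraController,
  CameraStreamingDelegate,
  PrepareStreamCallback,
  PrepareStreamRequest,
  SnapshotRequest,
  SnapshotRequestCallback,
  StreamingRequest,
  StreamRequestCallback,
} from 'homebridge';

class MinimalCameraDelegate implements CameraStreamingDelegate {
  readonly controller: CameraController;

  constructor(private readonly api: API) {
    // HAP runtime values are taken from api.hap, mirroring the examples below.
    this.controller = new api.hap.CameraController({
      cameraStreamCount: 2,
      delegate: this,
      streamingOptions: {
        supportedCryptoSuites: [api.hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80],
        video: {
          resolutions: [[1280, 720, 30]],
          codec: {
            profiles: [api.hap.H264Profile.MAIN],
            levels: [api.hap.H264Level.LEVEL3_1],
          },
        },
      },
    });
  }

  handleSnapshotRequest(request: SnapshotRequest, callback: SnapshotRequestCallback): void {
    // A real delegate returns a JPEG buffer sized to request.width x request.height.
    callback(new Error('snapshots not implemented in this sketch'));
  }

  prepareStream(request: PrepareStreamRequest, callback: PrepareStreamCallback): void {
    // A real delegate reserves local return ports, generates SSRCs, and replies
    // with a PrepareStreamResponse keyed by request.sessionID.
    callback(new Error('streaming not implemented in this sketch'));
  }

  handleStreamRequest(request: StreamingRequest, callback: StreamRequestCallback): void {
    // START / RECONFIGURE / STOP are dispatched on request.type; the examples
    // below switch on StreamRequestTypes and spawn or stop FFmpeg accordingly.
    callback();
  }
}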
Example #1
Source File: streaming-delegate.ts from homebridge-nest-cam (GNU General Public License v3.0)
export class StreamingDelegate implements CameraStreamingDelegate {
private readonly hap: HAP;
private readonly log: Logging;
private readonly config: NestConfig;
private customFfmpeg: string | undefined;
private videoProcessor: string;
private ffmpegCodec = 'libx264';
private ffmpegInstalled = true;
private ffmpegSupportsLibfdk_acc = true;
private ffmpegSupportsLibspeex = true;
private camera: NestCam;
controller?: CameraController;
// keep track of sessions
private pendingSessions: Record<string, SessionInfo> = {};
private ongoingSessions: Record<string, Array<FfmpegProcess | undefined>> = {};
private ongoingStreams: Record<string, NexusStreamer> = {};
constructor(hap: HAP, camera: NestCam, config: NestConfig, log: Logging) {
this.hap = hap;
this.log = log;
this.config = config;
this.camera = camera;
this.customFfmpeg = config.options?.pathToFfmpeg;
this.videoProcessor = this.customFfmpeg || pathToFfmpeg || 'ffmpeg';
// Get the correct video codec
getCodecsOutput(this.videoProcessor)
.then((output) => {
const codec = config.options?.ffmpegCodec;
if (codec === 'copy' || (codec && output.includes(codec))) {
this.ffmpegCodec = codec;
} else {
this.log.error(`Unknown video codec ${codec}. Defaulting to libx264.`);
}
this.ffmpegSupportsLibfdk_acc = output.includes('libfdk_aac');
this.ffmpegSupportsLibspeex = output.includes('libspeex');
})
.catch(() => {
// skip
});
// Check if ffmpeg is installed
isFfmpegInstalled(this.videoProcessor)
.then((installed) => {
this.ffmpegInstalled = installed;
})
.catch(() => {
// skip
});
}
private getOfflineImage(callback: SnapshotRequestCallback): void {
const log = this.log;
readFile(join(__dirname, `../images/offline.jpg`), (err, data) => {
if (err) {
log.error(err.message);
callback(err);
} else {
callback(undefined, data);
}
});
}
handleSnapshotRequest(request: SnapshotRequest, callback: SnapshotRequestCallback): void {
if (this.camera.info.properties['streaming.enabled']) {
this.camera
.getSnapshot(request.height)
.then((snapshot) => {
callback(undefined, snapshot);
})
.catch((error) => {
handleError(this.log, error, `Error fetching snapshot for ${this.camera.info.name}`);
callback(error);
});
} else {
this.getOfflineImage(callback);
}
}
async prepareStream(request: PrepareStreamRequest, callback: PrepareStreamCallback): Promise<void> {
const sessionId: StreamSessionIdentifier = request.sessionID;
const targetAddress = request.targetAddress;
//video setup
const video = request.video;
const videoPort = video.port;
const returnVideoPort = (await reservePorts())[0];
const videoCryptoSuite = video.srtpCryptoSuite;
const videoSrtpKey = video.srtp_key;
const videoSrtpSalt = video.srtp_salt;
const videoSSRC = this.hap.CameraController.generateSynchronisationSource();
//audio setup
const audio = request.audio;
const audioPort = audio.port;
const returnAudioPort = (await reservePorts())[0];
const twoWayAudioPort = (await reservePorts(2))[0];
const audioServerPort = (await reservePorts())[0];
const audioCryptoSuite = audio.srtpCryptoSuite;
const audioSrtpKey = audio.srtp_key;
const audioSrtpSalt = audio.srtp_salt;
const audioSSRC = this.hap.CameraController.generateSynchronisationSource();
const sessionInfo: SessionInfo = {
address: targetAddress,
videoPort: videoPort,
returnVideoPort: returnVideoPort,
videoCryptoSuite: videoCryptoSuite,
videoSRTP: Buffer.concat([videoSrtpKey, videoSrtpSalt]),
videoSSRC: videoSSRC,
audioPort: audioPort,
returnAudioPort: returnAudioPort,
twoWayAudioPort: twoWayAudioPort,
rtpSplitter: new RtpSplitter(audioServerPort, returnAudioPort, twoWayAudioPort),
audioCryptoSuite: audioCryptoSuite,
audioSRTP: Buffer.concat([audioSrtpKey, audioSrtpSalt]),
audioSSRC: audioSSRC,
};
const response: PrepareStreamResponse = {
video: {
port: returnVideoPort,
ssrc: videoSSRC,
srtp_key: videoSrtpKey,
srtp_salt: videoSrtpSalt,
},
audio: {
port: audioServerPort,
ssrc: audioSSRC,
srtp_key: audioSrtpKey,
srtp_salt: audioSrtpSalt,
},
};
this.pendingSessions[sessionId] = sessionInfo;
callback(undefined, response);
}
private getVideoCommand(info: VideoInfo, sessionId: string): Array<string> {
const sessionInfo = this.pendingSessions[sessionId];
const videoPort = sessionInfo.videoPort;
const returnVideoPort = sessionInfo.returnVideoPort;
const videoSsrc = sessionInfo.videoSSRC;
const videoSRTP = sessionInfo.videoSRTP.toString('base64');
const address = sessionInfo.address;
// Multiply the bitrate because homekit requests extremely low bitrates
const bitrate = info.max_bit_rate * 4;
// const fps = info.fps;
const videoPayloadType = info.pt;
const mtu = info.mtu; // maximum transmission unit
const output = [
'-payload_type',
videoPayloadType.toString(),
'-ssrc',
videoSsrc.toString(),
'-f',
'rtp',
'-srtp_out_suite',
'AES_CM_128_HMAC_SHA1_80',
'-srtp_out_params',
videoSRTP,
`srtp://${address}:${videoPort}?rtcpport=${videoPort}&localrtcpport=${returnVideoPort}&pkt_size=${mtu}`,
];
if (!this.camera.info.properties['streaming.enabled']) {
return [
'-loop',
'1',
'-i',
join(__dirname, `../images/offline.jpg`),
'-c:v',
this.ffmpegCodec,
...(this.ffmpegCodec === 'libx264' ? ['-preset', 'ultrafast', '-tune', 'zerolatency'] : []),
'-pix_fmt',
'yuv420p',
'-an',
...output,
];
}
return [
'-f',
'h264',
'-use_wallclock_as_timestamps',
'1',
'-r',
'15',
'-i',
'pipe:',
'-c:v',
this.ffmpegCodec,
...(this.ffmpegCodec === 'libx264' ? ['-preset', 'ultrafast', '-tune', 'zerolatency'] : []),
'-bf',
'0',
'-b:v',
`${bitrate}k`,
'-bufsize',
`${bitrate}k`,
'-maxrate',
`${2 * bitrate}k`,
'-pix_fmt',
'yuv420p',
'-an',
...output,
];
}
private getAudioCommand(info: AudioInfo, sessionId: string): Array<string> | undefined {
const sessionInfo = this.pendingSessions[sessionId];
if (!sessionInfo) {
return;
}
const address = sessionInfo.address;
const audioPort = sessionInfo.audioPort;
const returnAudioPort = sessionInfo.returnAudioPort;
const audioSsrc = sessionInfo.audioSSRC;
const audioSRTP = sessionInfo.audioSRTP.toString('base64');
const audioPayloadType = info.pt;
const audioMaxBitrate = info.max_bit_rate;
const sampleRate = info.sample_rate;
return [
'-c:a',
'libfdk_aac',
'-i',
'pipe:',
'-c:a',
'libfdk_aac',
'-profile:a',
'aac_eld',
'-ac',
'1',
'-vn',
'-ar',
`${sampleRate}k`,
'-b:a',
`${audioMaxBitrate}k`,
'-flags',
'+global_header',
'-payload_type',
audioPayloadType.toString(),
'-ssrc',
audioSsrc.toString(),
'-f',
'rtp',
'-srtp_out_suite',
'AES_CM_128_HMAC_SHA1_80',
'-srtp_out_params',
audioSRTP,
`srtp://${address}:${audioPort}?rtcpport=${audioPort}&localrtcpport=${returnAudioPort}&pkt_size=188`,
];
}
private getReturnAudioCommand(info: AudioInfo, sessionId: string): Array<string> | undefined {
const sessionInfo = this.pendingSessions[sessionId];
if (!sessionInfo) {
return;
}
return [
'-hide_banner',
'-protocol_whitelist',
'pipe,udp,rtp,file,crypto',
'-f',
'sdp',
'-c:a',
'libfdk_aac',
'-i',
'pipe:0',
'-map',
'0:0',
'-c:a',
'libspeex',
'-frames_per_packet',
'4',
'-ac',
'1',
'-vn',
'-ar',
`16k`,
'-f',
'data',
'pipe:1',
];
}
handleStreamRequest(request: StreamingRequest, callback: StreamRequestCallback): void {
const sessionId = request.sessionID;
switch (request.type) {
case StreamRequestTypes.START:
const sessionInfo = this.pendingSessions[sessionId];
const video: VideoInfo = request.video;
const audio: AudioInfo = request.audio;
const address = sessionInfo.address;
const audioSRTP = sessionInfo.audioSRTP.toString('base64');
const twoWayAudioPort = sessionInfo.twoWayAudioPort;
if (!this.ffmpegInstalled) {
this.log.error('FFMPEG is not installed. Please install it and restart homebridge.');
callback(new Error('FFmpeg not installed'));
break;
}
const videoffmpegCommand = this.getVideoCommand(video, sessionId);
const ffmpegVideo = new FfmpegProcess(
'VIDEO',
videoffmpegCommand,
this.log,
this,
sessionId,
false,
this.customFfmpeg,
(error) => {
callback(error);
},
);
let ffmpegAudio: FfmpegProcess | undefined;
let ffmpegReturnAudio: FfmpegProcess | undefined;
if (this.camera.info.properties['audio.enabled'] && this.camera.info.properties['streaming.enabled']) {
if (this.ffmpegSupportsLibfdk_acc) {
const audioffmpegCommand = this.getAudioCommand(audio, sessionId);
if (audioffmpegCommand) {
ffmpegAudio = new FfmpegProcess(
'AUDIO',
audioffmpegCommand,
this.log,
this,
sessionId,
false,
this.customFfmpeg,
);
}
if (this.ffmpegSupportsLibspeex) {
const returnAudioffmpegCommand = this.getReturnAudioCommand(audio, sessionId);
if (returnAudioffmpegCommand) {
ffmpegReturnAudio = new FfmpegProcess(
'RETURN AUDIO',
returnAudioffmpegCommand,
this.log,
this,
sessionId,
false,
this.customFfmpeg,
);
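// Minimal SDP describing the AAC-ELD talk-back stream HomeKit sends to
// twoWayAudioPort; it is written to the return-audio FFmpeg process's stdin,
// which reads it via the '-f sdp' / '-i pipe:0' input defined above.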
const sdpReturnAudio = [
'v=0',
'o=- 0 0 IN IP4 127.0.0.1',
's=Talk',
`c=IN IP4 ${address}`,
't=0 0',
'a=tool:libavformat 58.38.100',
`m=audio ${twoWayAudioPort} RTP/AVP 110`,
'b=AS:24',
'a=rtpmap:110 MPEG4-GENERIC/16000/1',
'a=fmtp:110 profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3; config=F8F0212C00BC00',
`a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:${audioSRTP}`,
].join('\n');
ffmpegReturnAudio.getStdin()?.write(sdpReturnAudio);
ffmpegReturnAudio.getStdin()?.end();
}
} else {
this.log.error(
"This version of FFMPEG does not support the audio codec 'libspeex'. You may need to recompile FFMPEG using '--enable-libspeex' and restart homebridge.",
);
}
} else {
this.log.error(
"This version of FFMPEG does not support the audio codec 'libfdk_aac'. You may need to recompile FFMPEG using '--enable-libfdk_aac' and restart homebridge.",
);
}
}
if (this.camera.info.properties['streaming.enabled'] && this.pendingSessions[sessionId]) {
const streamer = new NexusStreamer(
this.camera.info,
this.config.access_token,
this.config.options?.streamQuality || 3,
ffmpegVideo,
ffmpegAudio,
ffmpegReturnAudio,
this.log,
this.config.nest_token !== undefined,
);
streamer.startPlayback();
this.ongoingStreams[sessionId] = streamer;
}
// Used to switch offline/online stream on-the-fly
// this.camera.on(NestCamEvents.CAMERA_STATE_CHANGED, (state) => {
// ffmpegVideo.stop();
// ffmpegAudio?.stop();
// ffmpegReturnAudio?.stop();
// const newVideoffmpegCommand = this.getVideoCommand(video, sessionId);
// const newFfmpegVideo = new FfmpegProcess(
// 'VIDEO',
// newVideoffmpegCommand,
// this.log,
// undefined,
// this,
// sessionId,
// true,
// this.customFfmpeg,
// );
// this.ongoingSessions[sessionId] = [newFfmpegVideo, ffmpegAudio, ffmpegReturnAudio];
// if (state) {
// const streamer = new NexusStreamer(
// this.camera.info,
// this.config.access_token,
// this.log,
// this.config,
// newFfmpegVideo,
// ffmpegAudio,
// ffmpegReturnAudio,
// );
// streamer.startPlayback();
// this.ongoingStreams[sessionId] = streamer;
// } else {
// const streamer = this.ongoingStreams[sessionId];
// streamer.stopPlayback();
// }
// });
this.ongoingSessions[sessionId] = [ffmpegVideo, ffmpegAudio, ffmpegReturnAudio];
break;
case StreamRequestTypes.RECONFIGURE:
// not implemented
this.log.debug('(Not implemented) Received request to reconfigure to: ' + JSON.stringify(request.video));
callback();
break;
case StreamRequestTypes.STOP:
this.stopStream(sessionId);
callback();
break;
}
}
public stopStream(sessionId: string): void {
try {
if (this.ongoingStreams[sessionId]) {
const streamer = this.ongoingStreams[sessionId];
streamer.stopPlayback();
}
if (this.ongoingSessions[sessionId]) {
const ffmpegVideoProcess = this.ongoingSessions[sessionId][0];
ffmpegVideoProcess?.stop();
if (this.ongoingSessions[sessionId].length > 1) {
const ffmpegAudioProcess = this.ongoingSessions[sessionId][1];
const ffmpegReturnAudioProcess = this.ongoingSessions[sessionId][2];
ffmpegAudioProcess?.stop();
ffmpegReturnAudioProcess?.stop();
}
}
const sessionInfo = this.pendingSessions[sessionId];
if (sessionInfo) {
sessionInfo.rtpSplitter.close();
}
delete this.pendingSessions[sessionId];
delete this.ongoingSessions[sessionId];
this.log.debug('Stopped streaming session!');
} catch (e: any) {
this.log.error('Error occurred terminating the video process!');
this.log.error(e);
}
}
}
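This delegate leaves its controller property unassigned; homebridge-nest-cam wires it up elsewhere in its accessory setup, which is not shown here. The following is a rough sketch of how such a delegate is typically attached to a platform accessory. The surrounding hap, camera, config, log and accessory variables, and the specific resolutions and codec options, are assumptions for illustration, not the plugin's actual configuration; AudioStreamingCodecType and AudioStreamingSamplerate are imported as in Examples #2 and #3.
const delegate = new StreamingDelegate(hap, camera, config, log);
const controller = new hap.CameraController({
  cameraStreamCount: 2,
  delegate: delegate,
  streamingOptions: {
    supportedCryptoSuites: [hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80],
    video: {
      resolutions: [
        [1280, 720, 30],
        [640, 360, 30],
        [320, 240, 15], // Apple Watch requires this configuration
      ],
      codec: {
        profiles: [hap.H264Profile.BASELINE, hap.H264Profile.MAIN, hap.H264Profile.HIGH],
        levels: [hap.H264Level.LEVEL3_1, hap.H264Level.LEVEL3_2, hap.H264Level.LEVEL4_0],
      },
    },
    audio: {
      twoWayAudio: true,
      codecs: [{ type: AudioStreamingCodecType.AAC_ELD, samplerate: AudioStreamingSamplerate.KHZ_16 }],
    },
  },
});
delegate.controller = controller;
accessory.configureController(controller);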
Example #2
Source File: new-streaming-delegate.ts from homebridge-plugin-eufy-security (Apache License 2.0)
export class EufyCameraStreamingDelegate implements CameraStreamingDelegate {
private readonly hap: HAP;
private readonly log: Logger;
private readonly cameraName: string;
// private readonly videoConfig: VideoConfig;
private readonly videoProcessor: string;
private readonly interfaceName?: string;
private readonly platform: EufySecurityHomebridgePlatform;
private readonly device: FullDevice;
readonly controller: CameraController;
private debug = true;
private audio = true;
// keep track of sessions
pendingSessions: Record<string, SessionInfo> = {};
ongoingSessions: Record<string, ActiveSession> = {};
timeouts: Record<string, NodeJS.Timeout> = {};
constructor(platform: EufySecurityHomebridgePlatform, device: FullDevice) {
this.log = platform.log;
this.hap = platform.api.hap;
this.platform = platform;
this.device = device;
this.cameraName = device.device_name;
this.videoProcessor = ffmpegPath || 'ffmpeg';
platform.api.on(APIEvent.SHUTDOWN, () => {
for (const session in this.ongoingSessions) {
this.stopStream(session);
}
});
const options: CameraControllerOptions = {
cameraStreamCount: 2, // HomeKit requires at least 2 streams, but 1 is also just fine
delegate: this,
streamingOptions: {
supportedCryptoSuites: [
this.hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80,
],
video: {
resolutions: [
[320, 180, 30],
[320, 240, 15], // Apple Watch requires this configuration
[320, 240, 30],
[480, 270, 30],
[480, 360, 30],
[640, 360, 30],
[640, 480, 30],
[1280, 720, 30],
[1280, 960, 30],
[1920, 1080, 30],
[1600, 1200, 30],
],
codec: {
profiles: [
this.hap.H264Profile.BASELINE,
this.hap.H264Profile.MAIN,
this.hap.H264Profile.HIGH,
],
levels: [
this.hap.H264Level.LEVEL3_1,
this.hap.H264Level.LEVEL3_2,
this.hap.H264Level.LEVEL4_0,
],
},
},
audio: {
twoWayAudio: false, // !!this.videoConfig.returnAudioTarget,
codecs: [
{
type: AudioStreamingCodecType.AAC_ELD,
samplerate: AudioStreamingSamplerate.KHZ_16,
},
],
},
},
};
this.controller = new this.hap.CameraController(options);
}
private determineResolution(
request: SnapshotRequest | VideoInfo,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
isSnapshot: boolean,
): ResolutionInfo {
const width = request.width;
const height = request.height;
// if (!isSnapshot) {
// if ((this.videoConfig.forceMax && this.videoConfig.maxWidth) ||
// (request.width > this.videoConfig.maxWidth)) {
// width = this.videoConfig.maxWidth;
// }
// if ((this.videoConfig.forceMax && this.videoConfig.maxHeight) ||
// (request.height > this.videoConfig.maxHeight)) {
// height = this.videoConfig.maxHeight;
// }
// }
const filters: Array<string> = ['scale=1280:720'];
const noneFilter = filters.indexOf('none');
if (noneFilter >= 0) {
filters.splice(noneFilter, 1);
}
if (noneFilter < 0) {
if (width > 0 || height > 0) {
// filters.push('scale=' + (width > 0 ? '\'min(' + width + ',iw)\'' : 'iw') + ':' +
// (height > 0 ? '\'min(' + height + ',ih)\'' : 'ih') +
// ':force_original_aspect_ratio=decrease');
// filters.push('scale=trunc(iw/2)*2:trunc(ih/2)*2'); // Force to fit encoder restrictions
filters.push('scale=\'trunc(iw/2)*2\':\'trunc(ih/2)*2\''); // Force to fit encoder restrictions
}
}
return {
width: width,
height: height,
videoFilter: filters.join(','),
};
}
handleSnapshotRequest(
request: SnapshotRequest,
callback: SnapshotRequestCallback,
): void {
const resolution = this.determineResolution(request, true);
this.log.debug(
'Snapshot requested: ' + request.width + ' x ' + request.height,
this.cameraName,
this.debug,
);
this.log.debug(
'Sending snapshot: ' +
(resolution.width > 0 ? resolution.width : 'native') +
' x ' +
(resolution.height > 0 ? resolution.height : 'native'),
this.cameraName,
this.debug,
);
// get device info
this.platform.httpService
.listDevices({
device_sn: this.device.device_sn,
})
.then(([device]) => {
// let ffmpegArgs = this.videoConfig.stillImageSource || this.videoConfig.source;
let ffmpegArgs = `-i ${device.cover_path}`;
ffmpegArgs += // Still
' -frames:v 1' +
(resolution.videoFilter
? ' -filter:v ' + resolution.videoFilter
: '') +
' -f image2 -';
try {
const ffmpeg = spawn(this.videoProcessor, ffmpegArgs.split(/\s+/), {
env: process.env,
});
let imageBuffer = Buffer.alloc(0);
this.log.debug(
'Snapshot command: ' + this.videoProcessor + ' ' + ffmpegArgs,
this.cameraName,
this.debug,
);
ffmpeg.stdout.on('data', (data: Uint8Array) => {
imageBuffer = Buffer.concat([imageBuffer, data]);
});
const log = this.log;
ffmpeg.on('error', (error: Error) => {
log.error(
'An error occurred while making snapshot request: ' + error,
this.cameraName,
);
});
ffmpeg.on('close', () => {
callback(undefined, imageBuffer);
});
} catch (err) {
this.log.error(err, this.cameraName);
callback(err);
}
});
}
async getIpAddress(ipv6: boolean, interfaceName?: string): Promise<string> {
if (!interfaceName) {
interfaceName = await networkInterfaceDefault();
}
const interfaces = os.networkInterfaces();
const externalInfo = interfaces[interfaceName]?.filter((info) => {
return !info.internal;
});
const preferredFamily = ipv6 ? 'IPv6' : 'IPv4';
const addressInfo =
externalInfo?.find((info) => {
return info.family === preferredFamily;
}) || externalInfo?.[0];
if (!addressInfo) {
throw new Error(
'Unable to get network address for "' + interfaceName + '"!',
);
}
return addressInfo.address;
}
async prepareStream(
request: PrepareStreamRequest,
callback: PrepareStreamCallback,
): Promise<void> {
const videoReturnPort = await getPort();
const videoSSRC = this.hap.CameraController.generateSynchronisationSource();
const audioReturnPort = await getPort();
const audioSSRC = this.hap.CameraController.generateSynchronisationSource();
const ipv6 = request.addressVersion === 'ipv6';
let currentAddress: string;
try {
currentAddress = await this.getIpAddress(ipv6, this.interfaceName);
} catch (ex) {
if (this.interfaceName) {
this.log.warn(ex + ' Falling back to default.', this.cameraName);
currentAddress = await this.getIpAddress(ipv6);
} else {
throw ex;
}
}
const sessionInfo: SessionInfo = {
address: request.targetAddress,
localAddress: currentAddress,
ipv6: ipv6,
videoPort: request.video.port,
videoReturnPort: videoReturnPort,
videoCryptoSuite: request.video.srtpCryptoSuite,
videoSRTP: Buffer.concat([
request.video.srtp_key,
request.video.srtp_salt,
]),
videoSSRC: videoSSRC,
audioPort: request.audio.port,
audioReturnPort: audioReturnPort,
audioCryptoSuite: request.audio.srtpCryptoSuite,
audioSRTP: Buffer.concat([
request.audio.srtp_key,
request.audio.srtp_salt,
]),
audioSSRC: audioSSRC,
};
const response: PrepareStreamResponse = {
address: currentAddress,
video: {
port: videoReturnPort,
ssrc: videoSSRC,
srtp_key: request.video.srtp_key,
srtp_salt: request.video.srtp_salt,
},
audio: {
port: audioReturnPort,
ssrc: audioSSRC,
srtp_key: request.audio.srtp_key,
srtp_salt: request.audio.srtp_salt,
},
};
this.pendingSessions[request.sessionID] = sessionInfo;
callback(undefined, response);
}
private startStream(
request: StartStreamRequest,
callback: StreamRequestCallback,
): void {
this.platform.httpService
.startStream({
device_sn: this.device.device_sn,
station_sn: this.device.station_sn,
proto: 2,
})
.then(async ({ url }) => {
await new Promise((r) => setTimeout(r, 500));
return url;
})
.then((url) => {
const sessionInfo = this.pendingSessions[request.sessionID];
const vcodec = 'libx264';
const mtu = 1316; // request.video.mtu is not used
const encoderOptions = '-preset ultrafast';
const resolution = this.determineResolution(request.video, false);
const fps = request.video.fps;
const videoBitrate = request.video.max_bit_rate;
// let fps = (this.videoConfig.forceMax && this.videoConfig.maxFPS) ||
// (request.video.fps > this.videoConfig.maxFPS) ?
// this.videoConfig.maxFPS : request.video.fps;
// let videoBitrate = (this.videoConfig.forceMax && this.videoConfig.maxBitrate) ||
// (request.video.max_bit_rate > this.videoConfig.maxBitrate) ?
// this.videoConfig.maxBitrate : request.video.max_bit_rate;
// if (vcodec === 'copy') {
// resolution.width = 0;
// resolution.height = 0;
// resolution.videoFilter = '';
// fps = 0;
// videoBitrate = 0;
// }
this.log.debug(
'Video stream requested: ' +
request.video.width +
' x ' +
request.video.height +
', ' +
request.video.fps +
' fps, ' +
request.video.max_bit_rate +
' kbps',
this.cameraName,
this.debug,
);
this.log.info(
'Starting video stream: ' +
(resolution.width > 0 ? resolution.width : 'native') +
' x ' +
(resolution.height > 0 ? resolution.height : 'native') +
', ' +
(fps > 0 ? fps : 'native') +
' fps, ' +
(videoBitrate > 0 ? videoBitrate : '???') +
' kbps',
this.cameraName,
);
// let ffmpegArgs = this.videoConfig.source;
let ffmpegArgs = `-i ${url}`;
ffmpegArgs += // Video
// (this.videoConfig.mapvideo ? ' -map ' + this.videoConfig.mapvideo : ' -an -sn -dn') +
' -an -sn -dn' +
' -codec:v ' +
vcodec +
' -pix_fmt yuv420p' +
' -color_range mpeg' +
// (fps > 0 ? ' -r ' + fps : '') +
' -f rawvideo' +
(encoderOptions ? ' ' + encoderOptions : '') +
(resolution.videoFilter.length > 0
? ' -filter:v ' + resolution.videoFilter
: '') +
(videoBitrate > 0 ? ' -b:v ' + videoBitrate + 'k' : '') +
' -payload_type ' +
request.video.pt;
ffmpegArgs += // Video Stream
' -ssrc ' +
sessionInfo.videoSSRC +
' -f rtp' +
' -srtp_out_suite AES_CM_128_HMAC_SHA1_80' +
' -srtp_out_params ' +
sessionInfo.videoSRTP.toString('base64') +
' srtp://' +
sessionInfo.address +
':' +
sessionInfo.videoPort +
'?rtcpport=' +
sessionInfo.videoPort +
'&pkt_size=' +
mtu;
if (this.audio) {
ffmpegArgs += // Audio
// (this.videoConfig.mapaudio ? ' -map ' + this.videoConfig.mapaudio : ' -vn -sn -dn') +
' -vn -sn -dn' +
' -codec:a libfdk_aac' +
' -profile:a aac_eld' +
' -flags +global_header' +
' -f null' +
' -ar ' +
request.audio.sample_rate +
'k' +
' -b:a ' +
request.audio.max_bit_rate +
'k' +
' -ac ' +
request.audio.channel +
' -payload_type ' +
request.audio.pt;
ffmpegArgs += // Audio Stream
' -ssrc ' +
sessionInfo.audioSSRC +
' -f rtp' +
' -srtp_out_suite AES_CM_128_HMAC_SHA1_80' +
' -srtp_out_params ' +
sessionInfo.audioSRTP.toString('base64') +
' srtp://' +
sessionInfo.address +
':' +
sessionInfo.audioPort +
'?rtcpport=' +
sessionInfo.audioPort +
'&pkt_size=188';
}
if (this.debug) {
ffmpegArgs += ' -loglevel level+verbose';
}
const activeSession: ActiveSession = {};
activeSession.socket = createSocket(sessionInfo.ipv6 ? 'udp6' : 'udp4');
activeSession.socket.on('error', (err: Error) => {
this.log.error('Socket error: ' + err.name, this.cameraName);
this.stopStream(request.sessionID);
});
activeSession.socket.on('message', () => {
if (activeSession.timeout) {
clearTimeout(activeSession.timeout);
}
activeSession.timeout = setTimeout(() => {
this.log.info(
'Device appears to be inactive. Stopping stream.',
this.cameraName,
);
this.controller.forceStopStreamingSession(request.sessionID);
this.stopStream(request.sessionID);
}, request.video.rtcp_interval * 2 * 1000);
});
activeSession.socket.bind(
sessionInfo.videoReturnPort,
sessionInfo.localAddress,
);
activeSession.mainProcess = new FfmpegProcess(
this.cameraName,
request.sessionID,
this.videoProcessor,
ffmpegArgs,
this.log,
this.debug,
this,
callback,
);
// if (this.videoConfig.returnAudioTarget) {
// let ffmpegReturnArgs =
// '-hide_banner' +
// ' -protocol_whitelist pipe,udp,rtp,file,crypto' +
// ' -f sdp' +
// ' -c:a libfdk_aac' +
// ' -i pipe:' +
// ' ' + this.videoConfig.returnAudioTarget;
// if (this.videoConfig.debugReturn) {
// ffmpegReturnArgs += ' -loglevel level+verbose';
// }
// const ipVer = sessionInfo.ipv6 ? 'IP6' : 'IP4';
// const sdpReturnAudio =
// 'v=0\r\n' +
// 'o=- 0 0 IN ' + ipVer + ' ' + sessionInfo.address + '\r\n' +
// 's=Talk\r\n' +
// 'c=IN ' + ipVer + ' ' + sessionInfo.address + '\r\n' +
// 't=0 0\r\n' +
// 'm=audio ' + sessionInfo.audioReturnPort + ' RTP/AVP 110\r\n' +
// 'b=AS:24\r\n' +
// 'a=rtpmap:110 MPEG4-GENERIC/16000/1\r\n' +
// 'a=rtcp-mux\r\n' + // FFmpeg ignores this, but might as well
// 'a=fmtp:110 ' +
// 'profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3; ' +
// 'config=F8F0212C00BC00\r\n' +
// 'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:' + sessionInfo.audioSRTP.toString('base64') + '\r\n';
// activeSession.returnProcess = new FfmpegProcess(this.cameraName + '] [Two-way', request.sessionID,
// this.videoProcessor, ffmpegReturnArgs, this.log, this.videoConfig.debugReturn, this);
// activeSession.returnProcess.getStdin()?.end(sdpReturnAudio);
// }
this.ongoingSessions[request.sessionID] = activeSession;
delete this.pendingSessions[request.sessionID];
});
}
handleStreamRequest(
request: StreamingRequest,
callback: StreamRequestCallback,
): void {
switch (request.type) {
case StreamRequestTypes.START:
this.startStream(request, callback);
break;
case StreamRequestTypes.RECONFIGURE:
this.log.debug(
'Received request to reconfigure: ' +
request.video.width +
' x ' +
request.video.height +
', ' +
request.video.fps +
' fps, ' +
request.video.max_bit_rate +
' kbps (Ignored)',
this.cameraName,
this.debug,
);
callback();
break;
case StreamRequestTypes.STOP:
this.stopStream(request.sessionID);
callback();
break;
}
}
public stopStream(sessionId: string): void {
const session = this.ongoingSessions[sessionId];
if (session) {
if (session.timeout) {
clearTimeout(session.timeout);
}
try {
session.socket?.close();
} catch (err) {
this.log.error(
'Error occurred closing socket: ' + err,
this.cameraName,
);
}
try {
session.mainProcess?.stop();
} catch (err) {
this.log.error(
'Error occurred terminating main FFmpeg process: ' + err,
this.cameraName,
);
}
try {
session.returnProcess?.stop();
} catch (err) {
this.log.error(
'Error occurred terminating two-way FFmpeg process: ' + err,
this.cameraName,
);
}
}
delete this.ongoingSessions[sessionId];
this.log.info('Stopped video stream.', this.cameraName);
this.platform.httpService
.stopStream({
device_sn: this.device.device_sn,
station_sn: this.device.station_sn,
proto: 2,
})
.catch(() => {
// noop
})
.then(() => {
// noop
});
}
}
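Unlike Example #1, this delegate constructs its own CameraController in the constructor, so registering it only requires handing that controller to the accessory. A sketch, assuming platform, device and accessory come from the surrounding platform code:
const delegate = new EufyCameraStreamingDelegate(platform, device);
accessory.configureController(delegate.controller);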
Example #3
Source File: streamingDelegate.ts from homebridge-eufy-security (Apache License 2.0)
export class StreamingDelegate implements CameraStreamingDelegate {
private readonly hap: HAP;
private readonly log: Logger;
private readonly cameraName: string;
private readonly unbridge: boolean;
private readonly videoConfig: VideoConfig;
private readonly videoProcessor: string;
readonly controller: CameraController;
private snapshotPromise?: Promise<Buffer>;
private readonly device: Camera;
// keep track of sessions
pendingSessions: Map<string, SessionInfo> = new Map();
ongoingSessions: Map<string, ActiveSession> = new Map();
timeouts: Map<string, NodeJS.Timeout> = new Map();
constructor(platform: EufySecurityPlatform, device: Camera, cameraConfig: CameraConfig, api: API, hap: HAP) { // eslint-disable-line @typescript-eslint/explicit-module-boundary-types
this.log = platform.log;
this.hap = hap;
this.device = device;
this.cameraName = device.getName()!;
this.unbridge = false;
this.videoConfig = cameraConfig.videoConfig!;
this.videoProcessor = ffmpegPath || 'ffmpeg';
api.on(APIEvent.SHUTDOWN, () => {
for (const session in this.ongoingSessions) {
this.stopStream(session);
}
});
const options: CameraControllerOptions = {
cameraStreamCount: this.videoConfig.maxStreams || 2, // HomeKit requires at least 2 streams, but 1 is also just fine
delegate: this,
streamingOptions: {
supportedCryptoSuites: [hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80],
video: {
resolutions: [
[320, 180, 30],
[320, 240, 15], // Apple Watch requires this configuration
[320, 240, 30],
[480, 270, 30],
[480, 360, 30],
[640, 360, 30],
[640, 480, 30],
[1280, 720, 30],
[1280, 960, 30],
[1920, 1080, 30],
[1600, 1200, 30]
],
codec: {
profiles: [hap.H264Profile.BASELINE, hap.H264Profile.MAIN, hap.H264Profile.HIGH],
levels: [hap.H264Level.LEVEL3_1, hap.H264Level.LEVEL3_2, hap.H264Level.LEVEL4_0]
}
},
audio: {
twoWayAudio: !!this.videoConfig.returnAudioTarget,
codecs: [
{
type: AudioStreamingCodecType.AAC_ELD,
samplerate: AudioStreamingSamplerate.KHZ_16
/*type: AudioStreamingCodecType.OPUS,
samplerate: AudioStreamingSamplerate.KHZ_24*/
}
]
}
}
};
this.controller = new hap.CameraController(options);
}
private determineResolution(request: SnapshotRequest | VideoInfo, isSnapshot: boolean): ResolutionInfo {
const resInfo: ResolutionInfo = {
width: request.width,
height: request.height
};
if (!isSnapshot) {
if (this.videoConfig.maxWidth !== undefined &&
(this.videoConfig.forceMax || request.width > this.videoConfig.maxWidth)) {
resInfo.width = this.videoConfig.maxWidth;
}
if (this.videoConfig.maxHeight !== undefined &&
(this.videoConfig.forceMax || request.height > this.videoConfig.maxHeight)) {
resInfo.height = this.videoConfig.maxHeight;
}
}
const filters: Array<string> = this.videoConfig.videoFilter?.split(',') || [];
const noneFilter = filters.indexOf('none');
if (noneFilter >= 0) {
filters.splice(noneFilter, 1);
}
resInfo.snapFilter = filters.join(',');
if ((noneFilter < 0) && (resInfo.width > 0 || resInfo.height > 0)) {
resInfo.resizeFilter = 'scale=' + (resInfo.width > 0 ? '\'min(' + resInfo.width + ',iw)\'' : 'iw') + ':' +
(resInfo.height > 0 ? '\'min(' + resInfo.height + ',ih)\'' : 'ih') +
':force_original_aspect_ratio=decrease';
filters.push(resInfo.resizeFilter);
filters.push('scale=trunc(iw/2)*2:trunc(ih/2)*2'); // Force to fit encoder restrictions
}
if (filters.length > 0) {
resInfo.videoFilter = filters.join(',');
}
return resInfo;
}
fetchSnapshot(snapFilter?: string): Promise<Buffer> {
this.snapshotPromise = new Promise(async (resolve, reject) => {
try {
this.videoConfig.stillImageSource = '-i ' + this.device.getPropertyValue(PropertyName.DevicePictureUrl).value as string;
} catch {
this.log.warn(this.cameraName + ' fetchSnapshot: ' + 'No Snapshot found');
resolve(await readFileAsync(SnapshotUnavailablePath));
}
const startTime = Date.now();
const ffmpegArgs = (this.videoConfig.stillImageSource || this.videoConfig.source!) + // Still
' -frames:v 1' +
(snapFilter ? ' -filter:v ' + snapFilter : '') +
' -f image2 -' +
' -hide_banner' +
' -loglevel error';
this.log.debug('Snapshot command: ' + this.videoProcessor + ' ' + ffmpegArgs, this.cameraName, this.videoConfig.debug);
const ffmpeg = spawn(this.videoProcessor, ffmpegArgs.split(/\s+/), { env: process.env });
let snapshotBuffer = Buffer.alloc(0);
ffmpeg.stdout.on('data', (data) => {
snapshotBuffer = Buffer.concat([snapshotBuffer, data]);
});
ffmpeg.on('error', (error: Error) => {
reject('FFmpeg process creation failed: ' + error.message);
});
ffmpeg.stderr.on('data', (data) => {
data.toString().split('\n').forEach((line: string) => {
if (this.videoConfig.debug && line.length > 0) { // For now only write anything out when debug is set
this.log.error(line, this.cameraName + '] [Snapshot');
}
});
});
ffmpeg.on('close', () => {
if (snapshotBuffer.length > 0) {
resolve(snapshotBuffer);
} else {
reject('Failed to fetch snapshot.');
}
setTimeout(() => {
this.snapshotPromise = undefined;
}, 3 * 1000); // Expire cached snapshot after 3 seconds
const runtime = (Date.now() - startTime) / 1000;
let message = 'Fetching snapshot took ' + runtime + ' seconds.';
if (runtime < 5) {
this.log.debug(message, this.cameraName, this.videoConfig.debug);
} else {
if (!this.unbridge) {
message += ' It is highly recommended you switch to unbridge mode.';
}
if (runtime < 22) {
this.log.warn(message, this.cameraName);
} else {
message += ' The request has timed out and the snapshot has not been refreshed in HomeKit.';
this.log.error(message, this.cameraName);
}
}
});
});
return this.snapshotPromise;
}
resizeSnapshot(snapshot: Buffer, resizeFilter?: string): Promise<Buffer> {
return new Promise<Buffer>((resolve, reject) => {
const ffmpegArgs = '-i pipe:' + // Resize
' -frames:v 1' +
(resizeFilter ? ' -filter:v ' + resizeFilter : '') +
' -f image2 -';
this.log.debug('Resize command: ' + this.videoProcessor + ' ' + ffmpegArgs, this.cameraName, this.videoConfig.debug);
const ffmpeg = spawn(this.videoProcessor, ffmpegArgs.split(/\s+/), { env: process.env });
let resizeBuffer = Buffer.alloc(0);
ffmpeg.stdout.on('data', (data) => {
resizeBuffer = Buffer.concat([resizeBuffer, data]);
});
ffmpeg.on('error', (error: Error) => {
reject('FFmpeg process creation failed: ' + error.message);
});
ffmpeg.on('close', () => {
resolve(resizeBuffer);
});
ffmpeg.stdin.end(snapshot);
});
}
async handleSnapshotRequest(request: SnapshotRequest, callback: SnapshotRequestCallback): Promise<void> {
const resolution = this.determineResolution(request, true);
try {
const cachedSnapshot = !!this.snapshotPromise;
this.log.debug('Snapshot requested: ' + request.width + ' x ' + request.height,
this.cameraName, this.videoConfig.debug);
const snapshot = await (this.snapshotPromise || this.fetchSnapshot(resolution.snapFilter));
this.log.debug('Sending snapshot: ' + (resolution.width > 0 ? resolution.width : 'native') + ' x ' +
(resolution.height > 0 ? resolution.height : 'native') +
(cachedSnapshot ? ' (cached)' : ''), this.cameraName, this.videoConfig.debug);
const resized = await this.resizeSnapshot(snapshot, resolution.resizeFilter);
callback(undefined, resized);
} catch (err) {
this.log.error(err as string, this.cameraName);
callback(err as Error);
}
}
async prepareStream(request: PrepareStreamRequest, callback: PrepareStreamCallback): Promise<void> {
const ipv6 = request.addressVersion === 'ipv6';
const options: pickPortOptions = {
type: 'udp',
ip: ipv6 ? '::' : '0.0.0.0',
reserveTimeout: 15
};
const videoReturnPort = await pickPort(options);
const videoSSRC = this.hap.CameraController.generateSynchronisationSource();
const audioReturnPort = await pickPort(options);
const audioSSRC = this.hap.CameraController.generateSynchronisationSource();
const sessionInfo: SessionInfo = {
address: request.targetAddress,
ipv6: ipv6,
videoPort: request.video.port,
videoReturnPort: videoReturnPort,
videoCryptoSuite: request.video.srtpCryptoSuite,
videoSRTP: Buffer.concat([request.video.srtp_key, request.video.srtp_salt]),
videoSSRC: videoSSRC,
audioPort: request.audio.port,
audioReturnPort: audioReturnPort,
audioCryptoSuite: request.audio.srtpCryptoSuite,
audioSRTP: Buffer.concat([request.audio.srtp_key, request.audio.srtp_salt]),
audioSSRC: audioSSRC
};
const response: PrepareStreamResponse = {
video: {
port: videoReturnPort,
ssrc: videoSSRC,
srtp_key: request.video.srtp_key,
srtp_salt: request.video.srtp_salt
},
audio: {
port: audioReturnPort,
ssrc: audioSSRC,
srtp_key: request.audio.srtp_key,
srtp_salt: request.audio.srtp_salt
}
};
this.pendingSessions.set(request.sessionID, sessionInfo);
callback(undefined, response);
}
private async startStream(request: StartStreamRequest, callback: StreamRequestCallback): Promise<void> {
this.videoConfig.source = '-i ' + await this.device.startStream();
const sessionInfo = this.pendingSessions.get(request.sessionID);
if (sessionInfo) {
const vcodec = this.videoConfig.vcodec || 'libx264';
const mtu = this.videoConfig.packetSize || 1316; // request.video.mtu is not used
let encoderOptions = this.videoConfig.encoderOptions;
if (!encoderOptions && vcodec === 'libx264') {
encoderOptions = '-preset ultrafast -tune zerolatency';
}
const resolution = this.determineResolution(request.video, false);
let fps = (this.videoConfig.maxFPS !== undefined &&
(this.videoConfig.forceMax || request.video.fps > this.videoConfig.maxFPS)) ?
this.videoConfig.maxFPS : request.video.fps;
let videoBitrate = (this.videoConfig.maxBitrate !== undefined &&
(this.videoConfig.forceMax || request.video.max_bit_rate > this.videoConfig.maxBitrate)) ?
this.videoConfig.maxBitrate : request.video.max_bit_rate;
if (vcodec === 'copy') {
resolution.width = 0;
resolution.height = 0;
resolution.videoFilter = undefined;
fps = 0;
videoBitrate = 0;
}
this.log.debug('Video stream requested: ' + request.video.width + ' x ' + request.video.height + ', ' +
request.video.fps + ' fps, ' + request.video.max_bit_rate + ' kbps', this.cameraName, this.videoConfig.debug);
this.log.info('Starting video stream: ' + (resolution.width > 0 ? resolution.width : 'native') + ' x ' +
(resolution.height > 0 ? resolution.height : 'native') + ', ' + (fps > 0 ? fps : 'native') +
' fps, ' + (videoBitrate > 0 ? videoBitrate : '???') + ' kbps' +
(this.videoConfig.audio ? (' (' + request.audio.codec + ')') : ''), this.cameraName);
let ffmpegArgs = this.videoConfig.source!;
ffmpegArgs += // Video
(this.videoConfig.mapvideo ? ' -map ' + this.videoConfig.mapvideo : ' -an -sn -dn') +
' -codec:v ' + vcodec +
' -pix_fmt yuv420p' +
' -color_range mpeg' +
(fps > 0 ? ' -r ' + fps : '') +
' -f rawvideo' +
(encoderOptions ? ' ' + encoderOptions : '') +
(resolution.videoFilter ? ' -filter:v ' + resolution.videoFilter : '') +
(videoBitrate > 0 ? ' -b:v ' + videoBitrate + 'k' : '') +
' -payload_type ' + request.video.pt;
ffmpegArgs += // Video Stream
' -ssrc ' + sessionInfo.videoSSRC +
' -f rtp' +
' -srtp_out_suite AES_CM_128_HMAC_SHA1_80' +
' -srtp_out_params ' + sessionInfo.videoSRTP.toString('base64') +
' srtp://' + sessionInfo.address + ':' + sessionInfo.videoPort +
'?rtcpport=' + sessionInfo.videoPort + '&pkt_size=' + mtu;
if (this.videoConfig.audio) {
if (request.audio.codec === AudioStreamingCodecType.OPUS || request.audio.codec === AudioStreamingCodecType.AAC_ELD) {
ffmpegArgs += // Audio
(this.videoConfig.mapaudio ? ' -map ' + this.videoConfig.mapaudio : ' -vn -sn -dn') +
(request.audio.codec === AudioStreamingCodecType.OPUS ?
' -codec:a libopus' +
' -application lowdelay' :
' -codec:a aac' +
' -profile:a aac_eld') +
' -flags +global_header' +
' -f null' +
' -ar ' + request.audio.sample_rate + 'k' +
' -b:a ' + request.audio.max_bit_rate + 'k' +
' -ac ' + request.audio.channel +
' -payload_type ' + request.audio.pt;
ffmpegArgs += // Audio Stream
' -ssrc ' + sessionInfo.audioSSRC +
' -f rtp' +
' -srtp_out_suite AES_CM_128_HMAC_SHA1_80' +
' -srtp_out_params ' + sessionInfo.audioSRTP.toString('base64') +
' srtp://' + sessionInfo.address + ':' + sessionInfo.audioPort +
'?rtcpport=' + sessionInfo.audioPort + '&pkt_size=188';
} else {
this.log.error('Unsupported audio codec requested: ' + request.audio.codec, this.cameraName);
}
}
ffmpegArgs += ' -loglevel level' + (this.videoConfig.debug ? '+verbose' : '') +
' -progress pipe:1';
const activeSession: ActiveSession = {};
activeSession.socket = createSocket(sessionInfo.ipv6 ? 'udp6' : 'udp4');
activeSession.socket.on('error', (err: Error) => {
this.log.error('Socket error: ' + err.message, this.cameraName);
this.stopStream(request.sessionID);
});
activeSession.socket.on('message', () => {
if (activeSession.timeout) {
clearTimeout(activeSession.timeout);
}
activeSession.timeout = setTimeout(() => {
this.log.info('Device appears to be inactive. Stopping stream.', this.cameraName);
this.controller.forceStopStreamingSession(request.sessionID);
this.stopStream(request.sessionID);
}, request.video.rtcp_interval * 5 * 1000);
});
activeSession.socket.bind(sessionInfo.videoReturnPort);
activeSession.mainProcess = new FfmpegProcess(this.cameraName, request.sessionID, this.videoProcessor,
ffmpegArgs, this.log, this.videoConfig.debug, this, callback);
if (this.videoConfig.returnAudioTarget) {
const ffmpegReturnArgs =
'-hide_banner' +
' -protocol_whitelist pipe,udp,rtp,file,crypto' +
' -f sdp' +
' -c:a aac' +
' -i pipe:' +
' ' + this.videoConfig.returnAudioTarget +
' -loglevel level' + (this.videoConfig.debugReturn ? '+verbose' : '');
const ipVer = sessionInfo.ipv6 ? 'IP6' : 'IP4';
const sdpReturnAudio =
'v=0\r\n' +
'o=- 0 0 IN ' + ipVer + ' ' + sessionInfo.address + '\r\n' +
's=Talk\r\n' +
'c=IN ' + ipVer + ' ' + sessionInfo.address + '\r\n' +
't=0 0\r\n' +
'm=audio ' + sessionInfo.audioReturnPort + ' RTP/AVP 110\r\n' +
'b=AS:24\r\n' +
'a=rtpmap:110 MPEG4-GENERIC/16000/1\r\n' +
'a=rtcp-mux\r\n' + // FFmpeg ignores this, but might as well
'a=fmtp:110 ' +
'profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3; ' +
'config=F8F0212C00BC00\r\n' +
'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:' + sessionInfo.audioSRTP.toString('base64') + '\r\n';
activeSession.returnProcess = new FfmpegProcess(this.cameraName + '] [Two-way', request.sessionID,
this.videoProcessor, ffmpegReturnArgs, this.log, this.videoConfig.debugReturn, this);
activeSession.returnProcess.stdin.end(sdpReturnAudio);
}
this.ongoingSessions.set(request.sessionID, activeSession);
this.pendingSessions.delete(request.sessionID);
} else {
this.log.error('Error finding session information.', this.cameraName);
callback(new Error('Error finding session information'));
}
}
handleStreamRequest(request: StreamingRequest, callback: StreamRequestCallback): void {
switch (request.type) {
case StreamRequestTypes.START:
this.startStream(request, callback);
break;
case StreamRequestTypes.RECONFIGURE:
this.log.debug('Received request to reconfigure: ' + request.video.width + ' x ' + request.video.height + ', ' +
request.video.fps + ' fps, ' + request.video.max_bit_rate + ' kbps (Ignored)', this.cameraName, this.videoConfig.debug);
callback();
break;
case StreamRequestTypes.STOP:
this.stopStream(request.sessionID);
callback();
break;
}
}
public stopStream(sessionId: string): void {
const session = this.ongoingSessions.get(sessionId);
if (session) {
if (session.timeout) {
clearTimeout(session.timeout);
}
try {
session.socket?.close();
} catch (err) {
this.log.error('Error occurred closing socket: ' + err, this.cameraName);
}
try {
session.mainProcess?.stop();
} catch (err) {
this.log.error('Error occurred terminating main FFmpeg process: ' + err, this.cameraName);
}
try {
session.returnProcess?.stop();
} catch (err) {
this.log.error('Error occurred terminating two-way FFmpeg process: ' + err, this.cameraName);
}
}
this.ongoingSessions.delete(sessionId);
this.log.info('Stopped video stream.', this.cameraName);
}
}
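Example #3 caches the in-flight snapshot in snapshotPromise so that bursts of HomeKit snapshot requests share a single FFmpeg run, and lets the cache expire a few seconds after it settles. Below is a stripped-down sketch of that pattern, independent of the plugin's FFmpeg details; fetchJpeg is a hypothetical stand-in for the actual spawn logic in fetchSnapshot().
class SnapshotCache {
  private pending?: Promise<Buffer>;

  constructor(
    private readonly fetchJpeg: () => Promise<Buffer>, // stand-in for the FFmpeg spawn in fetchSnapshot()
    private readonly ttlMs = 3000, // Example #3 expires its cached snapshot after 3 seconds
  ) {}

  get(): Promise<Buffer> {
    if (!this.pending) {
      this.pending = this.fetchJpeg().finally(() => {
        // Keep the settled promise around briefly so concurrent requests
        // (e.g. the Home app opening several camera tiles) reuse one result.
        setTimeout(() => (this.pending = undefined), this.ttlMs);
      });
    }
    return this.pending;
  }
}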