homebridge#AudioStreamingCodecType TypeScript Examples
The following examples show how to use
homebridge#AudioStreamingCodecType.
You can vote up the examples you like or vote down the ones you don't, and follow the links above each example to go to the original project or source file. You may also check out the related API usage on the sidebar.
Example #1
Source File: accessory.ts From homebridge-nest-cam with GNU General Public License v3.0 | 5 votes |
configureController(): void {
const streamingDelegate = new StreamingDelegate(this.hap, this.camera, this.config, this.log);
const options: CameraControllerOptions = {
cameraStreamCount: 2, // HomeKit requires at least 2 streams, but 1 is also just fine
delegate: streamingDelegate,
streamingOptions: {
supportedCryptoSuites: [this.hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80],
video: {
resolutions: [
[320, 180, 30],
[320, 240, 15], // Apple Watch requires this configuration
[320, 240, 30],
[480, 270, 30],
[480, 360, 30],
[640, 360, 30],
[640, 480, 30],
[1280, 720, 30],
[1280, 960, 30],
[1920, 1080, 30],
[1600, 1200, 30],
],
codec: {
profiles: [this.hap.H264Profile.BASELINE, this.hap.H264Profile.MAIN, this.hap.H264Profile.HIGH],
levels: [this.hap.H264Level.LEVEL3_1, this.hap.H264Level.LEVEL3_2, this.hap.H264Level.LEVEL4_0],
},
},
audio: {
twoWayAudio: this.camera.info.capabilities.includes('audio.microphone'),
codecs: [
{
type: AudioStreamingCodecType.AAC_ELD,
samplerate: AudioStreamingSamplerate.KHZ_16,
},
],
},
},
};
const cameraController = new this.hap.CameraController(options);
streamingDelegate.controller = cameraController;
this.accessory.configureController(cameraController);
}
Example #2
Source File: new-streaming-delegate.ts From homebridge-plugin-eufy-security with Apache License 2.0 | 5 votes |
/**
 * Creates the streaming delegate for a Eufy device and registers its
 * CameraController with HAP.
 */
constructor(platform: EufySecurityHomebridgePlatform, device: FullDevice) {
this.log = platform.log;
this.hap = platform.api.hap;
this.platform = platform;
this.device = device;
this.cameraName = device.device_name;
this.videoProcessor = ffmpegPath || 'ffmpeg';

// Make sure no streaming sessions are left running when homebridge exits.
platform.api.on(APIEvent.SHUTDOWN, () => {
for (const session in this.ongoingSessions) {
this.stopStream(session);
}
});

// [width, height, fps] tuples advertised to HomeKit.
const resolutions: [number, number, number][] = [
[320, 180, 30],
[320, 240, 15], // Apple Watch requires this configuration
[320, 240, 30],
[480, 270, 30],
[480, 360, 30],
[640, 360, 30],
[640, 480, 30],
[1280, 720, 30],
[1280, 960, 30],
[1920, 1080, 30],
[1600, 1200, 30],
];

this.controller = new this.hap.CameraController({
cameraStreamCount: 2, // HomeKit requires at least 2 streams, but 1 is also just fine
delegate: this,
streamingOptions: {
supportedCryptoSuites: [this.hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80],
video: {
resolutions,
codec: {
profiles: [this.hap.H264Profile.BASELINE, this.hap.H264Profile.MAIN, this.hap.H264Profile.HIGH],
levels: [this.hap.H264Level.LEVEL3_1, this.hap.H264Level.LEVEL3_2, this.hap.H264Level.LEVEL4_0],
},
},
audio: {
twoWayAudio: false, // !!this.videoConfig.returnAudioTarget
codecs: [
{ type: AudioStreamingCodecType.AAC_ELD, samplerate: AudioStreamingSamplerate.KHZ_16 },
],
},
},
});
}
Example #3
Source File: streamingDelegate.ts From homebridge-eufy-security with Apache License 2.0 | 5 votes |
/**
 * Creates the streaming delegate for a Eufy camera, reading stream limits
 * from the per-camera videoConfig, and registers its CameraController.
 */
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
constructor(platform: EufySecurityPlatform, device: Camera, cameraConfig: CameraConfig, api: API, hap: HAP) {
this.log = platform.log;
this.hap = hap;
this.device = device;
this.cameraName = device.getName()!;
this.unbridge = false;
this.videoConfig = cameraConfig.videoConfig!;
this.videoProcessor = ffmpegPath || 'ffmpeg';

// Tear down any running ffmpeg sessions when homebridge shuts down.
api.on(APIEvent.SHUTDOWN, () => {
for (const session in this.ongoingSessions) {
this.stopStream(session);
}
});

const streamingOptions = {
supportedCryptoSuites: [hap.SRTPCryptoSuites.AES_CM_128_HMAC_SHA1_80],
video: {
// [width, height, fps] tuples advertised to HomeKit.
resolutions: [
[320, 180, 30],
[320, 240, 15], // Apple Watch requires this configuration
[320, 240, 30],
[480, 270, 30],
[480, 360, 30],
[640, 360, 30],
[640, 480, 30],
[1280, 720, 30],
[1280, 960, 30],
[1920, 1080, 30],
[1600, 1200, 30]
],
codec: {
profiles: [hap.H264Profile.BASELINE, hap.H264Profile.MAIN, hap.H264Profile.HIGH],
levels: [hap.H264Level.LEVEL3_1, hap.H264Level.LEVEL3_2, hap.H264Level.LEVEL4_0]
}
},
audio: {
// Two-way audio only when a return-audio target is configured.
twoWayAudio: !!this.videoConfig.returnAudioTarget,
codecs: [
{
type: AudioStreamingCodecType.AAC_ELD,
samplerate: AudioStreamingSamplerate.KHZ_16
/*type: AudioStreamingCodecType.OPUS,
samplerate: AudioStreamingSamplerate.KHZ_24*/
}
]
}
};

const options: CameraControllerOptions = {
cameraStreamCount: this.videoConfig.maxStreams || 2, // HomeKit requires at least 2 streams, but 1 is also just fine
delegate: this,
streamingOptions
};

this.controller = new hap.CameraController(options);
}
Example #4
Source File: streamingDelegate.ts From homebridge-eufy-security with Apache License 2.0 | 4 votes |
/**
 * Starts a live video stream for a pending HomeKit session: obtains the
 * device stream URL, assembles the ffmpeg argument string for SRTP video
 * (and optionally audio) output, spawns the ffmpeg process, and records the
 * session so it can be stopped later.
 * @param request HomeKit stream parameters (resolution, fps, bitrates, payload types, session id).
 * @param callback Invoked by FfmpegProcess once streaming starts (or immediately with an error here).
 */
private async startStream(request: StartStreamRequest, callback: StreamRequestCallback): Promise<void> {
// Ask the device for its live stream URL and use it as the ffmpeg input.
this.videoConfig.source = '-i ' + await this.device.startStream();
const sessionInfo = this.pendingSessions.get(request.sessionID);
if (sessionInfo) {
// Encoder defaults: software x264 unless the user configured otherwise.
const vcodec = this.videoConfig.vcodec || 'libx264';
const mtu = this.videoConfig.packetSize || 1316; // request.video.mtu is not used
let encoderOptions = this.videoConfig.encoderOptions;
// Low-latency preset only makes sense for the default libx264 encoder.
if (!encoderOptions && vcodec === 'libx264') {
encoderOptions = '-preset ultrafast -tune zerolatency';
}
const resolution = this.determineResolution(request.video, false);
// Cap fps/bitrate at the configured maxima; forceMax overrides the request outright.
let fps = (this.videoConfig.maxFPS !== undefined &&
(this.videoConfig.forceMax || request.video.fps > this.videoConfig.maxFPS)) ?
this.videoConfig.maxFPS : request.video.fps;
let videoBitrate = (this.videoConfig.maxBitrate !== undefined &&
(this.videoConfig.forceMax || request.video.max_bit_rate > this.videoConfig.maxBitrate)) ?
this.videoConfig.maxBitrate : request.video.max_bit_rate;
// 'copy' is passthrough: zeroing these suppresses scaling/fps/bitrate args below.
if (vcodec === 'copy') {
resolution.width = 0;
resolution.height = 0;
resolution.videoFilter = undefined;
fps = 0;
videoBitrate = 0;
}
this.log.debug('Video stream requested: ' + request.video.width + ' x ' + request.video.height + ', ' +
request.video.fps + ' fps, ' + request.video.max_bit_rate + ' kbps', this.cameraName, this.videoConfig.debug);
this.log.info('Starting video stream: ' + (resolution.width > 0 ? resolution.width : 'native') + ' x ' +
(resolution.height > 0 ? resolution.height : 'native') + ', ' + (fps > 0 ? fps : 'native') +
' fps, ' + (videoBitrate > 0 ? videoBitrate : '???') + ' kbps' +
(this.videoConfig.audio ? (' (' + request.audio.codec + ')') : ''), this.cameraName);
// Build the ffmpeg command line piecewise, starting from the input source.
let ffmpegArgs = this.videoConfig.source!;
ffmpegArgs += // Video
(this.videoConfig.mapvideo ? ' -map ' + this.videoConfig.mapvideo : ' -an -sn -dn') +
' -codec:v ' + vcodec +
' -pix_fmt yuv420p' +
' -color_range mpeg' +
(fps > 0 ? ' -r ' + fps : '') +
' -f rawvideo' +
(encoderOptions ? ' ' + encoderOptions : '') +
(resolution.videoFilter ? ' -filter:v ' + resolution.videoFilter : '') +
(videoBitrate > 0 ? ' -b:v ' + videoBitrate + 'k' : '') +
' -payload_type ' + request.video.pt;
ffmpegArgs += // Video Stream
' -ssrc ' + sessionInfo.videoSSRC +
' -f rtp' +
' -srtp_out_suite AES_CM_128_HMAC_SHA1_80' +
' -srtp_out_params ' + sessionInfo.videoSRTP.toString('base64') +
' srtp://' + sessionInfo.address + ':' + sessionInfo.videoPort +
'?rtcpport=' + sessionInfo.videoPort + '&pkt_size=' + mtu;
if (this.videoConfig.audio) {
// Only the two codecs HomeKit negotiates here are supported.
if (request.audio.codec === AudioStreamingCodecType.OPUS || request.audio.codec === AudioStreamingCodecType.AAC_ELD) {
ffmpegArgs += // Audio
(this.videoConfig.mapaudio ? ' -map ' + this.videoConfig.mapaudio : ' -vn -sn -dn') +
(request.audio.codec === AudioStreamingCodecType.OPUS ?
' -codec:a libopus' +
' -application lowdelay' :
' -codec:a aac' +
' -profile:a aac_eld') +
' -flags +global_header' +
// NOTE(review): '-f null' before an rtp output looks unusual — confirm against upstream homebridge-camera-ffmpeg.
' -f null' +
// NOTE(review): appending 'k' presumes sample_rate is given in kHz — verify.
' -ar ' + request.audio.sample_rate + 'k' +
' -b:a ' + request.audio.max_bit_rate + 'k' +
' -ac ' + request.audio.channel +
' -payload_type ' + request.audio.pt;
ffmpegArgs += // Audio Stream
' -ssrc ' + sessionInfo.audioSSRC +
' -f rtp' +
' -srtp_out_suite AES_CM_128_HMAC_SHA1_80' +
' -srtp_out_params ' + sessionInfo.audioSRTP.toString('base64') +
' srtp://' + sessionInfo.address + ':' + sessionInfo.audioPort +
'?rtcpport=' + sessionInfo.audioPort + '&pkt_size=188';
} else {
this.log.error('Unsupported audio codec requested: ' + request.audio.codec, this.cameraName);
}
}
ffmpegArgs += ' -loglevel level' + (this.videoConfig.debug ? '+verbose' : '') +
' -progress pipe:1';
const activeSession: ActiveSession = {};
// UDP socket on the video return port: incoming packets (presumably RTCP
// keep-alives from HomeKit — confirm) reset the inactivity timer.
activeSession.socket = createSocket(sessionInfo.ipv6 ? 'udp6' : 'udp4');
activeSession.socket.on('error', (err: Error) => {
this.log.error('Socket error: ' + err.message, this.cameraName);
this.stopStream(request.sessionID);
});
activeSession.socket.on('message', () => {
if (activeSession.timeout) {
clearTimeout(activeSession.timeout);
}
// No packets for 5 RTCP intervals -> treat the viewer as gone and stop.
activeSession.timeout = setTimeout(() => {
this.log.info('Device appears to be inactive. Stopping stream.', this.cameraName);
this.controller.forceStopStreamingSession(request.sessionID);
this.stopStream(request.sessionID);
}, request.video.rtcp_interval * 5 * 1000);
});
activeSession.socket.bind(sessionInfo.videoReturnPort);
// Spawn the main ffmpeg process; it invokes callback once streaming begins.
activeSession.mainProcess = new FfmpegProcess(this.cameraName, request.sessionID, this.videoProcessor,
ffmpegArgs, this.log, this.videoConfig.debug, this, callback);
// Two-way audio: a second ffmpeg decodes the SRTP return stream described
// by an SDP document fed to it on stdin, and forwards it to the camera.
if (this.videoConfig.returnAudioTarget) {
const ffmpegReturnArgs =
'-hide_banner' +
' -protocol_whitelist pipe,udp,rtp,file,crypto' +
' -f sdp' +
' -c:a aac' +
' -i pipe:' +
' ' + this.videoConfig.returnAudioTarget +
' -loglevel level' + (this.videoConfig.debugReturn ? '+verbose' : '');
const ipVer = sessionInfo.ipv6 ? 'IP6' : 'IP4';
// SDP describing HomeKit's AAC return audio (payload 110, 16 kHz mono, SRTP-keyed).
const sdpReturnAudio =
'v=0\r\n' +
'o=- 0 0 IN ' + ipVer + ' ' + sessionInfo.address + '\r\n' +
's=Talk\r\n' +
'c=IN ' + ipVer + ' ' + sessionInfo.address + '\r\n' +
't=0 0\r\n' +
'm=audio ' + sessionInfo.audioReturnPort + ' RTP/AVP 110\r\n' +
'b=AS:24\r\n' +
'a=rtpmap:110 MPEG4-GENERIC/16000/1\r\n' +
'a=rtcp-mux\r\n' + // FFmpeg ignores this, but might as well
'a=fmtp:110 ' +
'profile-level-id=1;mode=AAC-hbr;sizelength=13;indexlength=3;indexdeltalength=3; ' +
'config=F8F0212C00BC00\r\n' +
'a=crypto:1 AES_CM_128_HMAC_SHA1_80 inline:' + sessionInfo.audioSRTP.toString('base64') + '\r\n';
activeSession.returnProcess = new FfmpegProcess(this.cameraName + '] [Two-way', request.sessionID,
this.videoProcessor, ffmpegReturnArgs, this.log, this.videoConfig.debugReturn, this);
activeSession.returnProcess.stdin.end(sdpReturnAudio);
}
// Promote the session from pending to ongoing.
this.ongoingSessions.set(request.sessionID, activeSession);
this.pendingSessions.delete(request.sessionID);
} else {
this.log.error('Error finding session information.', this.cameraName);
callback(new Error('Error finding session information'));
}
}