WebRTCPedia! the Encyclopedia!
HOME © Muaz Khan . @WebRTCWeb . Github . Latest issues . What's New?
MediaStream.stop is obsolete or removed; how to fix it?
// Polyfill: restore the removed MediaStream.stop() by stopping every track.
var MediaStream = window.MediaStream;
if (typeof MediaStream === 'undefined' && typeof webkitMediaStream !== 'undefined') {
    // Older WebKit-based browsers only expose the prefixed constructor.
    MediaStream = webkitMediaStream;
}
if (typeof MediaStream !== 'undefined' && !('stop' in MediaStream.prototype)) {
    MediaStream.prototype.stop = function() {
        var tracks = this.getTracks();
        for (var i = 0; i < tracks.length; i++) {
            tracks[i].stop();
        }
    };
}
Now, feel free to use stream.stop:
// 'ended' fires on the stream once it stops; with the polyfill installed,
// stream.stop() is safe to call cross-browser.
stream.addEventListener('ended', function() {
alert('Stream is stopped.');
}, false);
stream.stop();
via: http://stackoverflow.com/a/11646945/552182
getRemoteStreams alternative?
// Rebuild a remote MediaStream from the peer connection's RTP receivers.
var stream = new MediaStream();
var receivers = peer.getReceivers();
for (var i = 0; i < receivers.length; i++) {
    stream.addTrack(receivers[i].track);
}
video.srcObject = stream;
console.log(stream.getTracks()); // check console logs
Or
// Pick the first audio and the first video receiver track (if any) and
// assemble them into a fresh MediaStream.
var audioTrack, videoTrack;
peer.getReceivers().forEach(function(receiver) {
    var track = receiver.track;
    if (!audioTrack && track.kind === 'audio') {
        audioTrack = track;
    } else if (!videoTrack && track.kind === 'video') {
        videoTrack = track;
    }
});
var stream = new MediaStream();
[audioTrack, videoTrack].forEach(function(track) {
    if (track) {
        stream.addTrack(track);
    }
});
video.srcObject = stream;
console.log(stream.getTracks()); // check console logs
Or override "getRemoteStreams" and "getLocalStreams"
// Fallback for browsers that dropped peer.getRemoteStreams: synthesize a
// single stream from the receivers' tracks.
if (!peer.getRemoteStreams) {
    peer.getRemoteStreams = function() {
        var remote = new MediaStream();
        var receivers = peer.getReceivers();
        for (var i = 0; i < receivers.length; i++) {
            remote.addTrack(receivers[i].track);
        }
        return [remote];
    };
}
// Fallback for browsers that dropped peer.getLocalStreams: synthesize a
// single stream from the senders' tracks.
if (!peer.getLocalStreams) {
    peer.getLocalStreams = function() {
        var stream = new MediaStream();
        peer.getSenders().forEach(function(sender) {
            // RTCRtpSender.track is null after removeTrack/replaceTrack(null);
            // MediaStream.addTrack would throw on null, so skip those senders.
            if (sender.track) {
                stream.addTrack(sender.track);
            }
        });
        return [stream];
    };
}
How to detect if screen-sharing stopped or camera stopped (cross browser)?
// Fires the callback once when the screen stream (or any of its tracks) ends.
addStreamStopListener(yourScreen, function() {
alert('screen sharing is ended.');
});
// Invokes `callback` exactly once when the stream or any of its tracks
// fires 'ended' or 'inactive' (covers differing browser semantics).
function addStreamStopListener(stream, callback) {
    var fired = false;
    function fireOnce() {
        if (!fired) {
            fired = true;
            callback();
        }
    }
    stream.addEventListener('ended', fireOnce, false);
    stream.addEventListener('inactive', fireOnce, false);
    stream.getTracks().forEach(function(track) {
        track.addEventListener('ended', fireOnce, false);
        track.addEventListener('inactive', fireOnce, false);
    });
}
Stream end handler using promises:
// Promise variant: resolves once when the stream (or any track) ends.
addStreamStopListenerUsingPromises(stream).then(function() {
alert('screen sharing is ended.');
});
// Returns a promise that settles the first time the stream or any of its
// tracks fires 'ended' or 'inactive'; rejects if wiring the listeners throws.
function addStreamStopListenerUsingPromises(stream) {
    return new Promise(function(resolve, reject) {
        try {
            // resolve() is idempotent, so every listener may share it:
            // only the first invocation settles the promise.
            var onStop = function() {
                resolve();
            };
            stream.addEventListener('ended', onStop, false);
            stream.addEventListener('inactive', onStop, false);
            stream.getTracks().forEach(function(track) {
                track.addEventListener('ended', onStop, false);
                track.addEventListener('inactive', onStop, false);
            });
        } catch (e) {
            reject(e);
        }
    });
}
How to get mp3 stream?
// Stream the chosen mp3 file: send it over WebRTC, record it, or mix it
// with the microphone.
document.querySelector('input[type=file]').onchange = function(event) {
    var file = this.files[0];
    if (file && file.size > 0) {
        getMp3Stream(file, function(mp3Stream) {
            mp3Stream.getTracks().forEach(function(track) {
                rtcPeerConnection.addTrack(track, mp3Stream);
            });
            // or record (fixed: was `new MediaRecorder(strea)` — an undefined variable)
            var recorder = new MediaRecorder(mp3Stream);
            // or merge into microphone (using MultiStreamsMixer.js)
            var audioMixer = new MultiStreamsMixer([microphone, mp3Stream]);
            var mixedStream = audioMixer.getMixedStream();
        });
    }
};
// Decodes `mp3File` with Web Audio and hands `callback` a live MediaStream
// plus the clip duration in milliseconds: callback(stream, durationMs).
function getMp3Stream(mp3File, callback) {
    window.AudioContext = window.AudioContext || window.webkitAudioContext;
    var context = new AudioContext();
    // Muted gain node: keeps the audio graph running without local playback.
    var gainNode = context.createGain();
    gainNode.connect(context.destination);
    gainNode.gain.value = 0; // don't play for self
    var onDecoded = function(buffer) {
        var source = context.createBufferSource();
        source.buffer = buffer;
        source.start(0, 0 / 1000);
        source.connect(gainNode);
        var destination = context.createMediaStreamDestination();
        source.connect(destination);
        // buffer.duration is in seconds; report milliseconds to the caller
        callback(destination.stream, buffer.duration * 1000);
    };
    var reader = new FileReader();
    reader.onload = function(e) {
        // e.target.result is the mp3's ArrayBuffer; decode it to a PCM buffer
        context.decodeAudioData(e.target.result, onDecoded);
    };
    reader.readAsArrayBuffer(mp3File);
}
How to get mp3 stream using promises?
// NOTE: the handler must be `async` for the `await` form below to be valid —
// the original used `await` inside a plain function, which is a SyntaxError.
document.querySelector('input[type=file]').onchange = async function(event) {
    var file = this.files[0];
    if (file && file.size > 0) {
        // either consume the promise directly…
        getMp3StreamUsingPromises(file).then(function(mp3Stream) {});
        // …or await it (requires the async handler)
        var mp3Stream = await getMp3StreamUsingPromises(file);
    }
};
// Promise variant of getMp3Stream: resolves with a live MediaStream for the
// decoded mp3. Note: a promise carries a single value, so (unlike the
// callback version) the clip duration cannot be delivered alongside it —
// the original's second resolve() argument was silently discarded.
function getMp3StreamUsingPromises(mp3File) {
    return new Promise(function(resolve, reject) {
        try {
            window.AudioContext = window.AudioContext || window.webkitAudioContext;
            var context = new AudioContext();
            // Muted gain node keeps the graph alive without local playback.
            var gainNode = context.createGain();
            gainNode.connect(context.destination);
            gainNode.gain.value = 0; // don't play for self
            function createSoundSource(buffer) {
                var soundSource = context.createBufferSource();
                soundSource.buffer = buffer;
                soundSource.start(0, 0 / 1000);
                soundSource.connect(gainNode);
                var destination = context.createMediaStreamDestination();
                soundSource.connect(destination);
                resolve(destination.stream);
            }
            var reader = new FileReader();
            reader.onload = function(e) {
                // Decode the ArrayBuffer to PCM; reject on decode failure so
                // the promise cannot hang forever on a corrupt file.
                context.decodeAudioData(e.target.result, createSoundSource, reject);
            };
            reader.onerror = function() {
                // Surface file-read failures instead of leaving the promise pending.
                reject(reader.error);
            };
            reader.readAsArrayBuffer(mp3File);
        } catch (e) {
            reject(e);
        }
    });
}
How to check if website already has privileges to access camera/microphone?
// link: https://cdn.webrtc-experiment.com/DetectRTC/checkDevicesSupport.js
// check for microphone/camera support!
// The callback reads globals populated by the DetectRTC helper script above
// (hasWebcam, hasMicrophone, isWebsiteHasWebcamPermissions, …) —
// NOTE(review): names come from that external script; verify against its source.
checkDeviceSupport(function() {
document.write('hasWebCam: ', hasWebcam, '<br>');
document.write('hasMicrophone: ', hasMicrophone, '<br>');
document.write('isWebsiteHasWebcamPermissions: ', isWebsiteHasWebcamPermissions, '<br>');
document.write('isWebsiteHasMicrophonePermissions: ', isWebsiteHasMicrophonePermissions, '<br>');
});
via: http://stackoverflow.com/a/30047627/552182
How to manage audio/video bitrates?
// Link this Library:
// https://cdn.webrtc-experiment.com/BandwidthHandler.js
// here is how to use it
// Desired caps in kbit/s; BandwidthHandler applies them by rewriting the SDP.
var bandwidth = {
screen: 300, // 300kbits minimum
audio: 50, // 50kbits minimum
video: 256 // 256kbits (both min-max)
};
var isScreenSharing = false;
// Apply the per-media caps above to the session description.
sdp = BandwidthHandler.setApplicationSpecificBandwidth(sdp, bandwidth, isScreenSharing);
// Pin the video bitrate to a fixed value (min == max).
sdp = BandwidthHandler.setVideoBitrates(sdp, {
min: bandwidth.video,
max: bandwidth.video
});
// Apply BandwidthHandler's default Opus attributes (no overrides).
sdp = BandwidthHandler.setOpusAttributes(sdp);
How to set audio sdp parameters?
// Opus fmtp attributes — value ranges per RFC 7587 ("RTP Payload Format for
// the Opus Speech and Audio Codec").
sdp = BandwidthHandler.setOpusAttributes(sdp, {
    'stereo': 0, // disable stereo (force mono audio)
    'sprop-stereo': 0, // don't announce stereo either (was 1, inconsistent with mono)
    'maxaveragebitrate': 500 * 1000, // 500 kbit/s — Opus allows 6000..510000 bit/s
                                     // (original 500*1024*8 = 4,096,000 was out of range)
    'maxplaybackrate': 48000, // sample rate in Hz, not a bitrate; Opus max is 48000
    'cbr': 0, // disable constant bitrate
    'useinbandfec': 1, // use inband forward error correction
    'usedtx': 1, // use discontinuous transmission
    'maxptime': 120 // max packet time in ms (original 3 was below typical 20 ms frames)
});
via: http://stackoverflow.com/a/16868123/552182
How to fix Chrome camera/microphone failures?
# open this page: chrome://settings/content#media-stream-mic
via: http://stackoverflow.com/a/14617402/552182
How to detect local or remote stream?
// chrome 48+
// Detect remote audio/video tracks via the non-standard `track.remote` flag.
var isRemoteAudioStream = false;
var isRemoteVideoStream = false;
stream.getTracks().forEach(function(track) {
    if (track.remote !== true) {
        return; // only remote tracks are of interest here
    }
    if (track.kind === 'audio') {
        isRemoteAudioStream = true;
    } else if (track.kind === 'video') {
        isRemoteVideoStream = true;
    }
});
alert('Remote audio stream? ' + isRemoteAudioStream);
alert('Remote video stream? ' + isRemoteVideoStream);
How to capture audio+screen in a single getUserMedia request?
// firefox 38+
// Legacy Firefox-only API: mozGetUserMedia with a mediaSource constraint
// captures the screen and the microphone in one call.
// NOTE(review): modern code should prefer navigator.mediaDevices.getDisplayMedia;
// also confirm 'monitor' is a valid mediaSource value for your target Firefox.
var screen = {
mediaSource: 'monitor' // monitor or window
};
var constraints = {
video: screen,
audio: true
};
navigator.mozGetUserMedia(constraints, successCallback, failureCallback);
How to display HTMLVideoElement poster?
htmlVideoElement.src = null; htmlVideoElement.pause(); // above two lines are mandatory htmlVideoElement.poster = '/muted.png';
Before sending data over RTC-data-channels
// define below snippet as soon as you invoked "peer.createDataChannel" method
// first step: take a reference to original "send" method
channel.internalSend = channel.send;
// define your own "send" wrapper that retries until the channel opens
channel.send = function(data) {
    // check for "readyState==open"
    if (channel.readyState !== 'open') {
        // make sure that peer is NOT closed
        if (peer.iceConnectionState.search(/disconnected|closed|failed/gi) !== -1) {
            return;
        }
        // retry after 1-second (fixed: was `channels.send`, an undefined variable)
        setTimeout(function() {
            channel.send(data);
        }, 1000);
        return;
    }
    // send data using real data-channel object
    channel.internalSend(data);
};
Modify streams without "remaking" getUserMedia request
// supported only in firefox 43+
// NOTE(review): `yourOrigianlStream` is a placeholder for your previously
// captured stream; the property name looks misspelled ('yourOriginalStream'?)
// — confirm before copying. Left unchanged here because it is a runtime name.
var originalStream = window.yourOrigianlStream;
// change from 360p to 720p
// Re-negotiates the capture resolution to 720p in place, without a new
// getUserMedia request (applyConstraints on the live video track).
function changeVideoStreamTo720p() {
    var hdConstraints = {
        width: { min: 1280 },
        height: { min: 720 }
    };
    originalStream.getTracks().forEach(function(track) {
        if (track.kind !== 'video') {
            return; // audio tracks are untouched
        }
        track.applyConstraints(hdConstraints);
    });
}
// Switches the live video track to the front ('user') camera in place.
function showFrontCamera() {
    var frontConstraints = {
        facingMode: { exact: 'user' }
    };
    originalStream.getTracks().forEach(function(track) {
        if (track.kind !== 'video') {
            return; // audio tracks are untouched
        }
        track.applyConstraints(frontConstraints);
    });
}
// Switches the live video track to the rear ('environment') camera in place.
function showRearCamera() {
    var rearConstraints = {
        facingMode: { exact: 'environment' }
    };
    originalStream.getTracks().forEach(function(track) {
        if (track.kind !== 'video') {
            return; // audio tracks are untouched
        }
        track.applyConstraints(rearConstraints);
    });
}
Capture Rear or Front Camera
// supported only in firefox 43+
#1 capture rear camera
// Rear camera: facingMode 'environment' (legacy Firefox mozGetUserMedia API;
// onSuccessCallback/onFailureCallback are your own handlers).
var videoConstraints = {
facingMode: { exact: 'environment' }
};
navigator.mozGetUserMedia({ video: videoConstraints }, onSuccessCallback, onFailureCallback);
#2 capture front camera
// Front camera: facingMode 'user' (legacy Firefox mozGetUserMedia API;
// onSuccessCallback/onFailureCallback are your own handlers).
var videoConstraints = {
facingMode: { exact: 'user' }
};
navigator.mozGetUserMedia({ video: videoConstraints }, onSuccessCallback, onFailureCallback);
Select Secondary (2nd) Camera
// Opens the second video-input device reported by checkDeviceSupport
// (DetectRTC populates the global `videoInputDevices`); alerts when no
// secondary webcam exists.
function selectSecondaryCamera() {
    // link: https://cdn.webrtc-experiment.com/DetectRTC/checkDeviceSupport.js
    // LIVE Demo for this function
    // https://jsfiddle.net/cf90az9q/
    checkDeviceSupport(function() {
        var secondDevice = videoInputDevices[1];
        if (!secondDevice) return alert('Secondary webcam is NOT available.');
        var videoConstraints = {
            deviceId: secondDevice.deviceId
        };
        if (!!navigator.webkitGetUserMedia) {
            // legacy Chrome expects sourceId inside `optional` constraints
            videoConstraints = {
                mandatory: {},
                optional: [{
                    sourceId: secondDevice.deviceId
                }]
            };
        }
        // fixed: keep a native navigator.getUserMedia when present instead of
        // unconditionally overwriting it with a (possibly undefined) prefixed one
        navigator.getUserMedia = navigator.getUserMedia || navigator.mozGetUserMedia || navigator.webkitGetUserMedia;
        navigator.getUserMedia({ video: videoConstraints }, function(stream) {
            // success: attach `stream` to a video element here
        }, function(error) {
            alert(JSON.stringify(error));
        });
    });
}
- Maximum peer connections limit is 256.
- Opus codec minimum bandwidth is 6kbit/s
- Opus codec maximum bandwidth is 510kbits/s
- Vp8 codec minimum bandwidth is 100kbits/s
- Vp8 codec maximum bandwidth is 2000+ kbits/s
- 720p at 30 FPS causes 1.0-to-2.0 Mbps bandwidth usage
- 360p at 30 FPS causes 0.5-to-1.0 Mbps bandwidth usage
- 180p at 30 FPS causes 0.1-to-0.5 Mbps bandwidth usage
- Maximum bandwidth used by each RTP port is 1MB.
- Only one media source i.e. "APM" is permitted.
- WebRTC currently uses UDP for RTP transmission.
- Maximum video bitrate on chrome is about 2Mb/s (i.e. 2000kbits/s).
- Minimum video bitrate on chrome is .05Mb/s (i.e. 50kbits/s).
- Starting video bitrate on chrome is .3Mb/s (i.e. 300kbits/s).
- Each RTP port is using 1 MB bandwidth. It means that 4 MB bandwidth is acquired by each peer.
- Maximum number of external video codecs that can be used on Chrome is 8.
- Maximum simulcast streams limit is 4.
- In a peer-to-server connection, you can capture DTLS/SRTP (i.e. RTP/RTCP) packets as a binary stream.
- "peer.removeStream" Removes a stream from the PeerConnection. If the stream parameter is absent, removes the stream that was most recently added to the PeerConnection.
- Opus uses both mono and stereo codecs. Mono bitrate for opus on chrome is 32000 and stereo bitrate is 64000.
- According to draft "draft-spittka-payload-rtp-opus-03", "Opus bitrate should be in the range between 6000 and 510000", that's why opus min bitrate on chrome is 6000 and max bitrate is 510000.
- SCTP packet max size is 1280.
- Data max bandwidth is 30720 bps.
- You can set the following resolutions (min/max width/height):
- 1920:1080
- 1280:720
- 960:720
- 640:360
- 640:480
- 320:240
- 320:180
- 1280:800
- 1280:720
- 960:600
- 960:540
- 640:400
- 640:360
- 640:480
- 480:300
- 480:270
- 480:360
- 320:200
- 320:180
- 320:240
- 240:150
- 240:135
- 240:180
- 160:100
- 160:90
- 160:120
- The following buffer-sizes are allowed (used in RecordRTC):
- 256
- 512
- 1024
- 2048
- 4096
- 8192
- 16384
- SampleRate must be in the range 22050 to 96000 (used in RecordRTC).
- Possible media m-lines:
- m=audio
- m=video
- m=application
- m=data
- m=control
- m=radius
- m=tacacs
- m=diameter
- m=NAS_L2TP
- m=NAS_LOGIN
- m=NAS_NONE
- m=image
Suggestions
- If you're a newcomer or beginner, you're encouraged to try the RTCMultiConnection.js or DataChannel.js libraries.