getMediaElement.js: A reusable library for all WebRTC applications! / Demo
<script src="//cdn.webrtc-experiment.com/getMediaElement.js"></script>
This library generates an HTML Audio/Video element with a rich user-interface and advanced media controls. It gives you full control over each control button; and its functionality!
// you can pass HTML Video Element
document.body.appendChild( getMediaElement(HTMLVideoElement) );
// you can pass HTML Audio Element
document.body.appendChild( getMediaElement(HTMLAudioElement) );
For audio-only element; you can pass "title":
document.body.appendChild( getMediaElement(HTMLAudioElement) );
document.body.appendChild( getMediaElement(HTMLAudioElement, {
title: 'User Name',
buttons: [] // use this line only if you want to hide audio-recorder button
}) );
=
Audio+Video Stream:
navigator.webkitGetUserMedia({ audio: true, video: true }, function (audioVideoStream) {
var mediaElement = getMediaElement(audioVideoStream);
// append to HTML-BODY element
document.body.appendChild(mediaElement);
// you can access audio/video elements using "media" property
mediaElement.media.play();
});
If your media stream contains video tracks; then it will create HTMLVideoElement; otherwise; it will create HTMLAudioElement.
=
- You can capture
onMuted
event; and disable audio/video tracks accordingly; or hold streams using "inactive" attribute in the SDP! - You can capture
onUnMuted
event; and enable audio/video tracks accordingly; or unhold streams using "sendrecv" attribute in the SDP! - You can capture
onRecordingStarted
and use RecordRTC to record audio/video streams. - You can capture
onRecordingStopped
and invoke stopRecording
method of RecordRTC to stop audio/video recording. You can write recorded audio/video blobs to indexed-db using RecordRTC's newly introduced writeToDisk
and getFromDisk
methods. - You can capture
onZoomin
to understand that video is NOW in full-screen mode. - You can capture
onZoomout
to understand that video is NOW in normal mode. - You can capture
onTakeSnapshot
which will be fired if someone tries to take snapshot. - You can control
buttons
array to control which button should be displayed on media element. - You can use
toggle
method to change buttons' state at runtime! - You can manually pass "toggle" object to force default behaviour.
- You can use
showOnMouseEnter
to control whether buttons should be displayed on mouse enter.
=
var mediaElement = getMediaElement(HTMLVideoElement || HTMLAudioElement || MediaStream, {
// buttons you want to display
buttons: ['mute-audio', 'mute-video', 'record-audio', 'record-video', 'full-screen', 'volume-slider', 'stop'],
// to override default behaviour
toggle: ['mute-audio', 'mute-video', 'record-audio', 'record-video'],
// fired when audio or video is muted
onMuted: function (type) { },
// fired when audio or video is unmuted
onUnMuted: function (type) { },
// fired when audio or video started recording
onRecordingStarted: function (type) { },
// fired when audio or video stopped recording
onRecordingStopped: function (type) { },
// fired for full-screen mode
onZoomin: function () { },
// fired for leaving full-screen mode
onZoomout: function () { },
// fired when call is dropped; or user is ejected
onStopped: function () { },
// fired when take-snapshot button is clicked
onTakeSnapshot: function (snapshot) { },
width: 'media-element-width',
height: 'media-element-height',
showOnMouseEnter: true,
volume: 100
});
=
- HTMLVideoElement or HTMLAudioElement or MediaStream
- Options
Second argument accepts following objects and events:
buttons
; which is an array, allows you to control media buttons to be displayed.
width
; you can customize width of the media-container element by passing this object. Its default value is about "36%-of-screen".
height
; you can customize height of the media-container element by passing this object. Its default value is "auto".
onMuted
; which is fired if audio or video stream is muted. Remember, getMediaElement.js just mutes audio/video locally; you need to send websocket messages in the
onMuted
event to remote party.
onUnMuted
; which is the reverse of
onMuted
.
onRecordingStarted
; you can implement audio-recording options using RecordRTC!
onRecordingStopped
; RecordRTC supports the
stopRecording
method as well!
onZoomin
; it is fired when media element is in full-screen mode.
onZoomout
; it is fired when user leaves full-screen mode either by pressing the
ESC
key; or by clicking a button.
onTakeSnapshot
; it is fired when user clicks to take snapshot. Snapshot is passed over callback in PNG format.
=
mute-audio
mute-video
record-audio
record-video
full-screen
volume-slider
stop
=
mute-audio
mute-video
record-audio
record-video
=
toggle
method allows you toggle buttons at runtime:
mediaElement.toggle('mute-audio');
However, toggle
array can only be passed once, as the second argument:
var mediaElement = getMediaElement(MediaStream, {
toggle: ['mute-audio', 'mute-video', 'record-audio', 'record-video']
});
=
getMediaElement(firstArgument, secondArgument).toggle(options)
Using "toggle" method; you can customize media control buttons' state at runtime; e.g. Mute/UnMute or Zoom-in/Zoom-out etc.
var mediaElement = getMediaElement(HTMLVideoElement);
// anytime, later
mediaElement.toggle(['mute-audio']);
mediaElement.toggle(['mute-audio', 'mute-video']);
"toggle" method accepts following values:
mute-audio
mute-video
record-audio
record-video
stop
"stop" be used to auto-remove media element:
mediaElement.toggle(['stop']);
// or simply; as a string argument, instead of an array
mediaElement.toggle('stop');
=
There is a media
property that returns HTMLAudioElement or HTMLVideoElement:
var mediaElement = getMediaElement(MediaStream, {
toggle: ['mute-audio', 'mute-video', 'record-audio', 'record-video']
});
// Lets play the Video
mediaElement.media.play();
// Lets pause the Audio
mediaElement.media.pause();
// Lets change width/height at runtime
mediaElement.style.width = mediaElement.media.videoWidth + 'px';
mediaElement.style.height = mediaElement.media.videoHeight + 'px';
=
getMediaElement and RTCMultiConnection.js
var videosContainer = document.body;
// www.RTCMultiConnection.org/docs/onstream/
rtcMultiConnection.onstream = function(e) {
var mediaElement = getMediaElement(e.mediaElement, {
width: (videosContainer.clientWidth / 2) - 50,
buttons: ['mute-audio', 'mute-video', 'record-audio', 'record-video', 'full-screen', 'volume-slider', 'stop', 'take-snapshot'],
toggle: e.type == 'local' ? ['mute-audio'] : [],
onMuted: function(type) {
// www.RTCMultiConnection.org/docs/mute/
rtcMultiConnection.streams[e.streamid].mute({
audio: type == 'audio',
video: type == 'video'
});
},
onUnMuted: function(type) {
// www.RTCMultiConnection.org/docs/unmute/
rtcMultiConnection.streams[e.streamid].unmute({
audio: type == 'audio',
video: type == 'video'
});
},
onRecordingStarted: function(type) {
// www.RTCMultiConnection.org/docs/startRecording/
rtcMultiConnection.streams[e.streamid].startRecording({
audio: type == 'audio',
video: type == 'video'
});
},
onRecordingStopped: function(type) {
// www.RTCMultiConnection.org/docs/stopRecording/
rtcMultiConnection.streams[e.streamid].stopRecording(function(blob) {
if (blob.audio) rtcMultiConnection.saveToDisk(blob.audio);
else if (blob.video) rtcMultiConnection.saveToDisk(blob.video);
else rtcMultiConnection.saveToDisk(blob);
}, type);
},
onStopped: function() {
rtcMultiConnection.peers[e.userid].drop();
},
onTakeSnapshot: function() {
if (!e.stream.getVideoTracks().length) return;
// www.RTCMultiConnection.org/docs/takeSnapshot/
rtcMultiConnection.takeSnapshot(e.userid, function(snapshot) {
// on taking snapshot!
});
}
});
videosContainer.insertBefore(mediaElement, videosContainer.firstChild);
};
// www.RTCMultiConnection.org/docs/onstreamended/
rtcMultiConnection.onstreamended = function(e) {
if (e.mediaElement.parentNode && e.mediaElement.parentNode.parentNode && e.mediaElement.parentNode.parentNode.parentNode) {
e.mediaElement.parentNode.parentNode.parentNode.removeChild(e.mediaElement.parentNode.parentNode);
}
};
=
getMediaElement is released under MIT licence. Copyright (c) 2013 Muaz Khan.