msr - npm Package Compare versions

Comparing version 1.2.8 to 1.2.9

MediaStreamRecorder.js

@@ -0,5 +1,32 @@

// Last time updated at September 19, 2015
// links:
// Open-Sourced: https://github.com/streamproc/MediaStreamRecorder
// https://cdn.WebRTC-Experiment.com/MediaStreamRecorder.js
// https://www.WebRTC-Experiment.com/MediaStreamRecorder.js
// npm install msr
// updates?
/*
-. this.recorderType = StereoAudioRecorder;
*/
//------------------------------------
// Browsers Support::
// Chrome (all versions) [ audio/video separately ]
// Firefox ( >= 29 ) [ audio/video in single webm/mp4 container or only audio in ogg ]
// Opera (all versions) [ same as chrome ]
// Android (Chrome) [ only video ]
// Android (Opera) [ only video ]
// Android (Firefox) [ only video ]
// Microsoft Edge (Only Audio & Gif)
//------------------------------------
// Muaz Khan - www.MuazKhan.com
// MIT License - www.webrtc-experiment.com/licence
// Documentation - github.com/streamproc/MediaStreamRecorder
// MIT License - www.WebRTC-Experiment.com/licence
//------------------------------------
'use strict';
// ______________________

@@ -9,3 +36,5 @@ // MediaStreamRecorder.js

function MediaStreamRecorder(mediaStream) {
if (!mediaStream) throw 'MediaStream is mandatory.';
if (!mediaStream) {
throw 'MediaStream is mandatory.';
}

@@ -17,14 +46,27 @@ // void start(optional long timeSlice)

// That's why using WebAudio API to record stereo audio in WAV format
var Recorder = IsChrome ? window.StereoRecorder : window.MediaRecorderWrapper;
var Recorder = IsChrome || IsEdge || IsOpera ? window.StereoAudioRecorder : window.MediaRecorderWrapper;
// video recorder (in WebM format)
if (this.mimeType.indexOf('video') != -1) {
Recorder = IsChrome ? window.WhammyRecorder : window.MediaRecorderWrapper;
if (this.mimeType.indexOf('video') !== -1) {
Recorder = IsChrome || IsEdge || IsOpera ? window.WhammyRecorder : window.MediaRecorderWrapper;
}
// video recorder (in GIF format)
if (this.mimeType === 'image/gif') Recorder = window.GifRecorder;
if (this.mimeType === 'image/gif') {
Recorder = window.GifRecorder;
}
// allows forcing StereoAudioRecorder.js on Edge/Firefox
if (this.recorderType) {
Recorder = this.recorderType;
}
mediaRecorder = new Recorder(mediaStream);
mediaRecorder.ondataavailable = this.ondataavailable;
mediaRecorder.blobs = [];
var self = this;
mediaRecorder.ondataavailable = function(data) {
mediaRecorder.blobs.push(data);
self.ondataavailable(data);
};
mediaRecorder.onstop = this.onstop;

@@ -41,3 +83,6 @@ mediaRecorder.onStartedDrawingNonBlankFrames = this.onStartedDrawingNonBlankFrames;

this.clearOldRecordedFrames = function() {
if (!mediaRecorder) return;
if (!mediaRecorder) {
return;
}
mediaRecorder.clearOldRecordedFrames();

@@ -47,3 +92,5 @@ };

this.stop = function() {
if (mediaRecorder) mediaRecorder.stop();
if (mediaRecorder) {
mediaRecorder.stop();
}
};

@@ -59,2 +106,36 @@

this.save = function(file, fileName) {
if (!file) {
if (!mediaRecorder) {
return;
}
var bigBlob = new Blob(mediaRecorder.blobs, {
type: mediaRecorder.blobs[0].type || this.mimeType
});
invokeSaveAsDialog(bigBlob);
return;
}
invokeSaveAsDialog(file, fileName);
};
this.pause = function() {
if (!mediaRecorder) {
return;
}
mediaRecorder.pause();
console.log('Paused recording.', this.mimeType || mediaRecorder.mimeType);
};
this.resume = function() {
if (!mediaRecorder) {
return;
}
mediaRecorder.resume();
console.log('Resumed recording.', this.mimeType || mediaRecorder.mimeType);
};
this.recorderType = null; // StereoAudioRecorder || WhammyRecorder || MediaRecorderWrapper || GifRecorder
// Reference to "MediaRecorder.js"

@@ -64,41 +145,170 @@ var mediaRecorder;

// below scripts are used to auto-load required files.
// ______________________
// MultiStreamRecorder.js
function loadScript(src, onload) {
var root = window.MediaStreamRecorderScriptsDir;
function MultiStreamRecorder(mediaStream) {
if (!mediaStream) {
throw 'MediaStream is mandatory.';
}
var script = document.createElement('script');
script.src = root + src;
script.onload = onload || function() {};
document.documentElement.appendChild(script);
var self = this;
var isFirefox = !!navigator.mozGetUserMedia;
this.stream = mediaStream;
// void start(optional long timeSlice)
// timestamp to fire "ondataavailable"
this.start = function(timeSlice) {
audioRecorder = new MediaStreamRecorder(mediaStream);
videoRecorder = new MediaStreamRecorder(mediaStream);
audioRecorder.mimeType = 'audio/ogg';
videoRecorder.mimeType = 'video/webm';
for (var prop in this) {
if (typeof this[prop] !== 'function') {
audioRecorder[prop] = videoRecorder[prop] = this[prop];
}
}
audioRecorder.ondataavailable = function(blob) {
if (!audioVideoBlobs[recordingInterval]) {
audioVideoBlobs[recordingInterval] = {};
}
audioVideoBlobs[recordingInterval].audio = blob;
if (audioVideoBlobs[recordingInterval].video && !audioVideoBlobs[recordingInterval].onDataAvailableEventFired) {
audioVideoBlobs[recordingInterval].onDataAvailableEventFired = true;
fireOnDataAvailableEvent(audioVideoBlobs[recordingInterval]);
}
};
videoRecorder.ondataavailable = function(blob) {
if (isFirefox) {
return self.ondataavailable({
video: blob,
audio: blob
});
}
if (!audioVideoBlobs[recordingInterval]) {
audioVideoBlobs[recordingInterval] = {};
}
audioVideoBlobs[recordingInterval].video = blob;
if (audioVideoBlobs[recordingInterval].audio && !audioVideoBlobs[recordingInterval].onDataAvailableEventFired) {
audioVideoBlobs[recordingInterval].onDataAvailableEventFired = true;
fireOnDataAvailableEvent(audioVideoBlobs[recordingInterval]);
}
};
function fireOnDataAvailableEvent(blobs) {
recordingInterval++;
self.ondataavailable(blobs);
}
videoRecorder.onstop = audioRecorder.onstop = function(error) {
self.onstop(error);
};
if (!isFirefox) {
// to make sure both audio/video are synced.
videoRecorder.onStartedDrawingNonBlankFrames = function() {
videoRecorder.clearOldRecordedFrames();
audioRecorder.start(timeSlice);
};
videoRecorder.start(timeSlice);
} else {
videoRecorder.start(timeSlice);
}
};
this.stop = function() {
if (audioRecorder) {
audioRecorder.stop();
}
if (videoRecorder) {
videoRecorder.stop();
}
};
this.ondataavailable = function(blob) {
console.log('ondataavailable..', blob);
};
this.onstop = function(error) {
console.warn('stopped..', error);
};
this.pause = function() {
if (audioRecorder) {
audioRecorder.pause();
}
if (videoRecorder) {
videoRecorder.pause();
}
};
this.resume = function() {
if (audioRecorder) {
audioRecorder.resume();
}
if (videoRecorder) {
videoRecorder.resume();
}
};
var audioRecorder;
var videoRecorder;
var audioVideoBlobs = {};
var recordingInterval = 0;
}
// Muaz Khan - www.MuazKhan.com
// MIT License - www.webrtc-experiment.com/licence
// Documentation - github.com/streamproc/MediaStreamRecorder
// _____________________________
// Cross-Browser-Declarations.js
// animation-frame used in WebM recording
if (!window.requestAnimationFrame) {
requestAnimationFrame = window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame;
}
// WebAudio API representer
if (typeof AudioContext !== 'undefined') {
if (typeof webkitAudioContext !== 'undefined') {
/*global AudioContext:true*/
var AudioContext = webkitAudioContext;
}
if (!window.cancelAnimationFrame) {
cancelAnimationFrame = window.webkitCancelAnimationFrame || window.mozCancelAnimationFrame;
if (typeof mozAudioContext !== 'undefined') {
/*global AudioContext:true*/
var AudioContext = mozAudioContext;
}
}
// WebAudio API representer
if (!window.AudioContext) {
window.AudioContext = window.webkitAudioContext || window.mozAudioContext;
if (typeof URL !== 'undefined' && typeof webkitURL !== 'undefined') {
/*global URL:true*/
var URL = webkitURL;
}
URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
var IsEdge = navigator.userAgent.indexOf('Edge') !== -1 && (!!navigator.msSaveBlob || !!navigator.msSaveOrOpenBlob);
var IsOpera = !!window.opera || navigator.userAgent.indexOf('OPR/') !== -1;
var IsChrome = !IsEdge && !!navigator.webkitGetUserMedia;
if (window.webkitMediaStream) window.MediaStream = window.webkitMediaStream;
if (typeof navigator !== 'undefined') {
if (typeof navigator.webkitGetUserMedia !== 'undefined') {
navigator.getUserMedia = navigator.webkitGetUserMedia;
}
IsChrome = !!navigator.webkitGetUserMedia;
if (typeof navigator.mozGetUserMedia !== 'undefined') {
navigator.getUserMedia = navigator.mozGetUserMedia;
}
} else {
/*global navigator:true */
var navigator = {
getUserMedia: {}
};
}
if (typeof webkitMediaStream !== 'undefined') {
var MediaStream = webkitMediaStream;
}
// Merge all other data-types except "function"

@@ -119,7 +329,9 @@

for (var o in obj) {
if (o.indexOf('-') != -1) {
if (o.indexOf('-') !== -1) {
var splitted = o.split('-');
var name = splitted[0] + splitted[1].split('')[0].toUpperCase() + splitted[1].substr(1);
output[name] = obj[o];
} else output[o] = obj[o];
} else {
output[o] = obj[o];
}
}

@@ -129,2 +341,70 @@ return output;

// "dropFirstFrame" has been added by Graham Roth
// https://github.com/gsroth
function dropFirstFrame(arr) {
arr.shift();
return arr;
}
function invokeSaveAsDialog(file, fileName) {
if (!file) {
throw 'Blob object is required.';
}
if (!file.type) {
file.type = 'video/webm';
}
var fileExtension = file.type.split('/')[1];
if (fileName && fileName.indexOf('.') !== -1) {
var splitted = fileName.split('.');
fileName = splitted[0];
fileExtension = splitted[1];
}
var fileFullName = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + fileExtension;
if (typeof navigator.msSaveOrOpenBlob !== 'undefined') {
return navigator.msSaveOrOpenBlob(file, fileFullName);
} else if (typeof navigator.msSaveBlob !== 'undefined') {
return navigator.msSaveBlob(file, fileFullName);
}
var hyperlink = document.createElement('a');
hyperlink.href = URL.createObjectURL(file);
hyperlink.target = '_blank';
hyperlink.download = fileFullName;
if (!!navigator.mozGetUserMedia) {
hyperlink.onclick = function() {
(document.body || document.documentElement).removeChild(hyperlink);
};
(document.body || document.documentElement).appendChild(hyperlink);
}
var evt = new MouseEvent('click', {
view: window,
bubbles: true,
cancelable: true
});
hyperlink.dispatchEvent(evt);
if (!navigator.mozGetUserMedia) {
URL.revokeObjectURL(hyperlink.href);
}
}
function bytesToSize(bytes) {
var k = 1000;
var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
if (bytes === 0) {
return '0 Bytes';
}
var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}
// ______________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129

@@ -136,3 +416,9 @@ // ObjectStore.js

// ================
// ______________ (used to handle stuff like http://goo.gl/xmE5eg) issue #129
// ObjectStore.js
var ObjectStore = {
AudioContext: window.AudioContext || window.webkitAudioContext
};
// ==================
// MediaRecorder.js

@@ -155,3 +441,3 @@

// using a dirty workaround to generate audio-only stream so that we can get audio/ogg output.
if (this.type == 'audio' && mediaStream.getVideoTracks && mediaStream.getVideoTracks().length && !navigator.mozGetUserMedia) {
if (this.type === 'audio' && mediaStream.getVideoTracks && mediaStream.getVideoTracks().length && !navigator.mozGetUserMedia) {
var context = new AudioContext();

@@ -176,4 +462,11 @@ var mediaStreamSource = context.createMediaStreamSource(mediaStream);

function startRecording() {
if (isStopRecording) return;
if (isStopRecording) {
return;
}
if (isPaused) {
setTimeout(startRecording, 500);
return;
}
mediaRecorder = new MediaRecorder(mediaStream);

@@ -183,4 +476,4 @@

console.log('ondataavailable', e.data.type, e.data.size, e.data);
// mediaRecorder.state == 'recording' means that media recorder is associated with "session"
// mediaRecorder.state == 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted.
// mediaRecorder.state === 'recording' means that media recorder is associated with "session"
// mediaRecorder.state === 'stopped' means that media recorder is detached from the "session" ... in this case; "session" will also be deleted.

@@ -261,2 +554,28 @@ if (!e.data.size) {

var isPaused = false;
this.pause = function() {
if (!mediaRecorder) {
return;
}
isPaused = true;
if (mediaRecorder.state === 'recording') {
mediaRecorder.pause();
}
};
this.resume = function() {
if (!mediaRecorder) {
return;
}
isPaused = false;
if (mediaRecorder.state === 'paused') {
mediaRecorder.resume();
}
};
this.ondataavailable = this.onstop = function() {};

@@ -275,6 +594,6 @@

// =================
// StereoRecorder.js
// ======================
// StereoAudioRecorder.js
function StereoRecorder(mediaStream) {
function StereoAudioRecorder(mediaStream) {
// void start(optional long timeSlice)

@@ -285,3 +604,3 @@ // timestamp to fire "ondataavailable"

mediaRecorder = new StereoAudioRecorder(mediaStream, this);
mediaRecorder = new StereoAudioRecorderHelper(mediaStream, this);

@@ -302,2 +621,18 @@ mediaRecorder.record();

this.pause = function() {
if (!mediaRecorder) {
return;
}
mediaRecorder.pause();
};
this.resume = function() {
if (!mediaRecorder) {
return;
}
mediaRecorder.resume();
};
this.ondataavailable = function() {};

@@ -310,17 +645,19 @@

// ======================
// StereoAudioRecorder.js
// ============================
// StereoAudioRecorderHelper.js
// source code from: http://typedarray.org/wp-content/projects/WebAudioRecorder/script.js
function StereoAudioRecorder(mediaStream, root) {
function StereoAudioRecorderHelper(mediaStream, root) {
// variables
var deviceSampleRate = 44100; // range: 22050 to 96000
// check device sample rate
if(window.AudioContext){
deviceSampleRate = (new window.AudioContext()).sampleRate;
if (!ObjectStore.AudioContextConstructor) {
ObjectStore.AudioContextConstructor = new ObjectStore.AudioContext();
}
// check device sample rate
deviceSampleRate = ObjectStore.AudioContextConstructor.sampleRate;
var leftchannel = [];

@@ -334,3 +671,2 @@ var rightchannel = [];

var sampleRate = root.sampleRate || deviceSampleRate;
var audioContext;
var context;

@@ -348,3 +684,7 @@

this.requestData = function() {
if (recordingLength == 0) {
if (isPaused) {
return;
}
if (recordingLength === 0) {
requestDataInvoked = false;

@@ -356,5 +696,5 @@ return;

// clone stuff
var internal_leftchannel = leftchannel.slice(0);
var internal_rightchannel = rightchannel.slice(0);
var internal_recordingLength = recordingLength;
var internalLeftChannel = leftchannel.slice(0);
var internalRightChannel = rightchannel.slice(0);
var internalRecordingLength = recordingLength;

@@ -367,4 +707,4 @@ // reset the buffers for the new recording

// we flat the left and right channels down
var leftBuffer = mergeBuffers(internal_leftchannel, internal_recordingLength);
var rightBuffer = mergeBuffers(internal_leftchannel, internal_recordingLength);
var leftBuffer = mergeBuffers(internalLeftChannel, internalRecordingLength);
var rightBuffer = mergeBuffers(internalLeftChannel, internalRecordingLength);

@@ -461,9 +801,2 @@ // we interleave both channels together

// creates the audio context
// creates the audio context
var audioContext = ObjectStore.AudioContext;
if (!ObjectStore.AudioContextConstructor)
ObjectStore.AudioContextConstructor = new audioContext();
var context = ObjectStore.AudioContextConstructor;

@@ -491,3 +824,5 @@

var bufferSize = root.bufferSize || 2048;
if (root.bufferSize == 0) bufferSize = 0;
if (root.bufferSize === 0) {
bufferSize = 0;
}

@@ -514,9 +849,21 @@ if (context.createJavaScriptNode) {

if (numChannels == 1) {
if (numChannels === 1) {
console.debug('All right-channels are skipped.');
}
var isPaused = false;
this.pause = function() {
isPaused = true;
};
this.resume = function() {
isPaused = false;
};
// http://webaudio.github.io/web-audio-api/#the-scriptprocessornode-interface
scriptprocessornode.onaudioprocess = function(e) {
if (!recording || requestDataInvoked) return;
if (!recording || requestDataInvoked || isPaused) {
return;
}

@@ -526,3 +873,3 @@ var left = e.inputBuffer.getChannelData(0);

if (numChannels == 2) {
if (numChannels === 2) {
var right = e.inputBuffer.getChannelData(1);

@@ -538,3 +885,63 @@ rightchannel.push(new Float32Array(right));

// =======================
// ===================
// WhammyRecorder.js
function WhammyRecorder(mediaStream) {
// void start(optional long timeSlice)
// timestamp to fire "ondataavailable"
this.start = function(timeSlice) {
timeSlice = timeSlice || 1000;
mediaRecorder = new WhammyRecorderHelper(mediaStream, this);
for (var prop in this) {
if (typeof this[prop] !== 'function') {
mediaRecorder[prop] = this[prop];
}
}
mediaRecorder.record();
timeout = setInterval(function() {
mediaRecorder.requestData();
}, timeSlice);
};
this.stop = function() {
if (mediaRecorder) {
mediaRecorder.stop();
clearTimeout(timeout);
}
};
this.clearOldRecordedFrames = function() {
if (mediaRecorder) {
mediaRecorder.clearOldRecordedFrames();
}
};
this.pause = function() {
if (!mediaRecorder) {
return;
}
mediaRecorder.pause();
};
this.resume = function() {
if (!mediaRecorder) {
return;
}
mediaRecorder.resume();
};
this.ondataavailable = function() {};
// Reference to "WhammyRecorder" object
var mediaRecorder;
var timeout;
}
// ==========================
// WhammyRecorderHelper.js

@@ -544,8 +951,16 @@

this.record = function(timeSlice) {
if (!this.width) this.width = 320;
if (!this.height) this.height = 240;
if (!this.width) {
this.width = 320;
}
if (!this.height) {
this.height = 240;
}
if (this.video && this.video instanceof HTMLVideoElement) {
if (!this.width) this.width = video.videoWidth || video.clientWidth || 320;
if (!this.height) this.height = video.videoHeight || video.clientHeight || 240;
if (!this.width) {
this.width = video.videoWidth || video.clientWidth || 320;
}
if (!this.height) {
this.height = video.videoHeight || video.clientHeight || 240;
}
}

@@ -572,2 +987,3 @@

if (this.video && this.video instanceof HTMLVideoElement) {
this.isHTMLObject = true;
video = this.video.cloneNode();

@@ -595,3 +1011,3 @@ } else {

this.clearOldRecordedFrames = function() {
frames = [];
whammy.frames = [];
};

@@ -601,3 +1017,7 @@

this.requestData = function() {
if (!frames.length) {
if (isPaused) {
return;
}
if (!whammy.frames.length) {
requestDataInvoked = false;

@@ -609,13 +1029,14 @@ return;

// clone stuff
var internal_frames = frames.slice(0);
var internalFrames = whammy.frames.slice(0);
// reset the frames for the new recording
frames = [];
whammy.frames = dropBlackFrames(internal_frames, -1);
whammy.frames = dropBlackFrames(internalFrames, -1);
var WebM_Blob = whammy.compile();
root.ondataavailable(WebM_Blob);
whammy.compile(function(whammyBlob) {
root.ondataavailable(whammyBlob);
console.debug('video recorded blob size:', bytesToSize(whammyBlob.size));
});
console.debug('video recorded blob size:', bytesToSize(WebM_Blob.size));
whammy.frames = [];

@@ -625,13 +1046,23 @@ requestDataInvoked = false;

var frames = [];
var isOnStartedDrawingNonBlankFramesInvoked = false;
function drawFrames() {
if (isStopDrawing) return;
if (isPaused) {
lastTime = new Date().getTime();
setTimeout(drawFrames, 500);
return;
}
if (requestDataInvoked) return setTimeout(drawFrames, 100);
if (isStopDrawing) {
return;
}
if (requestDataInvoked) {
return setTimeout(drawFrames, 100);
}
var duration = new Date().getTime() - lastTime;
if (!duration) return drawFrames();
if (!duration) {
return drawFrames();
}

@@ -641,9 +1072,16 @@ // via webrtc-experiment#206, by Jack i.e. @Seymourr

if (!self.isHTMLObject && video.paused) {
video.play(); // Android
}
context.drawImage(video, 0, 0, canvas.width, canvas.height);
!isStopDrawing && frames.push({
duration: duration,
image: canvas.toDataURL('image/webp')
});
if (!isOnStartedDrawingNonBlankFramesInvoked && !isBlankFrame(frames[frames.length - 1])) {
if (!isStopDrawing) {
whammy.frames.push({
duration: duration,
image: canvas.toDataURL('image/webp')
});
}
if (!isOnStartedDrawingNonBlankFramesInvoked && !isBlankFrame(whammy.frames[whammy.frames.length - 1])) {
isOnStartedDrawingNonBlankFramesInvoked = true;

@@ -800,412 +1238,19 @@ root.onStartedDrawingNonBlankFrames();

}
}
// =================
// WhammyRecorder.js
var isPaused = false;
function WhammyRecorder(mediaStream) {
// void start(optional long timeSlice)
// timestamp to fire "ondataavailable"
this.start = function(timeSlice) {
timeSlice = timeSlice || 1000;
mediaRecorder = new WhammyRecorderHelper(mediaStream, this);
for (var prop in this) {
if (typeof this[prop] !== 'function') {
mediaRecorder[prop] = this[prop];
}
}
mediaRecorder.record();
timeout = setInterval(function() {
mediaRecorder.requestData();
}, timeSlice);
this.pause = function() {
isPaused = true;
};
this.stop = function() {
if (mediaRecorder) {
mediaRecorder.stop();
clearTimeout(timeout);
}
this.resume = function() {
isPaused = false;
};
this.clearOldRecordedFrames = function() {
if (mediaRecorder) {
mediaRecorder.clearOldRecordedFrames();
}
};
this.ondataavailable = function() {};
// Reference to "WhammyRecorder" object
var mediaRecorder;
var timeout;
}
// Muaz Khan - https://github.com/muaz-khan
// neizerth - https://github.com/neizerth
// MIT License - https://www.webrtc-experiment.com/licence/
// Documentation - https://github.com/streamproc/MediaStreamRecorder
// Note:
// ==========================================================
// whammy.js is an "external library"
// and has its own copyrights. Taken from "Whammy" project.
// https://github.com/antimatter15/whammy/blob/master/LICENSE
// =========
// Whammy.js
// todo: Firefox now supports webp for webm containers!
// their MediaRecorder implementation works well!
// should we provide an option to record via Whammy.js or MediaRecorder API is a better solution?
var Whammy = (function() {
function toWebM(frames) {
var info = checkFrames(frames);
var CLUSTER_MAX_DURATION = 30000;
var EBML = [{
"id": 0x1a45dfa3, // EBML
"data": [{
"data": 1,
"id": 0x4286 // EBMLVersion
}, {
"data": 1,
"id": 0x42f7 // EBMLReadVersion
}, {
"data": 4,
"id": 0x42f2 // EBMLMaxIDLength
}, {
"data": 8,
"id": 0x42f3 // EBMLMaxSizeLength
}, {
"data": "webm",
"id": 0x4282 // DocType
}, {
"data": 2,
"id": 0x4287 // DocTypeVersion
}, {
"data": 2,
"id": 0x4285 // DocTypeReadVersion
}]
}, {
"id": 0x18538067, // Segment
"data": [{
"id": 0x1549a966, // Info
"data": [{
"data": 1e6, //do things in millisecs (num of nanosecs for duration scale)
"id": 0x2ad7b1 // TimecodeScale
}, {
"data": "whammy",
"id": 0x4d80 // MuxingApp
}, {
"data": "whammy",
"id": 0x5741 // WritingApp
}, {
"data": doubleToString(info.duration),
"id": 0x4489 // Duration
}]
}, {
"id": 0x1654ae6b, // Tracks
"data": [{
"id": 0xae, // TrackEntry
"data": [{
"data": 1,
"id": 0xd7 // TrackNumber
}, {
"data": 1,
"id": 0x63c5 // TrackUID
}, {
"data": 0,
"id": 0x9c // FlagLacing
}, {
"data": "und",
"id": 0x22b59c // Language
}, {
"data": "V_VP8",
"id": 0x86 // CodecID
}, {
"data": "VP8",
"id": 0x258688 // CodecName
}, {
"data": 1,
"id": 0x83 // TrackType
}, {
"id": 0xe0, // Video
"data": [{
"data": info.width,
"id": 0xb0 // PixelWidth
}, {
"data": info.height,
"id": 0xba // PixelHeight
}]
}]
}]
}]
}];
//Generate clusters (max duration)
var frameNumber = 0;
var clusterTimecode = 0;
while (frameNumber < frames.length) {
var clusterFrames = [];
var clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < CLUSTER_MAX_DURATION);
var clusterCounter = 0;
var cluster = {
"id": 0x1f43b675, // Cluster
"data": [{
"data": clusterTimecode,
"id": 0xe7 // Timecode
}].concat(clusterFrames.map(function(webp) {
var block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}))
}; //Add cluster to segment
EBML[1].data.push(cluster);
clusterTimecode += clusterDuration;
}
return generateEBML(EBML);
}
// sums the lengths of all the frames and gets the duration
function checkFrames(frames) {
if (!frames[0]) {
console.warn('Something went wrong. Maybe WebP format is not supported in the current browser.');
return;
}
var width = frames[0].width,
height = frames[0].height,
duration = frames[0].duration;
for (var i = 1; i < frames.length; i++) {
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
function numToBuffer(num) {
var parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
function strToBuffer(str) {
return new Uint8Array(str.split('').map(function(e) {
return e.charCodeAt(0);
}));
}
function bitsToBuffer(bits) {
var data = [];
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
return new Uint8Array(data);
}
function generateEBML(json) {
var ebml = [];
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data == 'object') data = generateEBML(data);
if (typeof data == 'number') data = bitsToBuffer(data.toString(2));
if (typeof data == 'string') data = strToBuffer(data);
var len = data.size || data.byteLength || data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var size_str = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
return new Blob(ebml, {
type: "video/webm"
});
}
function toBinStr_old(bits) {
var data = '';
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
}
return data;
}
function generateEBML_old(json) {
var ebml = '';
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data == 'object') data = generateEBML_old(data);
if (typeof data == 'number') data = toBinStr_old(data.toString(2));
var len = data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var size_str = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - size_str.length)).join('0') + size_str;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml += toBinStr_old(json[i].id.toString(2)) + toBinStr_old(size) + data;
}
return ebml;
}
function makeSimpleBlock(data) {
var flags = 0;
if (data.keyframe) flags |= 128;
if (data.invisible) flags |= 8;
if (data.lacing) flags |= (data.lacing << 1);
if (data.discardable) flags |= 1;
if (data.trackNum > 127) {
throw "TrackNumber > 127 not supported";
}
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
function parseWebP(riff) {
var VP8 = riff.RIFF[0].WEBP[0];
var frame_start = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
for (var i = 0, c = []; i < 4; i++) c[i] = VP8.charCodeAt(frame_start + 3 + i);
var width, height, tmp;
//the code below is literally copied verbatim from the bitstream spec
tmp = (c[1] << 8) | c[0];
width = tmp & 0x3FFF;
tmp = (c[3] << 8) | c[2];
height = tmp & 0x3FFF;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
}
function parseRIFF(string) {
var offset = 0;
var chunks = {};
while (offset < string.length) {
var id = string.substr(offset, 4);
var len = parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
var unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
var data = string.substr(offset + 4 + 4, len);
offset += 4 + 4 + len;
chunks[id] = chunks[id] || [];
if (id == 'RIFF' || id == 'LIST') {
chunks[id].push(parseRIFF(data));
} else {
chunks[id].push(data);
}
}
return chunks;
}
function doubleToString(num) {
return [].slice.call(
new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
return String.fromCharCode(e);
}).reverse().join('');
}
// a more abstract-ish API
function WhammyVideo(duration) {
this.frames = [];
this.duration = duration || 1;
this.quality = 100;
}
WhammyVideo.prototype.add = function(frame, duration) {
if ('canvas' in frame) { //CanvasRenderingContext2D
frame = frame.canvas;
}
if ('toDataURL' in frame) {
frame = frame.toDataURL('image/webp', this.quality);
}
if (!(/^data:image\/webp;base64,/ig).test(frame)) {
throw "Input must be formatted properly as a base64 encoded DataURI of type image/webp";
}
this.frames.push({
image: frame,
duration: duration || this.duration
});
};
WhammyVideo.prototype.compile = function() {
return new toWebM(this.frames.map(function(frame) {
var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
webp.duration = frame.duration;
return webp;
}));
};
return {
Video: WhammyVideo,
toWebM: toWebM
};
})();
// Muaz Khan - https://github.com/muaz-khan
// neizerth - https://github.com/neizerth
// MIT License - https://www.webrtc-experiment.com/licence/
// Documentation - https://github.com/streamproc/MediaStreamRecorder
// ==========================================================
// --------------
// GifRecorder.js
function GifRecorder(mediaStream) {
if (!window.GIFEncoder) {
if (typeof GIFEncoder === 'undefined') {
throw 'Please link: https://cdn.webrtc-experiment.com/gif-recorder.js';

@@ -1255,2 +1300,7 @@ }

function drawVideoFrame(time) {
if (isPaused) {
setTimeout(drawVideoFrame, 500, time);
return;
}
lastAnimationFrame = requestAnimationFrame(drawVideoFrame);

@@ -1263,4 +1313,10 @@

// ~10 fps
if (time - lastFrameTime < 90) return;
if (time - lastFrameTime < 90) {
return;
}
if (video.paused) {
video.play(); // Android
}
context.drawImage(video, 0, 0, imageWidth, imageHeight);

@@ -1291,3 +1347,3 @@

gifEncoder.stream().bin = [];
};
}

@@ -1302,2 +1358,12 @@ this.stop = function() {

var isPaused = false;
this.pause = function() {
isPaused = true;
};
this.resume = function() {
isPaused = false;
};
this.ondataavailable = function() {};

@@ -1325,110 +1391,431 @@ this.onstop = function() {};

// ______________________
// MultiStreamRecorder.js
// https://github.com/antimatter15/whammy/blob/master/LICENSE
// _________
// Whammy.js
function MultiStreamRecorder(mediaStream) {
if (!mediaStream) throw 'MediaStream is mandatory.';
// todo: Firefox now supports webp for webm containers!
// their MediaRecorder implementation works well!
// should we provide an option to record via Whammy.js or MediaRecorder API is a better solution?
var self = this;
var isFirefox = !!navigator.mozGetUserMedia;
/**
* Whammy is a standalone class used by {@link RecordRTC} to bring video recording in Chrome. It is written by {@link https://github.com/antimatter15|antimatter15}
* @summary A real time javascript webm encoder based on a canvas hack.
* @typedef Whammy
* @class
* @example
* var recorder = new Whammy().Video(15);
* recorder.add(context || canvas || dataURL);
* var output = recorder.compile();
*/
this.stream = mediaStream;
var Whammy = (function() {
// a more abstract-ish API
// void start(optional long timeSlice)
// timestamp to fire "ondataavailable"
this.start = function(timeSlice) {
audioRecorder = new MediaStreamRecorder(mediaStream);
videoRecorder = new MediaStreamRecorder(mediaStream);
function WhammyVideo(duration) {
this.frames = [];
this.duration = duration || 1;
this.quality = 0.8;
}
audioRecorder.mimeType = 'audio/ogg';
videoRecorder.mimeType = 'video/webm';
/**
* Pass Canvas or Context or image/webp(string) to {@link Whammy} encoder.
* @method
* @memberof Whammy
* @example
* recorder = new Whammy().Video(0.8, 100);
* recorder.add(canvas || context || 'image/webp');
* @param {string} frame - Canvas || Context || image/webp
* @param {number} duration - Stick a duration (in milliseconds)
*/
WhammyVideo.prototype.add = function(frame, duration) {
if ('canvas' in frame) { //CanvasRenderingContext2D
frame = frame.canvas;
}
for (var prop in this) {
if (typeof this[prop] !== 'function') {
audioRecorder[prop] = videoRecorder[prop] = this[prop];
}
if ('toDataURL' in frame) {
frame = frame.toDataURL('image/webp', this.quality);
}
audioRecorder.ondataavailable = function(blob) {
if (!audioVideoBlobs[recordingInterval]) {
audioVideoBlobs[recordingInterval] = {};
if (!(/^data:image\/webp;base64,/ig).test(frame)) {
throw 'Input must be formatted properly as a base64 encoded DataURI of type image/webp';
}
this.frames.push({
image: frame,
duration: duration || this.duration
});
};
function processInWebWorker(_function) {
var blob = URL.createObjectURL(new Blob([_function.toString(),
'this.onmessage = function (e) {' + _function.name + '(e.data);}'
], {
type: 'application/javascript'
}));
var worker = new Worker(blob);
URL.revokeObjectURL(blob);
return worker;
}
function whammyInWebWorker(frames) {
function ArrayToWebM(frames) {
var info = checkFrames(frames);
if (!info) {
return [];
}
audioVideoBlobs[recordingInterval].audio = blob;
var clusterMaxDuration = 30000;
if (audioVideoBlobs[recordingInterval].video && !audioVideoBlobs[recordingInterval].onDataAvailableEventFired) {
audioVideoBlobs[recordingInterval].onDataAvailableEventFired = true;
fireOnDataAvailableEvent(audioVideoBlobs[recordingInterval]);
var EBML = [{
'id': 0x1a45dfa3, // EBML
'data': [{
'data': 1,
'id': 0x4286 // EBMLVersion
}, {
'data': 1,
'id': 0x42f7 // EBMLReadVersion
}, {
'data': 4,
'id': 0x42f2 // EBMLMaxIDLength
}, {
'data': 8,
'id': 0x42f3 // EBMLMaxSizeLength
}, {
'data': 'webm',
'id': 0x4282 // DocType
}, {
'data': 2,
'id': 0x4287 // DocTypeVersion
}, {
'data': 2,
'id': 0x4285 // DocTypeReadVersion
}]
}, {
'id': 0x18538067, // Segment
'data': [{
'id': 0x1549a966, // Info
'data': [{
'data': 1e6, //do things in millisecs (num of nanosecs for duration scale)
'id': 0x2ad7b1 // TimecodeScale
}, {
'data': 'whammy',
'id': 0x4d80 // MuxingApp
}, {
'data': 'whammy',
'id': 0x5741 // WritingApp
}, {
'data': doubleToString(info.duration),
'id': 0x4489 // Duration
}]
}, {
'id': 0x1654ae6b, // Tracks
'data': [{
'id': 0xae, // TrackEntry
'data': [{
'data': 1,
'id': 0xd7 // TrackNumber
}, {
'data': 1,
'id': 0x73c5 // TrackUID
}, {
'data': 0,
'id': 0x9c // FlagLacing
}, {
'data': 'und',
'id': 0x22b59c // Language
}, {
'data': 'V_VP8',
'id': 0x86 // CodecID
}, {
'data': 'VP8',
'id': 0x258688 // CodecName
}, {
'data': 1,
'id': 0x83 // TrackType
}, {
'id': 0xe0, // Video
'data': [{
'data': info.width,
'id': 0xb0 // PixelWidth
}, {
'data': info.height,
'id': 0xba // PixelHeight
}]
}]
}]
}]
}];
//Generate clusters (max duration)
var frameNumber = 0;
var clusterTimecode = 0;
while (frameNumber < frames.length) {
var clusterFrames = [];
var clusterDuration = 0;
do {
clusterFrames.push(frames[frameNumber]);
clusterDuration += frames[frameNumber].duration;
frameNumber++;
} while (frameNumber < frames.length && clusterDuration < clusterMaxDuration);
var clusterCounter = 0;
var cluster = {
'id': 0x1f43b675, // Cluster
'data': getClusterData(clusterTimecode, clusterCounter, clusterFrames)
}; //Add cluster to segment
EBML[1].data.push(cluster);
clusterTimecode += clusterDuration;
}
};
videoRecorder.ondataavailable = function(blob) {
if (isFirefox) {
return self.ondataavailable({
video: blob,
audio: blob
return generateEBML(EBML);
}
function getClusterData(clusterTimecode, clusterCounter, clusterFrames) {
return [{
'data': clusterTimecode,
'id': 0xe7 // Timecode
}].concat(clusterFrames.map(function(webp) {
var block = makeSimpleBlock({
discardable: 0,
frame: webp.data.slice(4),
invisible: 0,
keyframe: 1,
lacing: 0,
trackNum: 1,
timecode: Math.round(clusterCounter)
});
clusterCounter += webp.duration;
return {
data: block,
id: 0xa3
};
}));
}
// sums the lengths of all the frames and gets the duration
function checkFrames(frames) {
if (!frames[0]) {
postMessage({
error: 'Something went wrong. Maybe WebP format is not supported in the current browser.'
});
return;
}
if (!audioVideoBlobs[recordingInterval]) {
audioVideoBlobs[recordingInterval] = {};
var width = frames[0].width,
height = frames[0].height,
duration = frames[0].duration;
for (var i = 1; i < frames.length; i++) {
duration += frames[i].duration;
}
return {
duration: duration,
width: width,
height: height
};
}
audioVideoBlobs[recordingInterval].video = blob;
function numToBuffer(num) {
var parts = [];
while (num > 0) {
parts.push(num & 0xff);
num = num >> 8;
}
return new Uint8Array(parts.reverse());
}
if (audioVideoBlobs[recordingInterval].audio && !audioVideoBlobs[recordingInterval].onDataAvailableEventFired) {
audioVideoBlobs[recordingInterval].onDataAvailableEventFired = true;
fireOnDataAvailableEvent(audioVideoBlobs[recordingInterval]);
function strToBuffer(str) {
return new Uint8Array(str.split('').map(function(e) {
return e.charCodeAt(0);
}));
}
function bitsToBuffer(bits) {
var data = [];
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data.push(parseInt(bits.substr(i, 8), 2));
}
};
return new Uint8Array(data);
}
function fireOnDataAvailableEvent(blobs) {
recordingInterval++;
self.ondataavailable(blobs);
function generateEBML(json) {
var ebml = [];
for (var i = 0; i < json.length; i++) {
var data = json[i].data;
if (typeof data === 'object') {
data = generateEBML(data);
}
if (typeof data === 'number') {
data = bitsToBuffer(data.toString(2));
}
if (typeof data === 'string') {
data = strToBuffer(data);
}
var len = data.size || data.byteLength || data.length;
var zeroes = Math.ceil(Math.ceil(Math.log(len) / Math.log(2)) / 8);
var sizeToString = len.toString(2);
var padded = (new Array((zeroes * 7 + 7 + 1) - sizeToString.length)).join('0') + sizeToString;
var size = (new Array(zeroes)).join('0') + '1' + padded;
ebml.push(numToBuffer(json[i].id));
ebml.push(bitsToBuffer(size));
ebml.push(data);
}
return new Blob(ebml, {
type: 'video/webm'
});
}
videoRecorder.onstop = audioRecorder.onstop = function(error) {
self.onstop(error);
};
function toBinStrOld(bits) {
var data = '';
var pad = (bits.length % 8) ? (new Array(1 + 8 - (bits.length % 8))).join('0') : '';
bits = pad + bits;
for (var i = 0; i < bits.length; i += 8) {
data += String.fromCharCode(parseInt(bits.substr(i, 8), 2));
}
return data;
}
if (!isFirefox) {
// to make sure both audio/video are synced.
videoRecorder.onStartedDrawingNonBlankFrames = function() {
videoRecorder.clearOldRecordedFrames();
audioRecorder.start(timeSlice);
function makeSimpleBlock(data) {
var flags = 0;
if (data.keyframe) {
flags |= 128;
}
if (data.invisible) {
flags |= 8;
}
if (data.lacing) {
flags |= (data.lacing << 1);
}
if (data.discardable) {
flags |= 1;
}
if (data.trackNum > 127) {
throw 'TrackNumber > 127 not supported';
}
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
return String.fromCharCode(e);
}).join('') + data.frame;
return out;
}
function parseWebP(riff) {
var VP8 = riff.RIFF[0].WEBP[0];
var frameStart = VP8.indexOf('\x9d\x01\x2a'); // A VP8 keyframe starts with the 0x9d012a header
for (var i = 0, c = []; i < 4; i++) {
c[i] = VP8.charCodeAt(frameStart + 3 + i);
}
var width, height, tmp;
//the code below is literally copied verbatim from the bitstream spec
tmp = (c[1] << 8) | c[0];
width = tmp & 0x3FFF;
tmp = (c[3] << 8) | c[2];
height = tmp & 0x3FFF;
return {
width: width,
height: height,
data: VP8,
riff: riff
};
videoRecorder.start(timeSlice);
} else {
videoRecorder.start(timeSlice);
}
};
this.stop = function() {
if (audioRecorder) audioRecorder.stop();
if (videoRecorder) videoRecorder.stop();
};
function getStrLength(string, offset) {
return parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
var unpadded = i.charCodeAt(0).toString(2);
return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;
}).join(''), 2);
}
this.ondataavailable = function(blob) {
console.log('ondataavailable..', blob);
};
function parseRIFF(string) {
var offset = 0;
var chunks = {};
this.onstop = function(error) {
console.warn('stopped..', error);
};
while (offset < string.length) {
var id = string.substr(offset, 4);
var len = getStrLength(string, offset);
var data = string.substr(offset + 4 + 4, len);
offset += 4 + 4 + len;
chunks[id] = chunks[id] || [];
var audioRecorder;
var videoRecorder;
if (id === 'RIFF' || id === 'LIST') {
chunks[id].push(parseRIFF(data));
} else {
chunks[id].push(data);
}
}
return chunks;
}
var audioVideoBlobs = {};
var recordingInterval = 0;
}
function doubleToString(num) {
return [].slice.call(
new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
return String.fromCharCode(e);
}).reverse().join('');
}
function bytesToSize(bytes) {
var k = 1000;
var sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB'];
if (bytes === 0) {
return '0 Bytes';
var webm = new ArrayToWebM(frames.map(function(frame) {
var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));
webp.duration = frame.duration;
return webp;
}));
postMessage(webm);
}
var i = parseInt(Math.floor(Math.log(bytes) / Math.log(k)), 10);
return (bytes / Math.pow(k, i)).toPrecision(3) + ' ' + sizes[i];
}
/**
* Encodes frames in WebM container. It uses a WebWorker to invoke the 'ArrayToWebM' method.
* @param {function} callback - Callback function, that is used to pass recorded blob back to the callee.
* @method
* @memberof Whammy
* @example
* recorder = new Whammy().Video(0.8, 100);
* recorder.compile(function(blob) {
* // blob.size - blob.type
* });
*/
WhammyVideo.prototype.compile = function(callback) {
var webWorker = processInWebWorker(whammyInWebWorker);
webWorker.onmessage = function(event) {
if (event.data.error) {
console.error(event.data.error);
return;
}
callback(event.data);
};
webWorker.postMessage(this.frames);
};
return {
/**
* A more abstract-ish API.
* @method
* @memberof Whammy
* @example
* recorder = new Whammy().Video(0.8, 100);
* @param {?number} speed - 0.8
* @param {?number} quality - 100
*/
Video: WhammyVideo
};
})();
package.json

{
"name": "msr",
"preferGlobal": true,
"version": "1.2.8",
"version": "1.2.9",
"author": {

@@ -6,0 +6,0 @@ "name": "Muaz Khan",

README.md

@@ -1,3 +0,5 @@

## [MediaStreamRecorder.js](https://github.com/streamproc/MediaStreamRecorder) - [Demos](https://www.webrtc-experiment.com/msr/) - [![npm](https://img.shields.io/npm/v/msr.svg)](https://npmjs.org/package/msr) [![downloads](https://img.shields.io/npm/dm/msr.svg)](https://npmjs.org/package/msr)
# [MediaStreamRecorder.js](https://github.com/streamproc/MediaStreamRecorder) - [Demos](https://www.webrtc-experiment.com/msr/)
[![npm](https://img.shields.io/npm/v/msr.svg)](https://npmjs.org/package/msr) [![downloads](https://img.shields.io/npm/dm/msr.svg)](https://npmjs.org/package/msr) [![Build Status: Linux](https://travis-ci.org/streamproc/MediaStreamRecorder.png?branch=master)](https://travis-ci.org/streamproc/MediaStreamRecorder)
A cross-browser implementation to record audio/video streams:

@@ -21,3 +23,3 @@

There is a similar project: **RecordRTC**! [Demo](https://www.webrtc-experiment.com/RecordRTC/) - [Documentation](https://github.com/muaz-khan/WebRTC-Experiment/tree/master/RecordRTC)
There is a similar project: **RecordRTC**! [Demo](https://www.webrtc-experiment.com/RecordRTC/) - [Documentation](https://github.com/muaz-khan/RecordRTC)

@@ -42,7 +44,7 @@ ## How to link scripts?

<script src="https://cdn.webrtc-experiment.com/MediaStreamRecorder.js"> </script>
```
## Otherwise, you can link specific files:
<!-- or -->
* https://github.com/streamproc/MediaStreamRecorder/blob/master/How-to-Link-Specific-Files.md
https://cdn.rawgit.com/streamproc/MediaStreamRecorder/master/MediaStreamRecorder.js
```
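
Once the script is linked, typical usage looks like the minimal sketch below; it only uses APIs visible elsewhere in this diff (the `MediaStreamRecorder` constructor, `mimeType`, `ondataavailable`, and `start(timeSlice)`), and it assumes the legacy callback-style `navigator.getUserMedia` that this library polyfills:

```javascript
navigator.getUserMedia({ audio: true, video: true }, function(stream) {
    var mediaRecorder = new MediaStreamRecorder(stream);
    mediaRecorder.mimeType = 'video/webm'; // or 'audio/ogg' / 'image/gif'
    mediaRecorder.ondataavailable = function(blob) {
        // a new Blob is emitted every timeSlice milliseconds
        console.log('recorded blob size:', blob.size);
    };
    mediaRecorder.start(3000); // fire ondataavailable every 3 seconds
}, function(error) {
    console.error('getUserMedia error', error);
});
```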

@@ -180,2 +182,24 @@ ## Record audio+video in Firefox in single WebM

## How to pause recordings?
```javascript
mediaRecorder.pause();
```
## How to resume recordings?
```javascript
mediaRecorder.resume();
```
## How to save recordings?
```javascript
// invoke save-as dialog for all recorded blobs
mediaRecorder.save();
// or pass external blob/file
mediaRecorder.save(YourExternalBlob, 'FileName.webm');
```
## How to upload recorded files using PHP?
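
The body of this section falls outside the hunk shown here; as a minimal client-side sketch, the recorded blob can be posted as multipart form data to a server-side script (the `save.php` endpoint and `video-blob` field name below are hypothetical):

```javascript
mediaRecorder.ondataavailable = function(blob) {
    var formData = new FormData();
    formData.append('video-blob', blob, 'recording.webm'); // hypothetical field name

    var xhr = new XMLHttpRequest();
    xhr.open('POST', '/save.php', true); // hypothetical PHP endpoint
    xhr.onload = function() {
        console.log('upload finished with status', xhr.status);
    };
    xhr.send(formData);
};
```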

@@ -231,4 +255,28 @@

## audioChannels
## `recorderType`
You can force StereoAudioRecorder, WhammyRecorder, or similar recorders on Firefox or Edge, and even on Chrome and Opera.
All browsers will be using your specified recorder:
```javascript
// force WebAudio API on all browsers
// it allows you to record remote audio-streams in Firefox
// it also works in Microsoft Edge
mediaRecorder.recorderType = StereoAudioRecorder;
// force webp based webm encoder on all browsers
mediaRecorder.recorderType = WhammyRecorder;
// force MediaRecorder API on all browsers
// Chrome is going to implement MediaRecorder API soon;
// so this property allows you to force MediaRecorder in Chrome.
mediaRecorder.recorderType = MediaRecorderWrapper;
// force GifRecorder in all browsers. Both WhammyRecorder and MediaRecorder API will be ignored.
mediaRecorder.recorderType = GifRecorder;
```
## `audioChannels`
It is an integer value that accepts either 1 or 2. "1" means record only the left channel and skip the right one. The default value is "2".
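
For example, a short sketch of recording mono audio ("1" keeps only the left channel, as documented above):

```javascript
mediaRecorder.audioChannels = 1; // record only the left channel
mediaRecorder.start(5000);
```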

@@ -240,3 +288,3 @@

## bufferSize
## `bufferSize`

@@ -249,3 +297,3 @@ You can set the following audio bufferSize values: 0, 256, 512, 1024, 2048, 4096, 8192, and 16384. "0" means: let Chrome decide the device's default bufferSize. The default value is "2048".
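
For example, a sketch of the two most common choices:

```javascript
mediaRecorder.bufferSize = 0;     // let Chrome pick the device's default buffer size
// or
mediaRecorder.bufferSize = 16384; // largest listed buffer; fewer onaudioprocess callbacks
```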

## sampleRate
## `sampleRate`

@@ -261,3 +309,3 @@ The default "sampleRate" value is "44100". Currently you can't modify the sample-rate in Windows, which is why this property isn't yet exposed to the public API.
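
The StereoAudioRecorderHelper code earlier in this diff does read `root.sampleRate`, so overriding it is a one-liner as sketched below; keep in mind the caveat above that the property isn't formally exposed in the public API yet:

```javascript
mediaRecorder.sampleRate = 96000; // device sample rates typically range from 22050 to 96000 Hz
```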

## video
## `video`

@@ -276,2 +324,38 @@ It is recommended to pass your HTMLVideoElement to get most accurate result.
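
For example, passing the element that is already rendering the stream lets WhammyRecorderHelper pick up the real `videoWidth`/`videoHeight` instead of the 320x240 defaults shown earlier in this diff:

```javascript
mediaRecorder.video = document.querySelector('video');
```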

## `stop`
This method allows you to stop recording.
```javascript
mediaRecorder.stop();
```
## `pause`
This method allows you to pause recording.
```javascript
mediaRecorder.pause();
```
## `resume`
This method allows you to resume recording.
```javascript
mediaRecorder.resume();
```
## `save`
This method allows you to save the recording to disk (via a save-as dialog).
```javascript
// invoke save-as dialog for all recorded blobs
mediaRecorder.save();
// or pass external blob/file
mediaRecorder.save(YourExternalBlob, 'FileName.webm');
```
## canvas
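
The description of this property sits outside the hunk; judging from the width/height handling in WhammyRecorderHelper above, a sketch assuming `canvas` accepts a `{ width, height }` object (an assumption, not confirmed by this diff):

```javascript
// assumed shape: custom capture resolution for the canvas-based (Whammy) recorder
mediaRecorder.canvas = {
    width: 1280,
    height: 720
};
```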

@@ -370,2 +454,3 @@

| Android | [Chrome](https://play.google.com/store/apps/details?id=com.chrome.beta&hl=en) / [Firefox](https://play.google.com/store/apps/details?id=org.mozilla.firefox) / [Opera](https://play.google.com/store/apps/details?id=com.opera.browser) |
| Microsoft Edge | [Normal Build](https://www.microsoft.com/en-us/windows/microsoft-edge) |

@@ -372,0 +457,0 @@ ## Contributors
