recordrtc - npm Package Compare versions

Comparing version 4.0.4 to 4.0.5


package.json
{
"name": "recordrtc",
"preferGlobal": true,
"version": "4.0.4",
"version": "4.0.5",
"author": {

@@ -21,3 +21,3 @@ "name": "Muaz Khan",

"webrtc",
"RecordRTC",
"recordrtc",
"audio-recording",

@@ -35,5 +35,5 @@ "video-recording",

},
"homepage": "https://www.webrtc-experiment.com/RecordRTC/",
"_id": "recordrtc@4.0.4",
"homepage": "http://RecordRTC.org/",
"_id": "recordrtc@",
"_from": "recordrtc@"
}

README.md

@@ -1,5 +0,9 @@

## [RecordRTC](https://github.com/muaz-khan/WebRTC-Experiment/tree/master/RecordRTC): [WebRTC](https://www.webrtc-experiment.com/) audio/video recording / [Demo](https://www.webrtc-experiment.com/RecordRTC/) [![npm](https://img.shields.io/npm/v/recordrtc.svg)](https://npmjs.org/package/recordrtc) [![downloads](https://img.shields.io/npm/dm/recordrtc.svg)](https://npmjs.org/package/recordrtc)
## [RecordRTC](https://github.com/muaz-khan/RecordRTC): [WebRTC](https://www.webrtc-experiment.com/) audio/video recording / [Wiki Pages](https://github.com/muaz-khan/RecordRTC/wiki) / [Demo](https://www.webrtc-experiment.com/RecordRTC/)
[![npm](https://img.shields.io/npm/v/recordrtc.svg)](https://npmjs.org/package/recordrtc) [![downloads](https://img.shields.io/npm/dm/recordrtc.svg)](https://npmjs.org/package/recordrtc)
[RecordRTC](https://www.webrtc-experiment.com/RecordRTC/) is a server-less (entirely client-side) JavaScript library that can be used to record WebRTC audio/video media streams. It supports cross-browser audio/video recording.
> Documentation & Demos: http://RecordRTC.org
```javascript

@@ -15,6 +19,2 @@ // Browsers Support::

<a href="https://nodei.co/npm/recordrtc/">
<img src="https://nodei.co/npm/recordrtc.png">
</a>
```
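As a quick orientation before the detailed sections below, here is a minimal usage sketch (not part of this diff) that follows the pattern the README's own snippets use; the recorder variable and the `type` option are illustrative:

```javascript
// Illustrative sketch only; assumes RecordRTC.js is loaded and getUserMedia is available.
navigator.getUserMedia({audio: true, video: true}, function(mediaStream) {
    var recordRTC = RecordRTC(mediaStream, {type: 'video'});
    recordRTC.startRecording();

    // stop after five seconds and use the recording
    setTimeout(function() {
        recordRTC.stopRecording(function(videoURL) {
            video.src = videoURL;                 // assumes a <video> element named "video"
            var recordedBlob = recordRTC.getBlob();
            recordRTC.getDataURL(function(dataURL) { /* e.g. upload somewhere */ });
        });
    }, 5000);
}, function(error) {
    console.error(error);
});
```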

@@ -34,4 +34,2 @@ npm install recordrtc

=
## How RecordRTC encodes wav/webm?
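The body of this section is elided by the diff; the following outline (a sketch distilled from the `StereoAudioRecorder` hunks further down, not the package's own wording) summarizes the wav path:

```javascript
// Condensed from the StereoAudioRecorder source below; not the complete routine.
var interleaved = interleave(mergeBuffers(leftchannel, recordingLength),
                             mergeBuffers(rightchannel, recordingLength));
var view = new DataView(new ArrayBuffer(44 + interleaved.length * 2));
writeUTFBytes(view, 0, 'RIFF');   // RIFF chunk descriptor + chunk length
writeUTFBytes(view, 8, 'WAVE');   // RIFF type
writeUTFBytes(view, 12, 'fmt ');  // fmt chunk: PCM, stereo, sampleRate, 16 bits per sample
writeUTFBytes(view, 36, 'data');  // data chunk length, then 16-bit PCM samples from offset 44
var wavBlob = new Blob([view], {type: 'audio/wav'});
// webm output, by contrast, is assembled by the Whammy encoder from captured canvas frames.
```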

@@ -56,2 +54,3 @@

10. [Record Audio and upload to Nodejs server](https://www.npmjs.org/package/record-audio)
11. [ConcatenateBlobs.js](https://github.com/muaz-khan/ConcatenateBlobs) - Concatenate multiple recordings in single Blob!

@@ -61,9 +60,10 @@ =

```html
<script src="http://RecordRTC.org/latest.js"></script>
<!-- or -->
<script src="//cdn.WebRTC-Experiment.com/RecordRTC.js"></script>
```
=
## Record audio+video in Firefox
#### How to record audio+video on Firefox >= 29?
You'll be recording both audio and video into a single WebM container, though you can edit RecordRTC.js to record in mp4.

@@ -77,4 +77,6 @@

var recordRTC;
navigator.getUserMedia(session, function (mediaStream) {
window.recordRTC = RecordRTC(MediaStream);
recordRTC = RecordRTC(MediaStream);
recordRTC.startRecording();

@@ -85,3 +87,6 @@ }, onError);

recordRTC.stopRecording(function (audioVideoWebMURL) {
window.open(audioVideoWebMURL);
video.src = audioVideoWebMURL;
var recordedBlob = recordRTC.getBlob();
recordRTC.getDataURL(function(dataURL) { });
});

@@ -93,6 +98,4 @@ };

=
## Record only Audio
#### How to record audio?
```javascript

@@ -102,3 +105,6 @@ var recordRTC = RecordRTC(mediaStream);

recordRTC.stopRecording(function(audioURL) {
mediaElement.src = audioURL;
audio.src = audioURL;
var recordedBlob = recordRTC.getBlob();
recordRTC.getDataURL(function(dataURL) { });
});

@@ -110,4 +116,6 @@ ```

```javascript
var recordRTC;
navigator.getUserMedia({audio: true}, function(mediaStream) {
window.recordRTC = RecordRTC(MediaStream);
recordRTC = RecordRTC(MediaStream);
recordRTC.startRecording();

@@ -118,3 +126,6 @@ });

recordRTC.stopRecording(function(audioURL) {
window.open(audioURL);
audio.src = audioURL;
var recordedBlob = recordRTC.getBlob();
recordRTC.getDataURL(function(dataURL) { });
});

@@ -131,3 +142,3 @@ };

#### How to fix audio echo issues?
## Echo Issues

@@ -137,14 +148,26 @@ Simply set `volume=0` or `muted=true`:

```javascript
navigator.getUserMedia({audio: true}, function(mediaStream) {
audioElement.volume = 0;
audioElement.src = URL.createObjectURL(mediaStream);
audioElement.play();
});
navigator.getUserMedia({
audio: {
mandatory: {
googEchoCancellation: false,
googAutoGainControl: false,
googNoiseSuppression: false,
googHighpassFilter: false
},
optional: []
},
}, onSuccess, onFailure);
var recordRTC;
function onSuccess(mediaStream) {
recordRTC = RecordRTC(mediaStream);
recordRTC.startRecording();
}
```
Or otherwise use `googEchoCancellation` and other experimental constraints from [here](https://chromium.googlesource.com/external/webrtc/+/master/talk/app/webrtc/mediaconstraintsinterface.cc).
Constraints Reference:
=
* https://chromium.googlesource.com/external/webrtc/+/master/talk/app/webrtc/mediaconstraintsinterface.cc
#### How to record video?
## Record Video

@@ -160,9 +183,12 @@ Everything is optional except `type:'video'`:

recordRTC.stopRecording(function(videoURL) {
mediaElement.src = videoURL;
video.src = videoURL;
var recordedBlob = recordRTC.getBlob();
recordRTC.getDataURL(function(dataURL) { });
});
```
=
## `onAudioProcessStarted`
#### How to fix audio/video sync issues on chrome?
This is useful for recovering from audio/video sync issues inside the browser:
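The example for this hook is elided by the diff; a minimal sketch of the pattern, assuming two recorders where the video recorder is started only once audio processing has actually begun (recorder names are illustrative):

```javascript
var videoRecorder = RecordRTC(mediaStream, {type: 'video'});
var audioRecorder = RecordRTC(mediaStream, {
    type: 'audio',
    onAudioProcessStarted: function() {
        // audio capture has really started; begin video now so the two stay in sync
        videoRecorder.startRecording();
    }
});
audioRecorder.startRecording();
```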

@@ -188,10 +214,8 @@ ```javascript

=
## Record animated GIF image
##### How to record animated GIF image?
Everything is optional except `type:'gif'`:
```javascript
// you must link:
// you must "manually" link:
// https://cdn.webrtc-experiment.com/gif-recorder.js

@@ -211,11 +235,9 @@

=
## Record a Webpage
##### How to record HTML2Canvas?
You could call it "HTML/Canvas recording using RecordRTC"!
```html
<script src="//www.WebRTC-Experiment.com/RecordRTC.js"></script>
<script src="//www.webrtc-experiment.com/screenshot.js"></script>
<script src="//cdn.WebRTC-Experiment.com/RecordRTC.js"></script>
<script src="//cdn.webrtc-experiment.com/screenshot.js"></script>
<div id="elementToShare" style="width:100%;height:100%;background:green;"></div>

@@ -228,4 +250,7 @@ <script>

recordRTC.startRecording();
recordRTC.stopRecording(function(url) {
window.open(url);
recordRTC.stopRecording(function(videoURL) {
video.src = videoURL;
var recordedBlob = recordRTC.getBlob();
recordRTC.getDataURL(function(dataURL) { });
});

@@ -237,5 +262,5 @@ </script>

=
# API Reference
##### `autoWriteToDisk`
## `autoWriteToDisk`

@@ -252,6 +277,4 @@ Using `autoWriteToDisk`; you can suggest RecordRTC to auto-write to indexed-db as soon as you call `stopRecording` method.
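The example is elided above; a minimal sketch, assuming IndexedDB is available, of enabling the flag through the config object (the flag is read inside `stopRecording` in the source below):

```javascript
var recordRTC = RecordRTC(mediaStream, {
    type: 'audio',
    autoWriteToDisk: true   // stopRecording() will also hand the DataURL to DiskStorage.Store()
});
recordRTC.startRecording();
// later:
recordRTC.stopRecording(function(url) {
    // the recording has already been written to IndexedDB at this point
});
```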

=
## `writeToDisk`
##### `writeToDisk`
You can write the recorded blob to disk using the `writeToDisk` method:
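A minimal sketch of the call, based on the `RecordRTC.writeToDisk(options)` signature in the source below; the recorder names are illustrative:

```javascript
// Each value is a RecordRTC instance whose blob should be persisted to IndexedDB.
RecordRTC.writeToDisk({
    audio: audioRecorder,
    video: videoRecorder
});
```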

@@ -264,6 +287,4 @@

=
## `getFromDisk`
##### `getFromDisk`
You can get the recorded blob back from disk using the `getFromDisk` method:
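A minimal sketch, based on the `RecordRTC.getFromDisk(type, callback)` signature in the source below; the `'audio'` type and the `audio` element are illustrative:

```javascript
// Static form: fetch a stored recording of a given type as a DataURL.
RecordRTC.getFromDisk('audio', function(dataURL) {
    audio.src = dataURL;
});
// Instance form: the type comes from the recorder's own config.
recordRTC.getFromDisk(function(dataURL) { });
```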

@@ -304,6 +325,4 @@

=
## How to set video width/height?
##### How to set video width/height?
```javascript

@@ -323,6 +342,4 @@ var options = {

=
## `getDataURL`
##### How to get DataURL?
```javascript

@@ -334,6 +351,4 @@ recordRTC.getDataURL(function(dataURL) {

=
## `getBlob`
##### How to get `Blob` object?
```javascript

@@ -343,6 +358,4 @@ blob = recordRTC.getBlob();

=
## `toURL`
##### How to get Virtual-URL?
```javascript

@@ -352,6 +365,4 @@ window.open( recordRTC.toURL() );

=
## `save`
##### How to invoke save-to-disk dialog?
```javascript

@@ -361,5 +372,5 @@ recordRTC.save();

=
## `bufferSize`
##### How to customize Buffer-Size for audio recording?
Here is how to customize the Buffer-Size for audio recording:

@@ -398,5 +409,5 @@ ```javascript

=
## `sampleRate`
##### How to customize Sample-Rate for audio recording?
Here is how to customize the Sample-Rate for audio recording:

@@ -431,24 +442,18 @@ ```javascript

=
# Clarifications
##### Is WinXP supported?
## Is WinXP supported?
No WinXP SP2 support. However, RecordRTC works on WinXP Service Pack 3.
No WinXP SP2 based "Chrome" support. However, RecordRTC works on WinXP Service Pack 3.
=
## Is Chrome on Android supported?
##### Is Chrome on Android supported?
RecordRTC uses the WebAudio API for stereo audio recording. AFAIK, WebAudio is not yet supported in Android Chrome releases.
=
## Stereo or Mono?
##### Stereo or Mono?
Audio recording fails for `mono` audio, so use `stereo` audio only.
=
## Possible issues/failures:
##### Possible issues/failures:
Note that RecordRTC fails to record audio when any of the following conditions fails:

@@ -463,6 +468,4 @@

=
## Web Audio APIs requirements
##### Web Audio APIs requirements
1. If you're on Windows, you have to be running WinXP SP3, Windows Vista or better (will not work on Windows XP SP2 or earlier).

@@ -472,6 +475,4 @@ 2. On Windows, audio input hardware must be set to the same sample rate as audio output hardware.

=
## Why stereo?
##### Why stereo?
If you explore the Chromium code, you'll see that some APIs can only be successfully called for `WAV` files with `stereo` audio.

@@ -483,4 +484,2 @@

=
The Media Stream Recording API (the MediaRecorder object) is being implemented by both Firefox and Chrome. RecordRTC also uses the MediaRecorder API for Firefox (Nightly).
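For reference, the Firefox path in the source below wraps the native MediaRecorder roughly as follows (a condensed sketch, not the package's exact code):

```javascript
var recorder = {blob: null};                          // stands in for the wrapper instance
var mediaRecorder = new MediaRecorder(mediaStream);   // native Firefox MediaRecorder
mediaRecorder.ondataavailable = function(e) {
    // 4.0.5 stores the result on `blob` (renamed from `recordedBlob`)
    recorder.blob = new Blob([e.data], {type: e.data.type || 'audio/ogg'});
};
mediaRecorder.onerror = function(error) {
    console.warn(error);
};
mediaRecorder.start();                                // the wrapper may pass a timeslice here
```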

@@ -490,6 +489,4 @@

=
## Browser Support
##### Browser Support
[RecordRTC Demo](https://www.webrtc-experiment.com/RecordRTC/) works fine in the following web browsers:

@@ -499,9 +496,9 @@

| ------------- |-------------|
| Firefox | [Stable](http://www.mozilla.org/en-US/firefox/new/) / [Aurora](http://www.mozilla.org/en-US/firefox/aurora/) / [Nightly](http://nightly.mozilla.org/) |
| Google Chrome | [Stable](https://www.google.com/intl/en_uk/chrome/browser/) / [Canary](https://www.google.com/intl/en/chrome/browser/canary.html) / [Beta](https://www.google.com/intl/en/chrome/browser/beta.html) / [Dev](https://www.google.com/intl/en/chrome/browser/index.html?extra=devchannel#eula) |
| Firefox | [Nightly](http://nightly.mozilla.org/) |
| Opera | [Stable](http://www.opera.com/) / [NEXT](http://www.opera.com/computer/next) |
| Android | [Chrome](https://play.google.com/store/apps/details?id=com.chrome.beta&hl=en) / [Firefox](https://play.google.com/store/apps/details?id=org.mozilla.firefox) / [Opera](https://play.google.com/store/apps/details?id=com.opera.browser) |
=
## Credits
##### Credits
1. [Recorderjs](https://github.com/mattdiamond/Recorderjs) for audio recording

@@ -511,6 +508,4 @@ 2. [whammy](https://github.com/antimatter15/whammy) for video recording

=
## Spec & Reference
##### Spec & Reference
1. [Web Audio API](https://dvcs.w3.org/hg/audio/raw-file/tip/webaudio/specification.html)

@@ -522,6 +517,4 @@ 2. [MediaRecorder](https://wiki.mozilla.org/Gecko:MediaRecorder)

=
## License
[RecordRTC.js](https://github.com/muaz-khan/WebRTC-Experiment/tree/master/RecordRTC) is released under [MIT licence](https://www.webrtc-experiment.com/licence/) . Copyright (c) [Muaz Khan](https://plus.google.com/+MuazKhan).
[RecordRTC.js](https://github.com/muaz-khan/RecordRTC) is released under [MIT licence](https://www.webrtc-experiment.com/licence/) . Copyright (c) [Muaz Khan](https://plus.google.com/+MuazKhan).

RecordRTC.js

@@ -1,13 +0,16 @@

// Last time updated at Sep 07, 2014, 08:32:23
// Last time updated at Oct 13, 2014, 08:32:23
// updates?
/*
-. onGifPreview added.
-. You can set options.video = HTMLVideoElement;
-. You can get blob directly using "recordRTC.blob" property
-. You can get "ArrayBuffer" as well using "recordRTC.buffer" property
-. You can get "DataView" as well using "recordRTC.view" property
-. You can get "Sample-Rates" using "recordRTC.sampleRate" property
-. You can get "Buffer-Size" using "recordRTC.bufferSize" property
-. if you're recording GIF, you must link: https://cdn.webrtc-experiment.com/gif-recorder.js
*/
// issues?
/*
-. audio self-playback (echo/noise/etc.)
-. it seems that RecordRTC is cutting off the last couple of seconds of recordings
*/
//------------------------------------

@@ -24,5 +27,5 @@

//------------------------------------
// Muaz Khan - www.MuazKhan.com
// MIT License - www.WebRTC-Experiment.com/licence
// Documentation - github.com/muaz-khan/WebRTC-Experiment/tree/master/RecordRTC
// Muaz Khan - www.MuazKhan.com
// MIT License - www.WebRTC-Experiment.com/licence
// Documentation - https://github.com/muaz-khan/RecordRTC
//------------------------------------

@@ -74,5 +77,8 @@ // Note: RecordRTC.js is using 3 other libraries; you need to accept their licences as well.

mediaRecorder = mergeProps(mediaRecorder, config);
mediaRecorder.onAudioProcessStarted = function () {
mediaRecorder.onAudioProcessStarted = function() {
if (config.onAudioProcessStarted) config.onAudioProcessStarted();
};
mediaRecorder.onGifPreview = function(gif) {
if (config.onGifPreview) config.onGifPreview(gif);
};

@@ -87,2 +93,4 @@ mediaRecorder.record();

var recordRTC = this;
console.warn('stopped recording ' + config.type + ' stream.');

@@ -98,3 +106,7 @@

function _callback() {
var blob = mediaRecorder.recordedBlob;
for (var item in mediaRecorder) {
recordRTC[item] = mediaRecorder[item];
}
var blob = mediaRecorder.blob;
if (callback) {

@@ -104,11 +116,12 @@ var url = URL.createObjectURL(blob);

}
console.debug(blob.type, '->', bytesToSize(blob.size));
if (config.autoWriteToDisk) {
getDataURL(function (dataURL) {
var parameter = {};
parameter[config.type + 'Blob'] = dataURL;
DiskStorage.Store(parameter);
});
}
if (!config.autoWriteToDisk) return;
getDataURL(function(dataURL) {
var parameter = {};
parameter[config.type + 'Blob'] = dataURL;
DiskStorage.Store(parameter);
});
}

@@ -119,6 +132,6 @@ }

if (!callback) throw 'Pass a callback function over getDataURL.';
var recordedBlob = _mediaRecorder ? _mediaRecorder.recordedBlob : mediaRecorder.recordedBlob;
if(!recordedBlob) {
var blob = _mediaRecorder ? _mediaRecorder.blob : mediaRecorder.blob;
if (!blob) {
console.warn('Blob encoder did not yet finished its job.');

@@ -139,11 +152,11 @@ setTimeout(function() {

webWorker.onmessage = function (event) {
webWorker.onmessage = function(event) {
callback(event.data);
};
webWorker.postMessage(recordedBlob);
webWorker.postMessage(blob);
} else {
var reader = new FileReader();
reader.readAsDataURL(recordedBlob);
reader.onload = function (event) {
reader.readAsDataURL(blob);
reader.onload = function(event) {
callback(event.target.result);

@@ -174,12 +187,12 @@ };

stopRecording: stopRecording,
getBlob: function () {
getBlob: function() {
if (!mediaRecorder) return console.warn(WARNING);
return mediaRecorder.recordedBlob;
return mediaRecorder.blob;
},
getDataURL: getDataURL,
toURL: function () {
toURL: function() {
if (!mediaRecorder) return console.warn(WARNING);
return URL.createObjectURL(mediaRecorder.recordedBlob);
return URL.createObjectURL(mediaRecorder.blob);
},
save: function (fileName) {
save: function(fileName) {
if (!mediaRecorder) {

@@ -194,5 +207,5 @@ var that = this;

var hyperlink = document.createElement('a');
hyperlink.href = URL.createObjectURL(mediaRecorder.recordedBlob);
hyperlink.href = URL.createObjectURL(mediaRecorder.blob);
hyperlink.target = '_blank';
hyperlink.download = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + mediaRecorder.recordedBlob.type.split('/')[1];
hyperlink.download = (fileName || (Math.round(Math.random() * 9999999999) + 888888888)) + '.' + mediaRecorder.blob.type.split('/')[1];

@@ -209,7 +222,7 @@ var evt = new MouseEvent('click', {

},
getFromDisk: function (callback) {
getFromDisk: function(callback) {
if (!mediaRecorder) return console.warn(WARNING);
RecordRTC.getFromDisk(config.type, callback);
},
setAdvertisementArray: function (arrayOfWebPImages) {
setAdvertisementArray: function(arrayOfWebPImages) {
this.advertisement = [];

@@ -228,7 +241,7 @@

RecordRTC.getFromDisk = function (type, callback) {
RecordRTC.getFromDisk = function(type, callback) {
if (!callback) throw 'callback is mandatory.';
console.log('Getting recorded ' + (type == 'all' ? 'blobs' : type + ' blob ') + ' from disk!');
DiskStorage.Fetch(function (dataURL, _type) {
DiskStorage.Fetch(function(dataURL, _type) {
if (type != 'all' && _type == type + 'Blob') {

@@ -244,9 +257,9 @@ if (callback) callback(dataURL);

RecordRTC.writeToDisk = function (options) {
RecordRTC.writeToDisk = function(options) {
console.log('Writing recorded blob(s) to disk!');
options = options || {};
if (options.audio && options.video && options.gif) {
options.audio.getDataURL(function (audioDataURL) {
options.video.getDataURL(function (videoDataURL) {
options.gif.getDataURL(function (gifDataURL) {
options.audio.getDataURL(function(audioDataURL) {
options.video.getDataURL(function(videoDataURL) {
options.gif.getDataURL(function(gifDataURL) {
DiskStorage.Store({

@@ -261,4 +274,4 @@ audioBlob: audioDataURL,

} else if (options.audio && options.video) {
options.audio.getDataURL(function (audioDataURL) {
options.video.getDataURL(function (videoDataURL) {
options.audio.getDataURL(function(audioDataURL) {
options.video.getDataURL(function(videoDataURL) {
DiskStorage.Store({

@@ -271,4 +284,4 @@ audioBlob: audioDataURL,

} else if (options.audio && options.gif) {
options.audio.getDataURL(function (audioDataURL) {
options.gif.getDataURL(function (gifDataURL) {
options.audio.getDataURL(function(audioDataURL) {
options.gif.getDataURL(function(gifDataURL) {
DiskStorage.Store({

@@ -281,4 +294,4 @@ audioBlob: audioDataURL,

} else if (options.video && options.gif) {
options.video.getDataURL(function (videoDataURL) {
options.gif.getDataURL(function (gifDataURL) {
options.video.getDataURL(function(videoDataURL) {
options.gif.getDataURL(function(gifDataURL) {
DiskStorage.Store({

@@ -291,3 +304,3 @@ videoBlob: videoDataURL,

} else if (options.audio) {
options.audio.getDataURL(function (audioDataURL) {
options.audio.getDataURL(function(audioDataURL) {
DiskStorage.Store({

@@ -298,3 +311,3 @@ audioBlob: audioDataURL

} else if (options.video) {
options.video.getDataURL(function (videoDataURL) {
options.video.getDataURL(function(videoDataURL) {
DiskStorage.Store({

@@ -305,3 +318,3 @@ videoBlob: videoDataURL

} else if (options.gif) {
options.gif.getDataURL(function (gifDataURL) {
options.gif.getDataURL(function(gifDataURL) {
DiskStorage.Store({

@@ -318,3 +331,3 @@ gifBlob: gifDataURL

function MRecordRTC(mediaStream) {
this.addStream = function (_mediaStream) {
this.addStream = function(_mediaStream) {
if (_mediaStream) mediaStream = _mediaStream;

@@ -328,8 +341,8 @@ };

this.startRecording = function () {
if(!IsChrome && mediaStream && mediaStream.getAudioTracks().length && mediaStream.getVideoTracks().length) {
this.startRecording = function() {
if (!IsChrome && mediaStream && mediaStream.getAudioTracks().length && mediaStream.getVideoTracks().length) {
// Firefox is supporting both audio/video in single blob
this.mediaType.audio = false;
}
if (this.mediaType.audio) {

@@ -354,7 +367,7 @@ this.audioRecorder = RecordRTC(mediaStream, this).startRecording();

this.stopRecording = function (callback) {
callback = callback || function () {};
this.stopRecording = function(callback) {
callback = callback || function() {};
if (this.audioRecorder) {
this.audioRecorder.stopRecording(function (blobURL) {
this.audioRecorder.stopRecording(function(blobURL) {
callback(blobURL, 'audio');

@@ -365,3 +378,3 @@ });

if (this.videoRecorder) {
this.videoRecorder.stopRecording(function (blobURL) {
this.videoRecorder.stopRecording(function(blobURL) {
callback(blobURL, 'video');

@@ -372,3 +385,3 @@ });

if (this.gifRecorder) {
this.gifRecorder.stopRecording(function (blobURL) {
this.gifRecorder.stopRecording(function(blobURL) {
callback(blobURL, 'gif');

@@ -379,3 +392,3 @@ });

this.getBlob = function (callback) {
this.getBlob = function(callback) {
var output = {};

@@ -397,6 +410,6 @@

this.getDataURL = function (callback) {
this.getBlob(function (blob) {
getDataURL(blob.audio, function (_audioDataURL) {
getDataURL(blob.video, function (_videoDataURL) {
this.getDataURL = function(callback) {
this.getBlob(function(blob) {
getDataURL(blob.audio, function(_audioDataURL) {
getDataURL(blob.video, function(_videoDataURL) {
callback({

@@ -416,3 +429,3 @@ audio: _audioDataURL,

webWorker.onmessage = function (event) {
webWorker.onmessage = function(event) {
callback00(event.data);

@@ -422,7 +435,6 @@ };

webWorker.postMessage(blob);
}
else {
} else {
var reader = new FileReader();
reader.readAsDataURL(blob);
reader.onload = function (event) {
reader.onload = function(event) {
callback00(event.target.result);

@@ -446,3 +458,3 @@ };

this.writeToDisk = function () {
this.writeToDisk = function() {
RecordRTC.writeToDisk({

@@ -454,3 +466,3 @@ audio: this.audioRecorder,

};
this.save = function(args) {

@@ -462,11 +474,11 @@ args = args || {

};
if(!!args.audio && this.audioRecorder) {
if (!!args.audio && this.audioRecorder) {
this.audioRecorder.save(typeof args.audio == 'string' ? args.audio : '');
}
if(!!args.video && this.videoRecorder) {
if (!!args.video && this.videoRecorder) {
this.videoRecorder.save(typeof args.video == 'string' ? args.video : '');
}
if(!!args.gif && this.gifRecorder) {
if (!!args.gif && this.gifRecorder) {
this.gifRecorder.save(typeof args.gif == 'string' ? args.gif : '');

@@ -568,3 +580,3 @@ }

var dataAvailable = false;
this.record = function () {
this.record = function() {
// http://dxr.mozilla.org/mozilla-central/source/content/media/MediaRecorder.cpp

@@ -579,3 +591,3 @@ // https://wiki.mozilla.org/Gecko:MediaRecorder

// Dispatching OnDataAvailable Handler
mediaRecorder.ondataavailable = function (e) {
mediaRecorder.ondataavailable = function(e) {
if (dataAvailable) return;

@@ -588,8 +600,4 @@

// todo: need to check who commented following two lines and why?
// pull #118
// if (self.recordedBlob) self.recordedBlob = new Blob([self.recordedBlob, e.data], { type: e.data.type || 'audio/ogg' });
dataAvailable = true;
self.recordedBlob = new Blob([e.data], {
self.blob = new Blob([e.data], {
type: e.data.type || 'audio/ogg'

@@ -600,3 +608,3 @@ });

mediaRecorder.onerror = function (error) {
mediaRecorder.onerror = function(error) {
console.warn(error);

@@ -621,3 +629,3 @@ mediaRecorder.stop();

this.stop = function (callback) {
this.stop = function(callback) {
this.callback = callback;

@@ -642,20 +650,20 @@ // mediaRecorder.state == 'recording' means that media recorder is associated with "session"

function StereoRecorder(mediaStream) {
this.record = function () {
var self = this;
this.record = function() {
mediaRecorder = new StereoAudioRecorder(mediaStream, this);
var self = this;
mediaRecorder.onAudioProcessStarted = function () {
mediaRecorder.onAudioProcessStarted = function() {
if (self.onAudioProcessStarted) self.onAudioProcessStarted();
};
mediaRecorder.record();
};
this.stop = function (callback) {
var self = this;
if (mediaRecorder)
mediaRecorder.stop(function () {
self.recordedBlob = mediaRecorder.recordedBlob;
callback();
});
this.stop = function(callback) {
if (!mediaRecorder) return;
mediaRecorder.stop(function() {
for (var item in mediaRecorder) {
self[item] = mediaRecorder[item];
}
callback();
});
};

@@ -672,9 +680,7 @@

// In Chrome, when the javascript node is out of scope, the onaudioprocess callback stops firing.
// This leads to audio being significantly shorter than the generated video.
var __stereoAudioRecorderJavacriptNode;
function StereoAudioRecorder(mediaStream, root) {
if(!mediaStream.getAudioTracks().length) throw 'Your stream has no audio tracks.';
if (!mediaStream.getAudioTracks().length) throw 'Your stream has no audio tracks.';
// variables

@@ -686,3 +692,3 @@ var leftchannel = [];

this.record = function () {
this.record = function() {
// reset the buffers for the new recording

@@ -695,91 +701,85 @@ leftchannel.length = rightchannel.length = 0;

this.stop = function (callback) {
setTimeout(onRecordingStopped, 1000);
this.stop = function(callback) {
// stop recording
recording = false;
function onRecordingStopped() {
// stop recording
recording = false;
audioInput.disconnect();
volume.disconnect();
requestAnimationFrame(function() {
// to make sure onaudioprocess stops firing
audioInput.disconnect();
volume.disconnect();
// flat the left and right channels down
var leftBuffer = mergeBuffers(leftchannel, recordingLength);
var rightBuffer = mergeBuffers(rightchannel, recordingLength);
// flat the left and right channels down
var leftBuffer = mergeBuffers(leftchannel, recordingLength);
var rightBuffer = mergeBuffers(rightchannel, recordingLength);
// interleave both channels together
var interleaved = interleave(leftBuffer, rightBuffer);
// interleave both channels together
var interleaved = interleave(leftBuffer, rightBuffer);
// create our wav file
var buffer = new ArrayBuffer(44 + interleaved.length * 2);
var view = new DataView(buffer);
// create our wav file
var buffer = new ArrayBuffer(44 + interleaved.length * 2);
// RIFF chunk descriptor/identifier
writeUTFBytes(view, 0, 'RIFF');
// RIFF chunk length
// view.setUint32(4, 44 + interleaved.length * 2, true);
view.setUint32(4, 36 + interleaved.length * 2, true);
// RIFF type
writeUTFBytes(view, 8, 'WAVE');
var view = new DataView(buffer);
// format chunk identifier
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// format chunk length
view.setUint32(16, 16, true);
// sample format (raw)
view.setUint16(20, 1, true);
// RIFF chunk descriptor/identifier
writeUTFBytes(view, 0, 'RIFF');
// stereo (2 channels)
view.setUint16(22, 2, true);
// sample rate
view.setUint32(24, sampleRate, true);
// byte rate (sample rate * block align)
view.setUint32(28, sampleRate * 4, true);
// block align (channel count * bytes per sample)
view.setUint16(32, 4, true);
// bits per sample
view.setUint16(34, 16, true);
// RIFF chunk length
// view.setUint32(4, 44 + interleaved.length * 2, true);
view.setUint32(4, 36 + interleaved.length * 2, true);
// data sub-chunk
// data chunk identifier
writeUTFBytes(view, 36, 'data');
// data chunk length
view.setUint32(40, interleaved.length * 2, true);
// RIFF type
writeUTFBytes(view, 8, 'WAVE');
// write the PCM samples
var offset = 44;
for (var i = 0; i < interleaved.length; i++, offset+=2){
var s = Math.max(-1, Math.min(1, interleaved[i]));
view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
// format chunk identifier
// FMT sub-chunk
writeUTFBytes(view, 12, 'fmt ');
// final binary blob
self.recordedBlob = new Blob([view], {
type: 'audio/wav'
});
// format chunk length
view.setUint32(16, 16, true);
// recorded audio length
self.length = recordingLength;
// sample format (raw)
view.setUint16(20, 1, true);
callback();
// stereo (2 channels)
view.setUint16(22, 2, true);
isAudioProcessStarted = false;
});
// sample rate
view.setUint32(24, sampleRate, true);
// byte rate (sample rate * block align)
view.setUint32(28, sampleRate * 4, true);
// block align (channel count * bytes per sample)
view.setUint16(32, 4, true);
// bits per sample
view.setUint16(34, 16, true);
// data sub-chunk
// data chunk identifier
writeUTFBytes(view, 36, 'data');
// data chunk length
view.setUint32(40, interleaved.length * 2, true);
// write the PCM samples
var offset = 44;
for (var i = 0; i < interleaved.length; i++, offset += 2) {
var s = Math.max(-1, Math.min(1, interleaved[i]));
view.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
var self = this;
// final binary blob
this.blob = new Blob([view], {
type: 'audio/wav'
});
this.buffer = new ArrayBuffer(view);
this.view = view;
// recorded audio length
this.length = recordingLength;
callback();
isAudioProcessStarted = false;
};

@@ -871,2 +871,5 @@

this.sampleRate = sampleRate;
this.bufferSize = bufferSize;
console.log('sample-rate', sampleRate);

@@ -885,6 +888,6 @@ console.log('buffer-size', bufferSize);

self = this;
__stereoAudioRecorderJavacriptNode.onaudioprocess = function (e) {
__stereoAudioRecorderJavacriptNode.onaudioprocess = function(e) {
// if MediaStream().stop() or MediaStreamTrack.stop() is invoked.
if (mediaStream.ended) {
__stereoAudioRecorderJavacriptNode.onaudioprocess = function () {};
__stereoAudioRecorderJavacriptNode.onaudioprocess = function() {};
return;

@@ -926,3 +929,3 @@ }

var isRecording;
this.record = function () {
this.record = function() {
isRecording = true;

@@ -932,10 +935,10 @@ drawCanvasFrame();

this.stop = function (callback) {
this.stop = function(callback) {
isRecording = false;
whammy.frames = dropFirstFrame(frames);
this.recordedBlob = whammy.compile();
this.blob = whammy.compile();
frames = [];
if (callback) callback(this.recordedBlob);
if (callback) callback(this.blob);
};

@@ -947,3 +950,3 @@

html2canvas(htmlElement, {
onrendered: function (canvas) {
onrendered: function(canvas) {
var duration = new Date().getTime() - lastTime;

@@ -974,5 +977,5 @@ if (!duration) return drawCanvasFrame();

function WhammyRecorder(mediaStream) {
this.record = function () {
if (!this.width) this.width = video.offsetWidth || 320;
if (!this.height) this.height = video.offsetHeight || 240;
this.record = function() {
if (!this.width) this.width = 320;
if (!this.height) this.height = 240;

@@ -996,13 +999,25 @@ if (!this.video) {

video.width = this.video.width;
video.height = this.video.height;
context = canvas.getContext('2d');
console.log('canvas width', canvas.width);
console.log('canvas height', canvas.height);
// setting defaults
if (this.video && this.video instanceof HTMLVideoElement) {
video = this.video.cloneNode();
} else {
video = document.createElement('video');
video.src = URL.createObjectURL(mediaStream);
console.log('video width', video.width);
console.log('video height', video.height);
video.width = this.video.width;
video.height = this.video.height;
}
context = canvas.getContext('2d');
video.muted = true;
video.play();
lastTime = new Date().getTime();
whammy = new Whammy.Video()
frames = [];
console.log('canvas resolutions', canvas.width, '*', canvas.height);
console.log('video width/height', video.width, '*', video.height);
drawFrames();

@@ -1038,10 +1053,9 @@ };

this.stop = function (callback) {
this.stop = function(callback) {
isStopDrawing = true;
whammy.frames = dropFirstFrame(frames);
frames = [];
this.recordedBlob = whammy.compile();
this.blob = whammy.compile();
if (callback) callback(this.recordedBlob);
if (callback) callback(this.blob);
};

@@ -1052,12 +1066,5 @@

var video = document.createElement('video');
video.muted = true;
video.volume = 0;
video.autoplay = true;
video.src = URL.createObjectURL(mediaStream);
video.play();
var lastTime = new Date().getTime();
var whammy = new Whammy.Video();
var video;
var lastTime;
var whammy;
}

@@ -1073,6 +1080,9 @@

var Whammy = (function () {
var Whammy = (function() {
function toWebM(frames) {
var info = checkFrames(frames);
if (!info) {
return [];
}

@@ -1180,3 +1190,3 @@ var CLUSTER_MAX_DURATION = 30000;

"id": 0xe7 // Timecode
}].concat(clusterFrames.map(function (webp) {
}].concat(clusterFrames.map(function(webp) {
var block = makeSimpleBlock({

@@ -1237,3 +1247,3 @@ discardable: 0,

function strToBuffer(str) {
return new Uint8Array(str.split('').map(function (e) {
return new Uint8Array(str.split('').map(function(e) {
return e.charCodeAt(0);

@@ -1315,3 +1325,3 @@ }));

}
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function (e) {
var out = [data.trackNum | 0x80, data.timecode >> 8, data.timecode & 0xff, flags].map(function(e) {
return String.fromCharCode(e);

@@ -1350,3 +1360,3 @@ }).join('') + data.frame;

var id = string.substr(offset, 4);
var len = parseInt(string.substr(offset + 4, 4).split('').map(function (i) {
var len = parseInt(string.substr(offset + 4, 4).split('').map(function(i) {
var unpadded = i.charCodeAt(0).toString(2);

@@ -1370,3 +1380,3 @@ return (new Array(8 - unpadded.length + 1)).join('0') + unpadded;

return [].slice.call(
new Uint8Array((new Float64Array([num])).buffer), 0).map(function (e) {
new Uint8Array((new Float64Array([num])).buffer), 0).map(function(e) {
return String.fromCharCode(e);

@@ -1384,3 +1394,3 @@ }).reverse().join('');

WhammyVideo.prototype.add = function (frame, duration) {
WhammyVideo.prototype.add = function(frame, duration) {
if ('canvas' in frame) { //CanvasRenderingContext2D

@@ -1402,4 +1412,4 @@ frame = frame.canvas;

};
WhammyVideo.prototype.compile = function () {
return new toWebM(this.frames.map(function (frame) {
WhammyVideo.prototype.compile = function() {
return new toWebM(this.frames.map(function(frame) {
var webp = parseWebP(parseRIFF(atob(frame.image.slice(23))));

@@ -1420,3 +1430,3 @@ webp.duration = frame.duration;

var DiskStorage = {
init: function () {
init: function() {
var self = this;

@@ -1449,3 +1459,3 @@ var indexedDB = window.indexedDB || window.webkitIndexedDB || window.mozIndexedDB || window.OIndexedDB || window.msIndexedDB;

function getFromStore(portionName) {
transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function (event) {
transaction.objectStore(self.dataStoreName).get(portionName).onsuccess = function(event) {
if (self.callback) self.callback(event.target.result, portionName);

@@ -1462,3 +1472,3 @@ };

request.onsuccess = function () {
request.onsuccess = function() {
db = request.result;

@@ -1470,3 +1480,3 @@ db.onerror = self.onError;

var setVersion = db.setVersion(dbVersion);
setVersion.onsuccess = function () {
setVersion.onsuccess = function() {
createObjectStore(db);

@@ -1482,7 +1492,7 @@ putInDB();

};
request.onupgradeneeded = function (event) {
request.onupgradeneeded = function(event) {
createObjectStore(event.target.result);
};
},
Fetch: function (callback) {
Fetch: function(callback) {
this.callback = callback;

@@ -1493,3 +1503,3 @@ this.init();

},
Store: function (config) {
Store: function(config) {
this.audioBlob = config.audioBlob;

@@ -1503,3 +1513,3 @@ this.videoBlob = config.videoBlob;

},
onError: function (error) {
onError: function(error) {
console.error(JSON.stringify(error, null, '\t'));

@@ -1514,7 +1524,7 @@ },

function GifRecorder(mediaStream) {
if(!window.GIFEncoder) {
if (!window.GIFEncoder) {
throw 'Please link: https://cdn.webrtc-experiment.com/gif-recorder.js';
}
this.record = function () {
this.record = function() {
if (!this.width) this.width = video.offsetWidth || 320;

@@ -1572,2 +1582,4 @@ if (!this.height) this.height = video.offsetHeight || 240;

var self = this;
function drawVideoFrame(time) {

@@ -1584,2 +1596,7 @@ lastAnimationFrame = requestAnimationFrame(drawVideoFrame);

context.drawImage(video, 0, 0, canvas.width, canvas.height);
if (self.onGifPreview) {
self.onGifPreview(canvas.toDataURL('image/png'));
}
gifEncoder.addFrame(context);

@@ -1592,3 +1609,3 @@ lastFrameTime = time;

this.stop = function () {
this.stop = function() {
if (lastAnimationFrame) cancelAnimationFrame(lastAnimationFrame);

@@ -1598,3 +1615,3 @@

this.recordedBlob = new Blob([new Uint8Array(gifEncoder.stream().bin)], {
this.blob = new Blob([new Uint8Array(gifEncoder.stream().bin)], {
type: 'image/gif'

@@ -1627,3 +1644,5 @@ });

function dropFirstFrame(arr) {
arr.shift();
for (var i = 0; i < 60; i++) {
arr.shift();
}
return arr;

@@ -1630,0 +1649,0 @@ }
