Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@uppy/audio

Package Overview
Dependencies
Maintainers
8
Versions
33
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@uppy/audio - npm Package Compare versions

Comparing version 0.2.1 to 0.3.0

lib/Audio.js

7

CHANGELOG.md
# @uppy/audio
## 0.3.0
Released: 2022-03-16
Included in: Uppy v2.8.0
- @uppy/audio: refactor to ESM (Antoine du Hamel / #3470)
## 0.2.1

@@ -4,0 +11,0 @@

10

lib/audio-oscilloscope/index.js
"use strict";
var _draw;
function _classPrivateFieldLooseBase(receiver, privateKey) { if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { throw new TypeError("attempted to use private field on non-instance"); } return receiver; }

@@ -23,3 +21,5 @@

module.exports = (_draw = /*#__PURE__*/_classPrivateFieldLooseKey("draw"), class AudioOscilloscope {
var _draw = /*#__PURE__*/_classPrivateFieldLooseKey("draw");
class AudioOscilloscope {
constructor(canvas, options) {

@@ -105,2 +105,4 @@ if (options === void 0) {

});
}
module.exports = AudioOscilloscope;
"use strict";
const {
h
} = require('preact');
var _preact = require("preact");

@@ -13,5 +11,5 @@ module.exports = _ref => {

} = _ref;
return h("div", {
return (0, _preact.h)("div", {
className: "uppy-Audio-videoSource"
}, h("select", {
}, (0, _preact.h)("select", {
className: "uppy-u-reset uppy-Audio-audioSource-select",

@@ -21,3 +19,3 @@ onChange: event => {

}
}, audioSources.map(audioSource => h("option", {
}, audioSources.map(audioSource => (0, _preact.h)("option", {
key: audioSource.deviceId,

@@ -24,0 +22,0 @@ value: audioSource.deviceId,

14

lib/DiscardButton.js
"use strict";
const {
h
} = require('preact');
var _preact = require("preact");

@@ -12,3 +10,3 @@ function DiscardButton(_ref) {

} = _ref;
return h("button", {
return (0, _preact.h)("button", {
className: "uppy-u-reset uppy-c-btn uppy-Audio-button",

@@ -20,3 +18,3 @@ type: "button",

"data-uppy-super-focusable": true
}, h("svg", {
}, (0, _preact.h)("svg", {
width: "13",

@@ -28,8 +26,8 @@ height: "13",

className: "uppy-c-icon"
}, h("g", {
}, (0, _preact.h)("g", {
fill: "#FFF",
fillRule: "evenodd"
}, h("path", {
}, (0, _preact.h)("path", {
d: "M.496 11.367L11.103.76l1.414 1.414L1.911 12.781z"
}), h("path", {
}), (0, _preact.h)("path", {
d: "M11.104 12.782L.497 2.175 1.911.76l10.607 10.606z"

@@ -36,0 +34,0 @@ }))));

"use strict";
/**
 * Takes an integer number of seconds (e.g. 83) and converts it into a
 * human-readable formatted string (e.g. '1:23').
 *
 * @param {number} seconds - whole, non-negative number of seconds.
 * @returns {string} minutes and zero-padded seconds, e.g. '0:05'.
 */
function formatSeconds(seconds) {
  // Pad with the string '0' explicitly; the original passed the number 0 and
  // relied on String.prototype.padStart coercing it.
  return `${Math.floor(seconds / 60)}:${String(seconds % 60).padStart(2, '0')}`;
}
/**

@@ -10,4 +14,2 @@ * Takes an Integer value of seconds (e.g. 83) and converts it into a human-readable formatted string (e.g. '1:23').

*/
module.exports = function formatSeconds(seconds) {
return `${Math.floor(seconds / 60)}:${String(seconds % 60).padStart(2, 0)}`;
};
module.exports = formatSeconds;
"use strict";
var _class, _stream, _audioActive, _recordingChunks, _recorder, _capturedMediaFile, _mediaDevices, _supportsUserMedia, _hasAudioCheck, _start, _startRecording, _stopRecording, _discardRecordedAudio, _submit, _stop, _getAudio, _changeSource, _updateSources, _temp;
// Babel helper: shallow-merge own enumerable properties of every source
// argument into the first argument (the target) and return it. On first call
// it picks Object.assign when available (falling back to a manual copy loop),
// then replaces itself so later calls skip the feature detection.
function _extends() {
  _extends = Object.assign || function (target) {
    var sources = Array.prototype.slice.call(arguments, 1);
    sources.forEach(function (source) {
      for (var key in source) {
        if (Object.prototype.hasOwnProperty.call(source, key)) {
          target[key] = source[key];
        }
      }
    });
    return target;
  };
  return _extends.apply(this, arguments);
}
// Babel helper: guard for loose-mode "private field" access. Returns the
// receiver when it owns the given private key; otherwise throws the same
// TypeError Babel's runtime emits for private access on a non-instance.
function _classPrivateFieldLooseBase(receiver, privateKey) {
  var ownsKey = Object.prototype.hasOwnProperty.call(receiver, privateKey);
  if (ownsKey) {
    return receiver;
  }
  throw new TypeError("attempted to use private field on non-instance");
}
// Babel helper: mint a unique string key of the form "__private_<n>_<name>"
// for a loose-mode private field. `id` is a module-wide counter that grows by
// one per key, guaranteeing uniqueness within the module.
var id = 0;
function _classPrivateFieldLooseKey(name) {
  var key = "__private_".concat(id, "_").concat(name);
  id += 1;
  return key;
}
const {
h
} = require('preact');
const {
UIPlugin
} = require('@uppy/core');
const getFileTypeExtension = require('@uppy/utils/lib/getFileTypeExtension');
const supportsMediaRecorder = require('./supportsMediaRecorder');
const RecordingScreen = require('./RecordingScreen');
const PermissionsScreen = require('./PermissionsScreen');
const locale = require('./locale.js');
/**
* Audio recording plugin
*/
// Compiled (Babel loose-mode) CommonJS build of the @uppy/audio plugin.
// "Private" class members are emulated with unique string keys created by
// _classPrivateFieldLooseKey and accessed through _classPrivateFieldLooseBase.
// The comma expression below allocates every key BEFORE the class body
// evaluates, defines the class, then attaches VERSION and exports the class.
module.exports = (_temp = (_stream = /*#__PURE__*/_classPrivateFieldLooseKey("stream"), _audioActive = /*#__PURE__*/_classPrivateFieldLooseKey("audioActive"), _recordingChunks = /*#__PURE__*/_classPrivateFieldLooseKey("recordingChunks"), _recorder = /*#__PURE__*/_classPrivateFieldLooseKey("recorder"), _capturedMediaFile = /*#__PURE__*/_classPrivateFieldLooseKey("capturedMediaFile"), _mediaDevices = /*#__PURE__*/_classPrivateFieldLooseKey("mediaDevices"), _supportsUserMedia = /*#__PURE__*/_classPrivateFieldLooseKey("supportsUserMedia"), _hasAudioCheck = /*#__PURE__*/_classPrivateFieldLooseKey("hasAudioCheck"), _start = /*#__PURE__*/_classPrivateFieldLooseKey("start"), _startRecording = /*#__PURE__*/_classPrivateFieldLooseKey("startRecording"), _stopRecording = /*#__PURE__*/_classPrivateFieldLooseKey("stopRecording"), _discardRecordedAudio = /*#__PURE__*/_classPrivateFieldLooseKey("discardRecordedAudio"), _submit = /*#__PURE__*/_classPrivateFieldLooseKey("submit"), _stop = /*#__PURE__*/_classPrivateFieldLooseKey("stop"), _getAudio = /*#__PURE__*/_classPrivateFieldLooseKey("getAudio"), _changeSource = /*#__PURE__*/_classPrivateFieldLooseKey("changeSource"), _updateSources = /*#__PURE__*/_classPrivateFieldLooseKey("updateSources"), _class = class Audio extends UIPlugin {
constructor(uppy, opts) {
var _this;
super(uppy, opts);
_this = this;
// Attach the private helper methods (defined at the bottom of the file) and
// the private state slots to this instance under their loose-mode keys.
Object.defineProperty(this, _getAudio, {
value: _getAudio2
});
Object.defineProperty(this, _hasAudioCheck, {
value: _hasAudioCheck2
});
Object.defineProperty(this, _stream, {
writable: true,
value: null
});
Object.defineProperty(this, _audioActive, {
writable: true,
value: false
});
Object.defineProperty(this, _recordingChunks, {
writable: true,
value: null
});
Object.defineProperty(this, _recorder, {
writable: true,
value: null
});
Object.defineProperty(this, _capturedMediaFile, {
writable: true,
value: null
});
Object.defineProperty(this, _mediaDevices, {
writable: true,
value: null
});
Object.defineProperty(this, _supportsUserMedia, {
writable: true,
value: null
});
// #start(options): request microphone access, store the live MediaStream,
// resolve which input device is active, and flip `audioReady` in plugin
// state. Compiled as a plain function closing over `_this`.
Object.defineProperty(this, _start, {
writable: true,
value: function (options) {
if (options === void 0) {
options = null;
}
if (!_classPrivateFieldLooseBase(_this, _supportsUserMedia)[_supportsUserMedia]) {
return Promise.reject(new Error('Microphone access not supported'));
}
_classPrivateFieldLooseBase(_this, _audioActive)[_audioActive] = true;
_classPrivateFieldLooseBase(_this, _hasAudioCheck)[_hasAudioCheck]().then(hasAudio => {
_this.setPluginState({
hasAudio
}); // ask user for access to their microphone
return _classPrivateFieldLooseBase(_this, _mediaDevices)[_mediaDevices].getUserMedia({
audio: true
}).then(stream => {
_classPrivateFieldLooseBase(_this, _stream)[_stream] = stream;
let currentDeviceId = null;
const tracks = stream.getAudioTracks();
if (!options || !options.deviceId) {
currentDeviceId = tracks[0].getSettings().deviceId;
} else {
tracks.forEach(track => {
if (track.getSettings().deviceId === options.deviceId) {
currentDeviceId = track.getSettings().deviceId;
}
});
} // Update the sources now, so we can access the names.
_classPrivateFieldLooseBase(_this, _updateSources)[_updateSources]();
_this.setPluginState({
currentDeviceId,
audioReady: true
});
}).catch(err => {
_this.setPluginState({
audioReady: false,
cameraError: err
});
_this.uppy.info(err.message, 'error');
});
});
}
});
// #startRecording: create a MediaRecorder on the current stream, collect
// 500ms data chunks, and stop early when the projected total would exceed
// the uppy-level maxFileSize restriction.
Object.defineProperty(this, _startRecording, {
writable: true,
value: () => {
// only used if supportsMediaRecorder() returned true
// eslint-disable-next-line compat/compat
_classPrivateFieldLooseBase(this, _recorder)[_recorder] = new MediaRecorder(_classPrivateFieldLooseBase(this, _stream)[_stream]);
_classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks] = [];
let stoppingBecauseOfMaxSize = false;
_classPrivateFieldLooseBase(this, _recorder)[_recorder].addEventListener('dataavailable', event => {
_classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks].push(event.data);
const {
restrictions
} = this.uppy.opts;
if (_classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks].length > 1 && restrictions.maxFileSize != null && !stoppingBecauseOfMaxSize) {
const totalSize = _classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks].reduce((acc, chunk) => acc + chunk.size, 0); // Exclude the initial chunk from the average size calculation because it is likely to be a very small outlier
const averageChunkSize = (totalSize - _classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks][0].size) / (_classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks].length - 1);
const expectedEndChunkSize = averageChunkSize * 3;
const maxSize = Math.max(0, restrictions.maxFileSize - expectedEndChunkSize);
if (totalSize > maxSize) {
stoppingBecauseOfMaxSize = true;
this.uppy.info(this.i18n('recordingStoppedMaxSize'), 'warning', 4000);
_classPrivateFieldLooseBase(this, _stopRecording)[_stopRecording]();
}
}
}); // use a "time slice" of 500ms: ondataavailable will be called each 500ms
// smaller time slices mean we can more accurately check the max file size restriction
_classPrivateFieldLooseBase(this, _recorder)[_recorder].start(500); // Start the recordingLengthTimer if we are showing the recording length.
this.recordingLengthTimer = setInterval(() => {
const currentRecordingLength = this.getPluginState().recordingLengthSeconds;
this.setPluginState({
recordingLengthSeconds: currentRecordingLength + 1
});
}, 1000);
this.setPluginState({
isRecording: true
});
}
});
// #stopRecording: stop the recorder, wait for its 'stop' event, convert the
// buffered chunks into a preview-able file, then release recorder state
// (chunks/recorder are cleared on both success and failure).
Object.defineProperty(this, _stopRecording, {
writable: true,
value: () => {
const stopped = new Promise(resolve => {
_classPrivateFieldLooseBase(this, _recorder)[_recorder].addEventListener('stop', () => {
resolve();
});
_classPrivateFieldLooseBase(this, _recorder)[_recorder].stop();
clearInterval(this.recordingLengthTimer);
this.setPluginState({
recordingLengthSeconds: 0
});
});
return stopped.then(() => {
this.setPluginState({
isRecording: false
});
return _classPrivateFieldLooseBase(this, _getAudio)[_getAudio]();
}).then(file => {
try {
_classPrivateFieldLooseBase(this, _capturedMediaFile)[_capturedMediaFile] = file; // create object url for capture result preview
this.setPluginState({
recordedAudio: URL.createObjectURL(file.data)
});
} catch (err) {
// Logging the error, except restrictions, which are handled in Core
if (!err.isRestriction) {
this.uppy.log(err);
}
}
}).then(() => {
_classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks] = null;
_classPrivateFieldLooseBase(this, _recorder)[_recorder] = null;
}, error => {
_classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks] = null;
_classPrivateFieldLooseBase(this, _recorder)[_recorder] = null;
throw error;
});
}
});
// #discardRecordedAudio: drop the captured file and its preview state.
Object.defineProperty(this, _discardRecordedAudio, {
writable: true,
value: () => {
this.setPluginState({
recordedAudio: null
});
_classPrivateFieldLooseBase(this, _capturedMediaFile)[_capturedMediaFile] = null;
}
});
// #submit: hand the captured file to Uppy core.
Object.defineProperty(this, _submit, {
writable: true,
value: () => {
try {
if (_classPrivateFieldLooseBase(this, _capturedMediaFile)[_capturedMediaFile]) {
this.uppy.addFile(_classPrivateFieldLooseBase(this, _capturedMediaFile)[_capturedMediaFile]);
}
} catch (err) {
// Logging the error, except restrictions, which are handled in Core
if (!err.isRestriction) {
this.uppy.log(err, 'error');
}
}
}
});
// #stop: tear everything down — stop the tracks, wait for the recorder to
// finish if one is active, then reset all private state and plugin state.
Object.defineProperty(this, _stop, {
writable: true,
value: async () => {
if (_classPrivateFieldLooseBase(this, _stream)[_stream]) {
const audioTracks = _classPrivateFieldLooseBase(this, _stream)[_stream].getAudioTracks();
audioTracks.forEach(track => track.stop());
}
if (_classPrivateFieldLooseBase(this, _recorder)[_recorder]) {
await new Promise(resolve => {
_classPrivateFieldLooseBase(this, _recorder)[_recorder].addEventListener('stop', resolve, {
once: true
});
_classPrivateFieldLooseBase(this, _recorder)[_recorder].stop();
clearInterval(this.recordingLengthTimer);
});
}
_classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks] = null;
_classPrivateFieldLooseBase(this, _recorder)[_recorder] = null;
_classPrivateFieldLooseBase(this, _audioActive)[_audioActive] = false;
_classPrivateFieldLooseBase(this, _stream)[_stream] = null;
this.setPluginState({
recordedAudio: null,
isRecording: false,
recordingLengthSeconds: 0
});
}
});
// #changeSource: restart capture on a different input device.
// NOTE(review): #stop is async but is not awaited before #start — presumably
// acceptable upstream; confirm against the source version if changing.
Object.defineProperty(this, _changeSource, {
writable: true,
value: deviceId => {
_classPrivateFieldLooseBase(this, _stop)[_stop]();
_classPrivateFieldLooseBase(this, _start)[_start]({
deviceId
});
}
});
// #updateSources: refresh the list of available 'audioinput' devices.
Object.defineProperty(this, _updateSources, {
writable: true,
value: () => {
_classPrivateFieldLooseBase(this, _mediaDevices)[_mediaDevices].enumerateDevices().then(devices => {
this.setPluginState({
audioSources: devices.filter(device => device.kind === 'audioinput')
});
});
}
});
// Capture environment support up front: mediaDevices is null in browsers
// without the API, which #start uses to reject early.
_classPrivateFieldLooseBase(this, _mediaDevices)[_mediaDevices] = navigator.mediaDevices;
_classPrivateFieldLooseBase(this, _supportsUserMedia)[_supportsUserMedia] = _classPrivateFieldLooseBase(this, _mediaDevices)[_mediaDevices] != null;
this.id = this.opts.id || 'Audio';
this.type = 'acquirer';
// Microphone icon rendered in the Dashboard acquirer list.
this.icon = () => h("svg", {
"aria-hidden": "true",
focusable: "false",
width: "32px",
height: "32px",
viewBox: "0 0 32 32"
}, h("g", {
fill: "none",
"fill-rule": "evenodd"
}, h("rect", {
fill: "#9B59B6",
width: "32",
height: "32",
rx: "16"
}), h("path", {
d: "M16 20c-2.21 0-4-1.71-4-3.818V9.818C12 7.71 13.79 6 16 6s4 1.71 4 3.818v6.364C20 18.29 18.21 20 16 20zm-6.364-7h.637c.351 0 .636.29.636.65v1.95c0 3.039 2.565 5.477 5.6 5.175 2.645-.264 4.582-2.692 4.582-5.407V13.65c0-.36.285-.65.636-.65h.637c.351 0 .636.29.636.65v1.631c0 3.642-2.544 6.888-6.045 7.382v1.387h2.227c.351 0 .636.29.636.65v.65c0 .36-.285.65-.636.65h-6.364a.643.643 0 0 1-.636-.65v-.65c0-.36.285-.65.636-.65h2.227v-1.372C11.637 22.2 9 19.212 9 15.6v-1.95c0-.36.285-.65.636-.65z",
fill: "#FFF",
"fill-rule": "nonzero"
})));
this.defaultLocale = locale;
this.opts = { ...opts
};
this.i18nInit();
this.title = this.i18n('pluginNameAudio');
// Initial plugin state. NOTE(review): the error key is `cameraError` even
// though this plugin records audio — the name appears inherited from the
// webcam plugin; renderers read this exact key, so it must stay.
this.setPluginState({
hasAudio: false,
audioReady: false,
cameraError: null,
recordingLengthSeconds: 0,
audioSources: [],
currentDeviceId: null
});
}
// Lazily kicks off microphone capture on first render, then shows either the
// permissions screen or the recording UI depending on plugin state.
render() {
if (!_classPrivateFieldLooseBase(this, _audioActive)[_audioActive]) {
_classPrivateFieldLooseBase(this, _start)[_start]();
}
const audioState = this.getPluginState();
if (!audioState.audioReady || !audioState.hasAudio) {
return h(PermissionsScreen, {
icon: this.icon,
i18n: this.i18n,
hasAudio: audioState.hasAudio
});
}
return h(RecordingScreen // eslint-disable-next-line react/jsx-props-no-spreading
, _extends({}, audioState, {
audioActive: _classPrivateFieldLooseBase(this, _audioActive)[_audioActive],
onChangeSource: _classPrivateFieldLooseBase(this, _changeSource)[_changeSource],
onStartRecording: _classPrivateFieldLooseBase(this, _startRecording)[_startRecording],
onStopRecording: _classPrivateFieldLooseBase(this, _stopRecording)[_stopRecording],
onDiscardRecordedAudio: _classPrivateFieldLooseBase(this, _discardRecordedAudio)[_discardRecordedAudio],
onSubmit: _classPrivateFieldLooseBase(this, _submit)[_submit],
onStop: _classPrivateFieldLooseBase(this, _stop)[_stop],
i18n: this.i18n,
showAudioSourceDropdown: this.opts.showAudioSourceDropdown,
supportsRecording: supportsMediaRecorder(),
recording: audioState.isRecording,
stream: _classPrivateFieldLooseBase(this, _stream)[_stream]
}));
}
// Uppy lifecycle: mount into the configured target and start tracking
// device changes so the source list stays fresh (restarting the stream if
// the current device disappears).
install() {
this.setPluginState({
audioReady: false,
recordingLengthSeconds: 0
});
const {
target
} = this.opts;
if (target) {
this.mount(target, this);
}
if (_classPrivateFieldLooseBase(this, _mediaDevices)[_mediaDevices]) {
_classPrivateFieldLooseBase(this, _updateSources)[_updateSources]();
_classPrivateFieldLooseBase(this, _mediaDevices)[_mediaDevices].ondevicechange = () => {
_classPrivateFieldLooseBase(this, _updateSources)[_updateSources]();
if (_classPrivateFieldLooseBase(this, _stream)[_stream]) {
let restartStream = true;
const {
audioSources,
currentDeviceId
} = this.getPluginState();
audioSources.forEach(audioSource => {
if (currentDeviceId === audioSource.deviceId) {
restartStream = false;
}
});
if (restartStream) {
_classPrivateFieldLooseBase(this, _stop)[_stop]();
_classPrivateFieldLooseBase(this, _start)[_start]();
}
}
};
}
}
// Uppy lifecycle: release the stream (if any) and unmount the UI.
uninstall() {
if (_classPrivateFieldLooseBase(this, _stream)[_stream]) {
_classPrivateFieldLooseBase(this, _stop)[_stop]();
}
this.unmount();
}
// Static VERSION is baked in at build time from package.json.
}), _class.VERSION = "0.2.1", _temp);
// Private method body for Audio##hasAudioCheck: resolves to true when the
// mediaDevices API reports at least one 'audioinput' device, and to false
// when the API itself is unavailable.
function _hasAudioCheck2() {
  const mediaDevices = _classPrivateFieldLooseBase(this, _mediaDevices)[_mediaDevices];
  if (!mediaDevices) {
    return Promise.resolve(false);
  }
  return mediaDevices.enumerateDevices().then(devices => devices.some(device => device.kind === 'audioinput'));
}
// Private method body for Audio##getAudio: assemble the recorded chunks into
// a single audio Blob and wrap it in an Uppy-compatible file descriptor.
// Rejects when the detected MIME type has no known file extension.
function _getAudio2() {
  // Sometimes in iOS Safari, Blobs (especially the first Blob in the
  // recordingChunks Array) have empty 'type' attributes (e.g. '') so we need
  // to find a Blob that has a defined 'type' attribute in order to determine
  // the correct MIME type.
  const chunks = _classPrivateFieldLooseBase(this, _recordingChunks)[_recordingChunks];
  const typedChunk = chunks.find(blob => {
    var _blob$type;
    return ((_blob$type = blob.type) == null ? void 0 : _blob$type.length) > 0;
  });
  const mimeType = typedChunk.type;
  const fileExtension = getFileTypeExtension(mimeType);
  if (!fileExtension) {
    return Promise.reject(new Error(`Could not retrieve recording: Unsupported media type "${mimeType}"`));
  }
  const name = `audio-${Date.now()}.${fileExtension}`;
  const blob = new Blob(chunks, {
    type: mimeType
  });
  const file = {
    source: this.id,
    name,
    data: new Blob([blob], {
      type: mimeType
    }),
    type: mimeType
  };
  return Promise.resolve(file);
}
module.exports = require("./Audio.js");
"use strict";
const {
h
} = require('preact');
var _preact = require("preact");

@@ -13,9 +11,9 @@ module.exports = props => {

} = props;
return h("div", {
return (0, _preact.h)("div", {
className: "uppy-Audio-permissons"
}, h("div", {
}, (0, _preact.h)("div", {
className: "uppy-Audio-permissonsIcon"
}, icon()), h("h1", {
}, icon()), (0, _preact.h)("h1", {
className: "uppy-Audio-title"
}, hasAudio ? i18n('allowAudioAccessTitle') : i18n('noAudioTitle')), h("p", null, hasAudio ? i18n('allowAudioAccessDescription') : i18n('noAudioDescription')));
}, hasAudio ? i18n('allowAudioAccessTitle') : i18n('noAudioTitle')), (0, _preact.h)("p", null, hasAudio ? i18n('allowAudioAccessDescription') : i18n('noAudioDescription')));
};
"use strict";
const {
h
} = require('preact');
var _preact = require("preact");
module.exports = function RecordButton(_ref) {
function RecordButton(_ref) {
let {

@@ -16,3 +14,3 @@ recording,

if (recording) {
return h("button", {
return (0, _preact.h)("button", {
className: "uppy-u-reset uppy-c-btn uppy-Audio-button",

@@ -24,3 +22,3 @@ type: "button",

"data-uppy-super-focusable": true
}, h("svg", {
}, (0, _preact.h)("svg", {
"aria-hidden": "true",

@@ -32,3 +30,3 @@ focusable: "false",

viewBox: "0 0 100 100"
}, h("rect", {
}, (0, _preact.h)("rect", {
x: "15",

@@ -41,3 +39,3 @@ y: "15",

return h("button", {
return (0, _preact.h)("button", {
className: "uppy-u-reset uppy-c-btn uppy-Audio-button",

@@ -49,3 +47,3 @@ type: "button",

"data-uppy-super-focusable": true
}, h("svg", {
}, (0, _preact.h)("svg", {
"aria-hidden": "true",

@@ -57,3 +55,3 @@ focusable: "false",

viewBox: "0 0 14 20"
}, h("path", {
}, (0, _preact.h)("path", {
d: "M7 14c2.21 0 4-1.71 4-3.818V3.818C11 1.71 9.21 0 7 0S3 1.71 3 3.818v6.364C3 12.29 4.79 14 7 14zm6.364-7h-.637a.643.643 0 0 0-.636.65V9.6c0 3.039-2.565 5.477-5.6 5.175-2.645-.264-4.582-2.692-4.582-5.407V7.65c0-.36-.285-.65-.636-.65H.636A.643.643 0 0 0 0 7.65v1.631c0 3.642 2.544 6.888 6.045 7.382v1.387H3.818a.643.643 0 0 0-.636.65v.65c0 .36.285.65.636.65h6.364c.351 0 .636-.29.636-.65v-.65c0-.36-.285-.65-.636-.65H7.955v-1.372C11.363 16.2 14 13.212 14 9.6V7.65c0-.36-.285-.65-.636-.65z",

@@ -63,2 +61,4 @@ fill: "#FFF",

})));
};
}
module.exports = RecordButton;
"use strict";
const {
h
} = require('preact');
var _preact = require("preact");
const formatSeconds = require('./formatSeconds');
const formatSeconds = require("./formatSeconds.js");
module.exports = function RecordingLength(_ref) {
function RecordingLength(_ref) {
let {

@@ -15,3 +13,3 @@ recordingLengthSeconds,

const formattedRecordingLengthSeconds = formatSeconds(recordingLengthSeconds);
return h("span", {
return (0, _preact.h)("span", {
"aria-label": i18n('recordingLength', {

@@ -21,2 +19,4 @@ recording_length: formattedRecordingLengthSeconds

}, formattedRecordingLengthSeconds);
};
}
module.exports = RecordingLength;
"use strict";
/* eslint-disable jsx-a11y/media-has-caption */
const {
h
} = require('preact');
var _preact = require("preact");
const {
useEffect,
useRef
} = require('preact/hooks');
var _hooks = require("preact/hooks");
const RecordButton = require('./RecordButton');
/* eslint-disable jsx-a11y/media-has-caption */
const RecordButton = require("./RecordButton.js");
const RecordingLength = require('./RecordingLength');
const RecordingLength = require("./RecordingLength.js");
const AudioSourceSelect = require('./AudioSourceSelect');
const AudioSourceSelect = require("./AudioSourceSelect.js");
const AudioOscilloscope = require('./audio-oscilloscope');
const AudioOscilloscope = require("./audio-oscilloscope/index.js");
const SubmitButton = require('./SubmitButton');
const SubmitButton = require("./SubmitButton.js");
const DiscardButton = require('./DiscardButton');
const DiscardButton = require("./DiscardButton.js");
module.exports = function RecordingScreen(props) {
function RecordingScreen(props) {
const {

@@ -41,6 +36,6 @@ stream,

} = props;
const canvasEl = useRef(null);
const oscilloscope = useRef(null); // componentDidMount / componentDidUnmount
const canvasEl = (0, _hooks.useRef)(null);
const oscilloscope = (0, _hooks.useRef)(null); // componentDidMount / componentDidUnmount
useEffect(() => {
(0, _hooks.useEffect)(() => {
return () => {

@@ -52,3 +47,3 @@ oscilloscope.current = null;

useEffect(() => {
(0, _hooks.useEffect)(() => {
if (!recordedAudio) {

@@ -78,20 +73,20 @@ oscilloscope.current = new AudioOscilloscope(canvasEl.current, {

const shouldShowAudioSourceDropdown = showAudioSourceDropdown && !hasRecordedAudio && audioSources && audioSources.length > 1;
return h("div", {
return (0, _preact.h)("div", {
className: "uppy-Audio-container"
}, h("div", {
}, (0, _preact.h)("div", {
className: "uppy-Audio-audioContainer"
}, hasRecordedAudio ? h("audio", {
}, hasRecordedAudio ? (0, _preact.h)("audio", {
className: "uppy-Audio-player",
controls: true,
src: recordedAudio
}) : h("canvas", {
}) : (0, _preact.h)("canvas", {
ref: canvasEl,
className: "uppy-Audio-canvas"
})), h("div", {
})), (0, _preact.h)("div", {
className: "uppy-Audio-footer"
}, h("div", {
}, (0, _preact.h)("div", {
className: "uppy-Audio-audioSourceContainer"
}, shouldShowAudioSourceDropdown ? AudioSourceSelect(props) : null), h("div", {
}, shouldShowAudioSourceDropdown ? AudioSourceSelect(props) : null), (0, _preact.h)("div", {
className: "uppy-Audio-buttonContainer"
}, shouldShowRecordButton && h(RecordButton, {
}, shouldShowRecordButton && (0, _preact.h)(RecordButton, {
recording: recording,

@@ -101,14 +96,16 @@ onStartRecording: onStartRecording,

i18n: i18n
}), hasRecordedAudio && h(SubmitButton, {
}), hasRecordedAudio && (0, _preact.h)(SubmitButton, {
onSubmit: onSubmit,
i18n: i18n
}), hasRecordedAudio && h(DiscardButton, {
}), hasRecordedAudio && (0, _preact.h)(DiscardButton, {
onDiscard: onDiscardRecordedAudio,
i18n: i18n
})), h("div", {
})), (0, _preact.h)("div", {
className: "uppy-Audio-recordingLength"
}, !hasRecordedAudio && h(RecordingLength, {
}, !hasRecordedAudio && (0, _preact.h)(RecordingLength, {
recordingLengthSeconds: recordingLengthSeconds,
i18n: i18n
}))));
};
}
module.exports = RecordingScreen;
"use strict";
const {
h
} = require('preact');
var _preact = require("preact");

@@ -12,3 +10,3 @@ function SubmitButton(_ref) {

} = _ref;
return h("button", {
return (0, _preact.h)("button", {
className: "uppy-u-reset uppy-c-btn uppy-Audio-button uppy-Audio-button--submit",

@@ -20,3 +18,3 @@ type: "button",

"data-uppy-super-focusable": true
}, h("svg", {
}, (0, _preact.h)("svg", {
width: "12",

@@ -29,3 +27,3 @@ height: "9",

className: "uppy-c-icon"
}, h("path", {
}, (0, _preact.h)("path", {
fill: "#fff",

@@ -32,0 +30,0 @@ fillRule: "nonzero",

"use strict";
module.exports = function supportsMediaRecorder() {
function supportsMediaRecorder() {
var _MediaRecorder$protot;

@@ -9,2 +9,4 @@

/* eslint-enable compat/compat */
};
}
module.exports = supportsMediaRecorder;
{
"name": "@uppy/audio",
"description": "Uppy plugin that records audio using the device’s microphone.",
"version": "0.2.1",
"version": "0.3.0",
"license": "MIT",

@@ -19,2 +19,3 @@ "main": "lib/index.js",

],
"type": "module",
"homepage": "https://uppy.io",

@@ -29,7 +30,7 @@ "bugs": {

"dependencies": {
"@uppy/utils": "^4.0.4",
"@uppy/utils": "^4.0.5",
"preact": "^10.5.13"
},
"peerDependencies": {
"@uppy/core": "^2.1.4"
"@uppy/core": "^2.1.6"
},

@@ -36,0 +37,0 @@ "publishConfig": {

@@ -12,3 +12,3 @@ function isFunction (v) {

*/
module.exports = class AudioOscilloscope {
export default class AudioOscilloscope {
constructor (canvas, options = {}) {

@@ -15,0 +15,0 @@ const canvasOptions = options.canvas || {}

@@ -8,3 +8,3 @@ /**

*/
module.exports = function formatSeconds (seconds) {
export default function formatSeconds (seconds) {
return `${Math.floor(

@@ -11,0 +11,0 @@ seconds / 60,

@@ -1,2 +0,2 @@

const formatSeconds = require('./formatSeconds')
import formatSeconds from './formatSeconds.js'

@@ -3,0 +3,0 @@ describe('formatSeconds', () => {

@@ -1,364 +0,1 @@

const { h } = require('preact')
const { UIPlugin } = require('@uppy/core')
const getFileTypeExtension = require('@uppy/utils/lib/getFileTypeExtension')
const supportsMediaRecorder = require('./supportsMediaRecorder')
const RecordingScreen = require('./RecordingScreen')
const PermissionsScreen = require('./PermissionsScreen')
const locale = require('./locale.js')
/**
* Audio recording plugin
*/
module.exports = class Audio extends UIPlugin {
static VERSION = require('../package.json').version
#stream = null
#audioActive = false
#recordingChunks = null
#recorder = null
#capturedMediaFile = null
#mediaDevices = null
#supportsUserMedia = null
constructor (uppy, opts) {
super(uppy, opts)
this.#mediaDevices = navigator.mediaDevices
this.#supportsUserMedia = this.#mediaDevices != null
this.id = this.opts.id || 'Audio'
this.type = 'acquirer'
this.icon = () => (
<svg aria-hidden="true" focusable="false" width="32px" height="32px" viewBox="0 0 32 32">
<g fill="none" fill-rule="evenodd">
<rect fill="#9B59B6" width="32" height="32" rx="16" />
<path d="M16 20c-2.21 0-4-1.71-4-3.818V9.818C12 7.71 13.79 6 16 6s4 1.71 4 3.818v6.364C20 18.29 18.21 20 16 20zm-6.364-7h.637c.351 0 .636.29.636.65v1.95c0 3.039 2.565 5.477 5.6 5.175 2.645-.264 4.582-2.692 4.582-5.407V13.65c0-.36.285-.65.636-.65h.637c.351 0 .636.29.636.65v1.631c0 3.642-2.544 6.888-6.045 7.382v1.387h2.227c.351 0 .636.29.636.65v.65c0 .36-.285.65-.636.65h-6.364a.643.643 0 0 1-.636-.65v-.65c0-.36.285-.65.636-.65h2.227v-1.372C11.637 22.2 9 19.212 9 15.6v-1.95c0-.36.285-.65.636-.65z" fill="#FFF" fill-rule="nonzero" />
</g>
</svg>
)
this.defaultLocale = locale
this.opts = { ...opts }
this.i18nInit()
this.title = this.i18n('pluginNameAudio')
this.setPluginState({
hasAudio: false,
audioReady: false,
cameraError: null,
recordingLengthSeconds: 0,
audioSources: [],
currentDeviceId: null,
})
}
#hasAudioCheck () {
if (!this.#mediaDevices) {
return Promise.resolve(false)
}
return this.#mediaDevices.enumerateDevices().then(devices => {
return devices.some(device => device.kind === 'audioinput')
})
}
// eslint-disable-next-line consistent-return
#start = (options = null) => {
if (!this.#supportsUserMedia) {
return Promise.reject(new Error('Microphone access not supported'))
}
this.#audioActive = true
this.#hasAudioCheck().then(hasAudio => {
this.setPluginState({
hasAudio,
})
// ask user for access to their camera
return this.#mediaDevices.getUserMedia({ audio: true })
.then((stream) => {
this.#stream = stream
let currentDeviceId = null
const tracks = stream.getAudioTracks()
if (!options || !options.deviceId) {
currentDeviceId = tracks[0].getSettings().deviceId
} else {
tracks.forEach((track) => {
if (track.getSettings().deviceId === options.deviceId) {
currentDeviceId = track.getSettings().deviceId
}
})
}
// Update the sources now, so we can access the names.
this.#updateSources()
this.setPluginState({
currentDeviceId,
audioReady: true,
})
})
.catch((err) => {
this.setPluginState({
audioReady: false,
cameraError: err,
})
this.uppy.info(err.message, 'error')
})
})
}
#startRecording = () => {
// only used if supportsMediaRecorder() returned true
// eslint-disable-next-line compat/compat
this.#recorder = new MediaRecorder(this.#stream)
this.#recordingChunks = []
let stoppingBecauseOfMaxSize = false
this.#recorder.addEventListener('dataavailable', (event) => {
this.#recordingChunks.push(event.data)
const { restrictions } = this.uppy.opts
if (this.#recordingChunks.length > 1
&& restrictions.maxFileSize != null
&& !stoppingBecauseOfMaxSize) {
const totalSize = this.#recordingChunks.reduce((acc, chunk) => acc + chunk.size, 0)
// Exclude the initial chunk from the average size calculation because it is likely to be a very small outlier
const averageChunkSize = (totalSize - this.#recordingChunks[0].size) / (this.#recordingChunks.length - 1)
const expectedEndChunkSize = averageChunkSize * 3
const maxSize = Math.max(0, restrictions.maxFileSize - expectedEndChunkSize)
if (totalSize > maxSize) {
stoppingBecauseOfMaxSize = true
this.uppy.info(this.i18n('recordingStoppedMaxSize'), 'warning', 4000)
this.#stopRecording()
}
}
})
// use a "time slice" of 500ms: ondataavailable will be called each 500ms
// smaller time slices mean we can more accurately check the max file size restriction
this.#recorder.start(500)
// Start the recordingLengthTimer if we are showing the recording length.
this.recordingLengthTimer = setInterval(() => {
const currentRecordingLength = this.getPluginState().recordingLengthSeconds
this.setPluginState({ recordingLengthSeconds: currentRecordingLength + 1 })
}, 1000)
this.setPluginState({
isRecording: true,
})
}
#stopRecording = () => {
const stopped = new Promise((resolve) => {
this.#recorder.addEventListener('stop', () => {
resolve()
})
this.#recorder.stop()
clearInterval(this.recordingLengthTimer)
this.setPluginState({ recordingLengthSeconds: 0 })
})
return stopped.then(() => {
this.setPluginState({
isRecording: false,
})
return this.#getAudio()
}).then((file) => {
try {
this.#capturedMediaFile = file
// create object url for capture result preview
this.setPluginState({
recordedAudio: URL.createObjectURL(file.data),
})
} catch (err) {
// Logging the error, exept restrictions, which is handled in Core
if (!err.isRestriction) {
this.uppy.log(err)
}
}
}).then(() => {
this.#recordingChunks = null
this.#recorder = null
}, (error) => {
this.#recordingChunks = null
this.#recorder = null
throw error
})
}
#discardRecordedAudio = () => {
this.setPluginState({ recordedAudio: null })
this.#capturedMediaFile = null
}
#submit = () => {
try {
if (this.#capturedMediaFile) {
this.uppy.addFile(this.#capturedMediaFile)
}
} catch (err) {
// Logging the error, exept restrictions, which is handled in Core
if (!err.isRestriction) {
this.uppy.log(err, 'error')
}
}
}
#stop = async () => {
if (this.#stream) {
const audioTracks = this.#stream.getAudioTracks()
audioTracks.forEach((track) => track.stop())
}
if (this.#recorder) {
await new Promise((resolve) => {
this.#recorder.addEventListener('stop', resolve, { once: true })
this.#recorder.stop()
clearInterval(this.recordingLengthTimer)
})
}
this.#recordingChunks = null
this.#recorder = null
this.#audioActive = false
this.#stream = null
this.setPluginState({
recordedAudio: null,
isRecording: false,
recordingLengthSeconds: 0,
})
}
#getAudio () {
// Sometimes in iOS Safari, Blobs (especially the first Blob in the recordingChunks Array)
// have empty 'type' attributes (e.g. '') so we need to find a Blob that has a defined 'type'
// attribute in order to determine the correct MIME type.
const mimeType = this.#recordingChunks.find(blob => blob.type?.length > 0).type
const fileExtension = getFileTypeExtension(mimeType)
if (!fileExtension) {
return Promise.reject(new Error(`Could not retrieve recording: Unsupported media type "${mimeType}"`))
}
const name = `audio-${Date.now()}.${fileExtension}`
const blob = new Blob(this.#recordingChunks, { type: mimeType })
const file = {
source: this.id,
name,
data: new Blob([blob], { type: mimeType }),
type: mimeType,
}
return Promise.resolve(file)
}
#changeSource = (deviceId) => {
this.#stop()
this.#start({ deviceId })
}
#updateSources = () => {
this.#mediaDevices.enumerateDevices().then(devices => {
this.setPluginState({
audioSources: devices.filter((device) => device.kind === 'audioinput'),
})
})
}
render () {
if (!this.#audioActive) {
this.#start()
}
const audioState = this.getPluginState()
if (!audioState.audioReady || !audioState.hasAudio) {
return (
<PermissionsScreen
icon={this.icon}
i18n={this.i18n}
hasAudio={audioState.hasAudio}
/>
)
}
return (
<RecordingScreen
// eslint-disable-next-line react/jsx-props-no-spreading
{...audioState}
audioActive={this.#audioActive}
onChangeSource={this.#changeSource}
onStartRecording={this.#startRecording}
onStopRecording={this.#stopRecording}
onDiscardRecordedAudio={this.#discardRecordedAudio}
onSubmit={this.#submit}
onStop={this.#stop}
i18n={this.i18n}
showAudioSourceDropdown={this.opts.showAudioSourceDropdown}
supportsRecording={supportsMediaRecorder()}
recording={audioState.isRecording}
stream={this.#stream}
/>
)
}
install () {
this.setPluginState({
audioReady: false,
recordingLengthSeconds: 0,
})
const { target } = this.opts
if (target) {
this.mount(target, this)
}
if (this.#mediaDevices) {
this.#updateSources()
this.#mediaDevices.ondevicechange = () => {
this.#updateSources()
if (this.#stream) {
let restartStream = true
const { audioSources, currentDeviceId } = this.getPluginState()
audioSources.forEach((audioSource) => {
if (currentDeviceId === audioSource.deviceId) {
restartStream = false
}
})
if (restartStream) {
this.#stop()
this.#start()
}
}
}
}
}
uninstall () {
if (this.#stream) {
this.#stop()
}
this.unmount()
}
}
export { default } from './Audio.jsx'

@@ -1,2 +0,2 @@

module.exports = {
export default {
strings: {

@@ -3,0 +3,0 @@ pluginNameAudio: 'Audio',

@@ -1,2 +0,2 @@

module.exports = function supportsMediaRecorder () {
export default function supportsMediaRecorder () {
/* eslint-disable compat/compat */

@@ -3,0 +3,0 @@ return typeof MediaRecorder === 'function'

@@ -1,2 +0,3 @@

const supportsMediaRecorder = require('./supportsMediaRecorder')
/* eslint-disable max-classes-per-file */
import supportsMediaRecorder from './supportsMediaRecorder.js'

@@ -3,0 +4,0 @@ describe('supportsMediaRecorder', () => {

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc