Socket
Socket
Sign in · Demo · Install

extendable-media-recorder

Package Overview
Dependencies
Maintainers
1
Versions
380
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

extendable-media-recorder - npm Package Compare versions

Comparing version 7.1.0 to 7.1.1

485

build/es5/bundle.js

@@ -7,15 +7,2 @@ (function (global, factory) {

function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var _asyncToGenerator__default = /*#__PURE__*/_interopDefaultLegacy(_asyncToGenerator);
var _regeneratorRuntime__default = /*#__PURE__*/_interopDefaultLegacy(_regeneratorRuntime);
var _classCallCheck__default = /*#__PURE__*/_interopDefaultLegacy(_classCallCheck);
var _createClass__default = /*#__PURE__*/_interopDefaultLegacy(_createClass);
var _assertThisInitialized__default = /*#__PURE__*/_interopDefaultLegacy(_assertThisInitialized);
var _inherits__default = /*#__PURE__*/_interopDefaultLegacy(_inherits);
var _possibleConstructorReturn__default = /*#__PURE__*/_interopDefaultLegacy(_possibleConstructorReturn);
var _getPrototypeOf__default = /*#__PURE__*/_interopDefaultLegacy(_getPrototypeOf);
var _slicedToArray__default = /*#__PURE__*/_interopDefaultLegacy(_slicedToArray);
var _toConsumableArray__default = /*#__PURE__*/_interopDefaultLegacy(_toConsumableArray);
var createBlobEventFactory = function createBlobEventFactory(nativeBlobEventConstructor) {

@@ -71,7 +58,7 @@ return function (type, blobEventInit) {

var nativeEventTarget = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
_classCallCheck__default["default"](this, EventTarget);
_classCallCheck(this, EventTarget);
this._listeners = new WeakMap();
this._nativeEventTarget = nativeEventTarget === null ? createEventTarget() : nativeEventTarget;
}
_createClass__default["default"](EventTarget, [{
_createClass(EventTarget, [{
key: "addEventListener",

@@ -184,7 +171,7 @@ value: function addEventListener(type, listener, options) {

function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf__default["default"](Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf__default["default"](this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn__default["default"](this, result); }; }
function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = _getPrototypeOf(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = _getPrototypeOf(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return _possibleConstructorReturn(this, result); }; }
function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
var createMediaRecorderConstructor = function createMediaRecorderConstructor(createNativeMediaRecorder, createNotSupportedError, createWebAudioMediaRecorder, createWebmPcmMediaRecorder, encoderRegexes, eventTargetConstructor, nativeMediaRecorderConstructor) {
return /*#__PURE__*/function (_eventTargetConstruct) {
_inherits__default["default"](MediaRecorder, _eventTargetConstruct);
_inherits(MediaRecorder, _eventTargetConstruct);
var _super = _createSuper(MediaRecorder);

@@ -194,3 +181,3 @@ function MediaRecorder(stream) {

var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
_classCallCheck__default["default"](this, MediaRecorder);
_classCallCheck(this, MediaRecorder);
var mimeType = options.mimeType;

@@ -209,5 +196,5 @@ if (nativeMediaRecorderConstructor !== null && (

if (nativeMediaRecorderConstructor !== null && nativeMediaRecorderConstructor.isTypeSupported !== undefined && nativeMediaRecorderConstructor.isTypeSupported('audio/webm;codecs=pcm')) {
_this._internalMediaRecorder = createWebmPcmMediaRecorder(_assertThisInitialized__default["default"](_this), nativeMediaRecorderConstructor, stream, mimeType);
_this._internalMediaRecorder = createWebmPcmMediaRecorder(_assertThisInitialized(_this), nativeMediaRecorderConstructor, stream, mimeType);
} else {
_this._internalMediaRecorder = createWebAudioMediaRecorder(_assertThisInitialized__default["default"](_this), stream, mimeType);
_this._internalMediaRecorder = createWebAudioMediaRecorder(_assertThisInitialized(_this), stream, mimeType);
}

@@ -227,5 +214,5 @@ } else {

_this._onstop = null;
return _possibleConstructorReturn__default["default"](_this);
return _possibleConstructorReturn(_this);
}
_createClass__default["default"](MediaRecorder, [{
_createClass(MediaRecorder, [{
key: "mimeType",

@@ -623,40 +610,38 @@ get: function get() {

var createPromisedAudioNodesEncoderIdAndPort = /*#__PURE__*/function () {
var _ref = _asyncToGenerator__default["default"]( /*#__PURE__*/_regeneratorRuntime__default["default"].mark(function _callee(audioBuffer, audioContext, channelCount, mediaStream, mimeType) {
var _ref = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee(audioBuffer, audioContext, channelCount, mediaStream, mimeType) {
var _yield$instantiate, encoderId, port, audioBufferSourceNode, mediaStreamAudioSourceNode, recorderAudioWorkletNode;
return _regeneratorRuntime__default["default"].wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.next = 2;
return mediaEncoderHost.instantiate(mimeType, audioContext.sampleRate);
case 2:
_yield$instantiate = _context.sent;
encoderId = _yield$instantiate.encoderId;
port = _yield$instantiate.port;
if (!(standardizedAudioContext.AudioWorkletNode === undefined)) {
_context.next = 7;
break;
}
throw new Error(ERROR_MESSAGE);
case 7:
audioBufferSourceNode = new standardizedAudioContext.AudioBufferSourceNode(audioContext, {
buffer: audioBuffer
});
mediaStreamAudioSourceNode = new standardizedAudioContext.MediaStreamAudioSourceNode(audioContext, {
mediaStream: mediaStream
});
recorderAudioWorkletNode = recorderAudioWorklet.createRecorderAudioWorkletNode(standardizedAudioContext.AudioWorkletNode, audioContext, {
channelCount: channelCount
});
return _context.abrupt("return", {
audioBufferSourceNode: audioBufferSourceNode,
encoderId: encoderId,
mediaStreamAudioSourceNode: mediaStreamAudioSourceNode,
port: port,
recorderAudioWorkletNode: recorderAudioWorkletNode
});
case 11:
case "end":
return _context.stop();
}
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) switch (_context.prev = _context.next) {
case 0:
_context.next = 2;
return mediaEncoderHost.instantiate(mimeType, audioContext.sampleRate);
case 2:
_yield$instantiate = _context.sent;
encoderId = _yield$instantiate.encoderId;
port = _yield$instantiate.port;
if (!(standardizedAudioContext.AudioWorkletNode === undefined)) {
_context.next = 7;
break;
}
throw new Error(ERROR_MESSAGE);
case 7:
audioBufferSourceNode = new standardizedAudioContext.AudioBufferSourceNode(audioContext, {
buffer: audioBuffer
});
mediaStreamAudioSourceNode = new standardizedAudioContext.MediaStreamAudioSourceNode(audioContext, {
mediaStream: mediaStream
});
recorderAudioWorkletNode = recorderAudioWorklet.createRecorderAudioWorkletNode(standardizedAudioContext.AudioWorkletNode, audioContext, {
channelCount: channelCount
});
return _context.abrupt("return", {
audioBufferSourceNode: audioBufferSourceNode,
encoderId: encoderId,
mediaStreamAudioSourceNode: mediaStreamAudioSourceNode,
port: port,
recorderAudioWorkletNode: recorderAudioWorkletNode
});
case 11:
case "end":
return _context.stop();
}

@@ -702,22 +687,20 @@ }, _callee);

var requestNextPartialRecording = /*#__PURE__*/function () {
var _ref2 = _asyncToGenerator__default["default"]( /*#__PURE__*/_regeneratorRuntime__default["default"].mark(function _callee2(encoderId, timeslice) {
var _ref2 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee2(encoderId, timeslice) {
var arrayBuffers;
return _regeneratorRuntime__default["default"].wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
_context2.next = 2;
return mediaEncoderHost.encode(encoderId, timeslice);
case 2:
arrayBuffers = _context2.sent;
if (promisedAudioNodesAndEncoderId === null) {
bufferedArrayBuffers.push.apply(bufferedArrayBuffers, _toConsumableArray__default["default"](arrayBuffers));
} else {
dispatchDataAvailableEvent(arrayBuffers);
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
}
case 4:
case "end":
return _context2.stop();
}
return _regeneratorRuntime.wrap(function _callee2$(_context2) {
while (1) switch (_context2.prev = _context2.next) {
case 0:
_context2.next = 2;
return mediaEncoderHost.encode(encoderId, timeslice);
case 2:
arrayBuffers = _context2.sent;
if (promisedAudioNodesAndEncoderId === null) {
bufferedArrayBuffers.push.apply(bufferedArrayBuffers, _toConsumableArray(arrayBuffers));
} else {
dispatchDataAvailableEvent(arrayBuffers);
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
}
case 4:
case "end":
return _context2.stop();
}

@@ -746,38 +729,36 @@ }, _callee2);

promisedAudioNodesAndEncoderId.then( /*#__PURE__*/function () {
var _ref4 = _asyncToGenerator__default["default"]( /*#__PURE__*/_regeneratorRuntime__default["default"].mark(function _callee3(_ref3) {
var _ref4 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee3(_ref3) {
var constantSourceNode, encoderId, mediaStreamAudioSourceNode, recorderAudioWorkletNode, arrayBuffers;
return _regeneratorRuntime__default["default"].wrap(function _callee3$(_context3) {
while (1) {
switch (_context3.prev = _context3.next) {
case 0:
constantSourceNode = _ref3.constantSourceNode, encoderId = _ref3.encoderId, mediaStreamAudioSourceNode = _ref3.mediaStreamAudioSourceNode, recorderAudioWorkletNode = _ref3.recorderAudioWorkletNode;
if (promisedPartialRecording !== null) {
promisedPartialRecording["catch"](function () {
/* @todo Only catch the errors caused by a duplicate call to encode. */
});
promisedPartialRecording = null;
}
_context3.next = 4;
return recorderAudioWorkletNode.stop();
case 4:
mediaStreamAudioSourceNode.disconnect(recorderAudioWorkletNode);
constantSourceNode.stop();
_context3.next = 8;
return mediaEncoderHost.encode(encoderId, null);
case 8:
arrayBuffers = _context3.sent;
if (!(promisedAudioNodesAndEncoderId === null)) {
_context3.next = 12;
break;
}
return _regeneratorRuntime.wrap(function _callee3$(_context3) {
while (1) switch (_context3.prev = _context3.next) {
case 0:
constantSourceNode = _ref3.constantSourceNode, encoderId = _ref3.encoderId, mediaStreamAudioSourceNode = _ref3.mediaStreamAudioSourceNode, recorderAudioWorkletNode = _ref3.recorderAudioWorkletNode;
if (promisedPartialRecording !== null) {
promisedPartialRecording["catch"](function () {
/* @todo Only catch the errors caused by a duplicate call to encode. */
});
promisedPartialRecording = null;
}
_context3.next = 4;
return recorderAudioWorkletNode.stop();
case 4:
mediaStreamAudioSourceNode.disconnect(recorderAudioWorkletNode);
constantSourceNode.stop();
_context3.next = 8;
return mediaEncoderHost.encode(encoderId, null);
case 8:
arrayBuffers = _context3.sent;
if (!(promisedAudioNodesAndEncoderId === null)) {
_context3.next = 12;
return suspend();
case 12:
dispatchDataAvailableEvent([].concat(bufferedArrayBuffers, _toConsumableArray__default["default"](arrayBuffers)));
bufferedArrayBuffers.length = 0;
eventTarget.dispatchEvent(new Event('stop'));
case 15:
case "end":
return _context3.stop();
}
break;
}
_context3.next = 12;
return suspend();
case 12:
dispatchDataAvailableEvent([].concat(bufferedArrayBuffers, _toConsumableArray(arrayBuffers)));
bufferedArrayBuffers.length = 0;
eventTarget.dispatchEvent(new Event('stop'));
case 15:
case "end":
return _context3.stop();
}

@@ -837,43 +818,41 @@ }, _callee3);

})]).then( /*#__PURE__*/function () {
var _ref6 = _asyncToGenerator__default["default"]( /*#__PURE__*/_regeneratorRuntime__default["default"].mark(function _callee4(_ref5) {
var _ref6 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee4(_ref5) {
var _ref7, _ref7$, audioBufferSourceNode, encoderId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode, constantSourceNode;
return _regeneratorRuntime__default["default"].wrap(function _callee4$(_context4) {
while (1) {
switch (_context4.prev = _context4.next) {
case 0:
_ref7 = _slicedToArray__default["default"](_ref5, 2), _ref7$ = _ref7[1], audioBufferSourceNode = _ref7$.audioBufferSourceNode, encoderId = _ref7$.encoderId, mediaStreamAudioSourceNode = _ref7$.mediaStreamAudioSourceNode, port = _ref7$.port, recorderAudioWorkletNode = _ref7$.recorderAudioWorkletNode;
mediaStreamAudioSourceNode.connect(recorderAudioWorkletNode);
_context4.next = 4;
return new Promise(function (resolve) {
audioBufferSourceNode.onended = resolve;
audioBufferSourceNode.connect(recorderAudioWorkletNode);
audioBufferSourceNode.start(audioContext.currentTime + length / audioContext.sampleRate);
});
case 4:
audioBufferSourceNode.disconnect(recorderAudioWorkletNode);
// Bug #17: Safari does throttle the processing on hidden tabs if there is no active audio output.
constantSourceNode = new standardizedAudioContext.ConstantSourceNode(audioContext, {
offset: 0
});
constantSourceNode.onended = function () {
return constantSourceNode.disconnect();
};
constantSourceNode.connect(audioContext.destination);
constantSourceNode.start();
_context4.next = 11;
return recorderAudioWorkletNode.record(port);
case 11:
if (timeslice !== undefined) {
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
}
return _context4.abrupt("return", {
constantSourceNode: constantSourceNode,
encoderId: encoderId,
mediaStreamAudioSourceNode: mediaStreamAudioSourceNode,
recorderAudioWorkletNode: recorderAudioWorkletNode
});
case 13:
case "end":
return _context4.stop();
}
return _regeneratorRuntime.wrap(function _callee4$(_context4) {
while (1) switch (_context4.prev = _context4.next) {
case 0:
_ref7 = _slicedToArray(_ref5, 2), _ref7$ = _ref7[1], audioBufferSourceNode = _ref7$.audioBufferSourceNode, encoderId = _ref7$.encoderId, mediaStreamAudioSourceNode = _ref7$.mediaStreamAudioSourceNode, port = _ref7$.port, recorderAudioWorkletNode = _ref7$.recorderAudioWorkletNode;
mediaStreamAudioSourceNode.connect(recorderAudioWorkletNode);
_context4.next = 4;
return new Promise(function (resolve) {
audioBufferSourceNode.onended = resolve;
audioBufferSourceNode.connect(recorderAudioWorkletNode);
audioBufferSourceNode.start(audioContext.currentTime + length / audioContext.sampleRate);
});
case 4:
audioBufferSourceNode.disconnect(recorderAudioWorkletNode);
// Bug #17: Safari does throttle the processing on hidden tabs if there is no active audio output.
constantSourceNode = new standardizedAudioContext.ConstantSourceNode(audioContext, {
offset: 0
});
constantSourceNode.onended = function () {
return constantSourceNode.disconnect();
};
constantSourceNode.connect(audioContext.destination);
constantSourceNode.start();
_context4.next = 11;
return recorderAudioWorkletNode.record(port);
case 11:
if (timeslice !== undefined) {
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
}
return _context4.abrupt("return", {
constantSourceNode: constantSourceNode,
encoderId: encoderId,
mediaStreamAudioSourceNode: mediaStreamAudioSourceNode,
recorderAudioWorkletNode: recorderAudioWorkletNode
});
case 13:
case "end":
return _context4.stop();
}

@@ -929,22 +908,20 @@ }, _callee4);

var requestNextPartialRecording = /*#__PURE__*/function () {
var _ref = _asyncToGenerator__default["default"]( /*#__PURE__*/_regeneratorRuntime__default["default"].mark(function _callee(encoderId, timeslice) {
var _ref = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee(encoderId, timeslice) {
var arrayBuffers;
return _regeneratorRuntime__default["default"].wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.next = 2;
return mediaEncoderHost.encode(encoderId, timeslice);
case 2:
arrayBuffers = _context.sent;
if (nativeMediaRecorder.state === 'inactive') {
bufferedArrayBuffers.push.apply(bufferedArrayBuffers, _toConsumableArray__default["default"](arrayBuffers));
} else {
dispatchDataAvailableEvent(arrayBuffers);
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
}
case 4:
case "end":
return _context.stop();
}
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) switch (_context.prev = _context.next) {
case 0:
_context.next = 2;
return mediaEncoderHost.encode(encoderId, timeslice);
case 2:
arrayBuffers = _context.sent;
if (nativeMediaRecorder.state === 'inactive') {
bufferedArrayBuffers.push.apply(bufferedArrayBuffers, _toConsumableArray(arrayBuffers));
} else {
dispatchDataAvailableEvent(arrayBuffers);
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
}
case 4:
case "end":
return _context.stop();
}

@@ -1024,73 +1001,71 @@ }, _callee);

promisedDataViewElementTypeEncoderIdAndPort = promisedDataViewElementTypeEncoderIdAndPort.then( /*#__PURE__*/function () {
var _ref4 = _asyncToGenerator__default["default"]( /*#__PURE__*/_regeneratorRuntime__default["default"].mark(function _callee2(_ref3) {
var _ref4 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee2(_ref3) {
var _ref3$dataView, dataView, _ref3$elementType, elementType, encoderId, port, arrayBuffer, currentDataView, lengthAndValue, value, _decodeWebMChunk, currentElementType, offset, contents, remainingDataView;
return _regeneratorRuntime__default["default"].wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
_ref3$dataView = _ref3.dataView, dataView = _ref3$dataView === void 0 ? null : _ref3$dataView, _ref3$elementType = _ref3.elementType, elementType = _ref3$elementType === void 0 ? null : _ref3$elementType, encoderId = _ref3.encoderId, port = _ref3.port;
_context2.next = 3;
return data.arrayBuffer();
case 3:
arrayBuffer = _context2.sent;
pendingInvocations -= 1;
currentDataView = dataView === null ? new multiBufferDataView.MultiBufferDataView([arrayBuffer]) : new multiBufferDataView.MultiBufferDataView([].concat(_toConsumableArray__default["default"](dataView.buffers), [arrayBuffer]), dataView.byteOffset);
if (!(!isRecording && nativeMediaRecorder.state === 'recording' && !isStopped)) {
_context2.next = 14;
break;
}
lengthAndValue = readVariableSizeInteger(currentDataView, 0);
if (!(lengthAndValue === null)) {
_context2.next = 10;
break;
}
return _context2.abrupt("return", {
dataView: currentDataView,
elementType: elementType,
encoderId: encoderId,
port: port
return _regeneratorRuntime.wrap(function _callee2$(_context2) {
while (1) switch (_context2.prev = _context2.next) {
case 0:
_ref3$dataView = _ref3.dataView, dataView = _ref3$dataView === void 0 ? null : _ref3$dataView, _ref3$elementType = _ref3.elementType, elementType = _ref3$elementType === void 0 ? null : _ref3$elementType, encoderId = _ref3.encoderId, port = _ref3.port;
_context2.next = 3;
return data.arrayBuffer();
case 3:
arrayBuffer = _context2.sent;
pendingInvocations -= 1;
currentDataView = dataView === null ? new multiBufferDataView.MultiBufferDataView([arrayBuffer]) : new multiBufferDataView.MultiBufferDataView([].concat(_toConsumableArray(dataView.buffers), [arrayBuffer]), dataView.byteOffset);
if (!(!isRecording && nativeMediaRecorder.state === 'recording' && !isStopped)) {
_context2.next = 14;
break;
}
lengthAndValue = readVariableSizeInteger(currentDataView, 0);
if (!(lengthAndValue === null)) {
_context2.next = 10;
break;
}
return _context2.abrupt("return", {
dataView: currentDataView,
elementType: elementType,
encoderId: encoderId,
port: port
});
case 10:
value = lengthAndValue.value;
if (!(value !== 172351395)) {
_context2.next = 13;
break;
}
return _context2.abrupt("return", {
dataView: dataView,
elementType: elementType,
encoderId: encoderId,
port: port
});
case 13:
isRecording = true;
case 14:
_decodeWebMChunk = decodeWebMChunk(currentDataView, elementType, channelCount), currentElementType = _decodeWebMChunk.currentElementType, offset = _decodeWebMChunk.offset, contents = _decodeWebMChunk.contents;
remainingDataView = offset < currentDataView.byteLength ? new multiBufferDataView.MultiBufferDataView(currentDataView.buffers, currentDataView.byteOffset + offset) : null;
contents.forEach(function (content) {
return port.postMessage(content, content.map(function (_ref5) {
var buffer = _ref5.buffer;
return buffer;
}));
});
if (pendingInvocations === 0 && (nativeMediaRecorder.state === 'inactive' || isStopped)) {
mediaEncoderHost.encode(encoderId, null).then(function (arrayBuffers) {
dispatchDataAvailableEvent([].concat(bufferedArrayBuffers, _toConsumableArray(arrayBuffers)));
bufferedArrayBuffers.length = 0;
eventTarget.dispatchEvent(new Event('stop'));
});
case 10:
value = lengthAndValue.value;
if (!(value !== 172351395)) {
_context2.next = 13;
break;
}
return _context2.abrupt("return", {
dataView: dataView,
elementType: elementType,
encoderId: encoderId,
port: port
});
case 13:
isRecording = true;
case 14:
_decodeWebMChunk = decodeWebMChunk(currentDataView, elementType, channelCount), currentElementType = _decodeWebMChunk.currentElementType, offset = _decodeWebMChunk.offset, contents = _decodeWebMChunk.contents;
remainingDataView = offset < currentDataView.byteLength ? new multiBufferDataView.MultiBufferDataView(currentDataView.buffers, currentDataView.byteOffset + offset) : null;
contents.forEach(function (content) {
return port.postMessage(content, content.map(function (_ref5) {
var buffer = _ref5.buffer;
return buffer;
}));
});
if (pendingInvocations === 0 && (nativeMediaRecorder.state === 'inactive' || isStopped)) {
mediaEncoderHost.encode(encoderId, null).then(function (arrayBuffers) {
dispatchDataAvailableEvent([].concat(bufferedArrayBuffers, _toConsumableArray__default["default"](arrayBuffers)));
bufferedArrayBuffers.length = 0;
eventTarget.dispatchEvent(new Event('stop'));
});
port.postMessage([]);
port.close();
removeEventListener();
}
return _context2.abrupt("return", {
dataView: remainingDataView,
elementType: currentElementType,
encoderId: encoderId,
port: port
});
case 19:
case "end":
return _context2.stop();
}
port.postMessage([]);
port.close();
removeEventListener();
}
return _context2.abrupt("return", {
dataView: remainingDataView,
elementType: currentElementType,
encoderId: encoderId,
port: port
});
case 19:
case "end":
return _context2.stop();
}

@@ -1189,17 +1164,15 @@ }, _callee2);

var register = /*#__PURE__*/function () {
var _ref = _asyncToGenerator__default["default"]( /*#__PURE__*/_regeneratorRuntime__default["default"].mark(function _callee(port) {
return _regeneratorRuntime__default["default"].wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
_context.t0 = encoderRegexes;
_context.next = 3;
return mediaEncoderHost.register(port);
case 3:
_context.t1 = _context.sent;
_context.t0.push.call(_context.t0, _context.t1);
case 5:
case "end":
return _context.stop();
}
var _ref = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee(port) {
return _regeneratorRuntime.wrap(function _callee$(_context) {
while (1) switch (_context.prev = _context.next) {
case 0:
_context.t0 = encoderRegexes;
_context.next = 3;
return mediaEncoderHost.register(port);
case 3:
_context.t1 = _context.sent;
_context.t0.push.call(_context.t0, _context.t1);
case 5:
case "end":
return _context.stop();
}

@@ -1217,4 +1190,2 @@ }, _callee);

Object.defineProperty(exports, '__esModule', { value: true });
}));

@@ -18,8 +18,8 @@ {

"dependencies": {
"@babel/runtime": "^7.20.6",
"media-encoder-host": "^8.0.82",
"multi-buffer-data-view": "^3.0.24",
"recorder-audio-worklet": "^5.1.34",
"standardized-audio-context": "^25.3.36",
"subscribable-things": "^2.1.11",
"@babel/runtime": "^7.20.7",
"media-encoder-host": "^8.0.83",
"multi-buffer-data-view": "^4.0.0",
"recorder-audio-worklet": "^5.1.35",
"standardized-audio-context": "^25.3.37",
"subscribable-things": "^2.1.12",
"tslib": "^2.4.1"

@@ -29,15 +29,15 @@ },

"devDependencies": {
"@babel/core": "^7.20.5",
"@babel/core": "^7.20.12",
"@babel/plugin-external-helpers": "^7.18.6",
"@babel/plugin-transform-runtime": "^7.19.6",
"@babel/preset-env": "^7.20.2",
"@commitlint/cli": "^17.3.0",
"@commitlint/config-angular": "^17.3.0",
"@rollup/plugin-babel": "^5.3.1",
"@commitlint/cli": "^17.4.2",
"@commitlint/config-angular": "^17.4.2",
"@rollup/plugin-babel": "^6.0.3",
"chai": "^4.3.7",
"commitizen": "^4.2.5",
"commitizen": "^4.2.6",
"cz-conventional-changelog": "^3.3.0",
"eslint": "^8.29.0",
"eslint-config-holy-grail": "^52.0.33",
"extendable-media-recorder-wav-encoder": "^7.0.80",
"eslint": "^8.31.0",
"eslint-config-holy-grail": "^55.0.2",
"extendable-media-recorder-wav-encoder": "^7.0.81",
"grunt": "^1.5.3",

@@ -47,3 +47,3 @@ "grunt-cli": "^1.4.3",

"grunt-sh": "^0.2.0",
"husky": "^8.0.2",
"husky": "^8.0.3",
"karma": "^6.4.1",

@@ -58,14 +58,14 @@ "karma-browserstack-launcher": "^1.6.0",

"load-grunt-config": "^4.0.1",
"mocha": "^10.1.0",
"prettier": "^2.8.0",
"mocha": "^10.2.0",
"prettier": "^2.8.3",
"pretty-quick": "^3.1.3",
"rimraf": "^3.0.2",
"rollup": "^2.79.1",
"sinon": "^14.0.2",
"rimraf": "^4.0.5",
"rollup": "^3.10.0",
"sinon": "^15.0.1",
"sinon-chai": "^3.7.0",
"ts-loader": "^9.4.2",
"tsconfig-holy-grail": "^11.1.36",
"tsconfig-holy-grail": "^12.0.0",
"tslint": "^6.1.3",
"tslint-config-holy-grail": "^53.2.34",
"typescript": "^4.9.3",
"tslint-config-holy-grail": "^54.0.0",
"typescript": "^4.9.4",
"webpack": "^5.75.0"

@@ -98,3 +98,3 @@ },

"types": "build/es2019/module.d.ts",
"version": "7.1.0"
"version": "7.1.1"
}

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc