
extendable-media-recorder

Comparing version 9.2.0 to 9.2.1
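
The whole change set is a coordinated rename: the identifier returned by media-encoder-host's instantiate() is now called encoderInstanceId instead of encoderId, and every factory, interface, and type that threads it through to encode() is updated to match. As a rough orientation only (a minimal sketch, not the package's source; the helper name recordOneTimeslice is illustrative), the call pattern being renamed looks like this:

import { encode, instantiate } from 'media-encoder-host';

// Illustrative helper showing how the renamed identifier flows between the two calls.
const recordOneTimeslice = async (mimeType: string, sampleRate: number, timeslice: number) => {
    // media-encoder-host ^9.0.0 returns an encoderInstanceId where ^8.x returned encoderId.
    const { encoderInstanceId, port } = await instantiate(mimeType, sampleRate);

    // The same id is handed to every subsequent encode() call that flushes recorded data.
    const arrayBuffers = await encode(encoderInstanceId, timeslice);

    return { arrayBuffers, port };
};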

build/es2019/interfaces/audio-nodes-and-encoder-instance-id.d.ts


build/es2019/factories/web-audio-media-recorder.js

@@ -6,4 +6,4 @@ import { encode, instantiate } from 'media-encoder-host';

// @todo This should live in a separate file.
const createPromisedAudioNodesEncoderIdAndPort = async (audioBuffer, audioContext, channelCount, mediaStream, mimeType) => {
const { encoderId, port } = await instantiate(mimeType, audioContext.sampleRate);
const createPromisedAudioNodesEncoderInstanceIdAndPort = async (audioBuffer, audioContext, channelCount, mediaStream, mimeType) => {
const { encoderInstanceId, port } = await instantiate(mimeType, audioContext.sampleRate);
if (AudioWorkletNode === undefined) {

@@ -15,3 +15,3 @@ throw new Error(ERROR_MESSAGE);

const recorderAudioWorkletNode = createRecorderAudioWorkletNode(AudioWorkletNode, audioContext, { channelCount });
return { audioBufferSourceNode, encoderId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode };
return { audioBufferSourceNode, encoderInstanceId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode };
};

@@ -34,3 +34,3 @@ export const createWebAudioMediaRecorderFactory = (createBlobEvent, createInvalidModificationError, createInvalidStateError, createNotSupportedError) => {

let intervalId = null;
let promisedAudioNodesAndEncoderId = null;
let promisedAudioNodesAndEncoderInstanceId = null;
let promisedPartialRecording = null;

@@ -41,5 +41,5 @@ let isAudioContextRunning = true;

};
const requestNextPartialRecording = async (encoderId, timeslice) => {
const arrayBuffers = await encode(encoderId, timeslice);
if (promisedAudioNodesAndEncoderId === null) {
const requestNextPartialRecording = async (encoderInstanceId, timeslice) => {
const arrayBuffers = await encode(encoderInstanceId, timeslice);
if (promisedAudioNodesAndEncoderInstanceId === null) {
bufferedArrayBuffers.push(...arrayBuffers);

@@ -49,3 +49,3 @@ }

dispatchDataAvailableEvent(arrayBuffers);
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
}

@@ -58,3 +58,3 @@ };

const stop = () => {
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
return;

@@ -69,3 +69,3 @@ }

}
promisedAudioNodesAndEncoderId.then(async ({ encoderId, mediaStreamAudioSourceNode, recorderAudioWorkletNode }) => {
promisedAudioNodesAndEncoderInstanceId.then(async ({ encoderInstanceId, mediaStreamAudioSourceNode, recorderAudioWorkletNode }) => {
if (promisedPartialRecording !== null) {

@@ -79,4 +79,4 @@ promisedPartialRecording.catch(() => {

mediaStreamAudioSourceNode.disconnect(recorderAudioWorkletNode);
const arrayBuffers = await encode(encoderId, null);
if (promisedAudioNodesAndEncoderId === null) {
const arrayBuffers = await encode(encoderInstanceId, null);
if (promisedAudioNodesAndEncoderInstanceId === null) {
await suspend();

@@ -88,3 +88,3 @@ }

});
promisedAudioNodesAndEncoderId = null;
promisedAudioNodesAndEncoderInstanceId = null;
};

@@ -101,6 +101,6 @@ const suspend = () => {

get state() {
return promisedAudioNodesAndEncoderId === null ? 'inactive' : isAudioContextRunning ? 'recording' : 'paused';
return promisedAudioNodesAndEncoderInstanceId === null ? 'inactive' : isAudioContextRunning ? 'recording' : 'paused';
},
pause() {
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
throw createInvalidStateError();

@@ -114,3 +114,3 @@ }

resume() {
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
throw createInvalidStateError();

@@ -125,3 +125,3 @@ }

var _a;
if (promisedAudioNodesAndEncoderId !== null) {
if (promisedAudioNodesAndEncoderInstanceId !== null) {
throw createInvalidStateError();

@@ -135,6 +135,6 @@ }

const channelCount = audioTracks.length === 0 ? 2 : (_a = audioTracks[0].getSettings().channelCount) !== null && _a !== void 0 ? _a : 2;
promisedAudioNodesAndEncoderId = Promise.all([
promisedAudioNodesAndEncoderInstanceId = Promise.all([
resume(),
promisedAudioWorkletModule.then(() => createPromisedAudioNodesEncoderIdAndPort(audioBuffer, audioContext, channelCount, mediaStream, mimeType))
]).then(async ([, { audioBufferSourceNode, encoderId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode }]) => {
promisedAudioWorkletModule.then(() => createPromisedAudioNodesEncoderInstanceIdAndPort(audioBuffer, audioContext, channelCount, mediaStream, mimeType))
]).then(async ([, { audioBufferSourceNode, encoderInstanceId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode }]) => {
mediaStreamAudioSourceNode.connect(recorderAudioWorkletNode);

@@ -149,5 +149,5 @@ await new Promise((resolve) => {

if (timeslice !== undefined) {
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
}
return { encoderId, mediaStreamAudioSourceNode, recorderAudioWorkletNode };
return { encoderInstanceId, mediaStreamAudioSourceNode, recorderAudioWorkletNode };
});

@@ -154,0 +154,0 @@ const tracks = mediaStream.getTracks();
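
The requestNextPartialRecording helper that recurs in these hunks is a self-rescheduling flush loop: each timeslice is pulled out of the encoder instance and either dispatched to the consumer or, once recording has stopped, buffered for the final flush. A minimal sketch with the factory's closure state reduced to local variables (dispatchDataAvailableEvent and bufferedArrayBuffers mirror the names above; isRecording stands in for the promisedAudioNodesAndEncoderInstanceId !== null check):

import { encode } from 'media-encoder-host';

// Simplified stand-ins for the factory's closure state shown in the diff.
const bufferedArrayBuffers: ArrayBuffer[] = [];
let isRecording = true;
let promisedPartialRecording: null | Promise<void> = null;

const dispatchDataAvailableEvent = (arrayBuffers: ArrayBuffer[]): void => {
    // In the factory this wraps the buffers in a BlobEvent and dispatches 'dataavailable'.
};

const requestNextPartialRecording = async (encoderInstanceId: number, timeslice: number): Promise<void> => {
    // Flush whatever the encoder instance has accumulated for this timeslice.
    const arrayBuffers = await encode(encoderInstanceId, timeslice);

    if (!isRecording) {
        // Recording stopped while the flush was pending: keep the chunks for the final dispatch.
        bufferedArrayBuffers.push(...arrayBuffers);
    } else {
        dispatchDataAvailableEvent(arrayBuffers);
        // Immediately schedule the next flush and keep the promise so stop() can settle it.
        promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
    }
};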

@@ -13,4 +13,4 @@ import { encode, instantiate } from 'media-encoder-host';

};
const requestNextPartialRecording = async (encoderId, timeslice) => {
const arrayBuffers = await encode(encoderId, timeslice);
const requestNextPartialRecording = async (encoderInstanceId, timeslice) => {
const arrayBuffers = await encode(encoderInstanceId, timeslice);
if (nativeMediaRecorder.state === 'inactive') {

@@ -21,3 +21,3 @@ bufferedArrayBuffers.push(...arrayBuffers);

dispatchDataAvailableEvent(arrayBuffers);
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
}

@@ -76,3 +76,3 @@ };

let pendingInvocations = 0;
let promisedDataViewElementTypeEncoderIdAndPort = instantiate(mimeType, sampleRate);
let promisedDataViewElementTypeEncoderInstanceIdAndPort = instantiate(mimeType, sampleRate);
stopRecording = () => {

@@ -84,3 +84,3 @@ isStopped = true;

const promisedArrayBuffer = data.arrayBuffer();
promisedDataViewElementTypeEncoderIdAndPort = promisedDataViewElementTypeEncoderIdAndPort.then(async ({ dataView = null, elementType = null, encoderId, port }) => {
promisedDataViewElementTypeEncoderInstanceIdAndPort = promisedDataViewElementTypeEncoderInstanceIdAndPort.then(async ({ dataView = null, elementType = null, encoderInstanceId, port }) => {
const arrayBuffer = await promisedArrayBuffer;

@@ -94,7 +94,7 @@ pendingInvocations -= 1;

if (lengthAndValue === null) {
return { dataView: currentDataView, elementType, encoderId, port };
return { dataView: currentDataView, elementType, encoderInstanceId, port };
}
const { value } = lengthAndValue;
if (value !== 172351395) {
return { dataView, elementType, encoderId, port };
return { dataView, elementType, encoderInstanceId, port };
}

@@ -109,3 +109,3 @@ isRecording = true;

if (pendingInvocations === 0 && (nativeMediaRecorder.state === 'inactive' || isStopped)) {
encode(encoderId, null).then((arrayBuffers) => {
encode(encoderInstanceId, null).then((arrayBuffers) => {
dispatchDataAvailableEvent([...bufferedArrayBuffers, ...arrayBuffers]);

@@ -119,7 +119,7 @@ bufferedArrayBuffers.length = 0;

}
return { dataView: remainingDataView, elementType: currentElementType, encoderId, port };
return { dataView: remainingDataView, elementType: currentElementType, encoderInstanceId, port };
});
});
if (timeslice !== undefined) {
promisedDataViewElementTypeEncoderIdAndPort.then(({ encoderId }) => (promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice)));
promisedDataViewElementTypeEncoderInstanceIdAndPort.then(({ encoderInstanceId }) => (promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice)));
}

@@ -126,0 +126,0 @@ }

@@ -1,2 +0,2 @@

export * from './audio-nodes-and-encoder-id';
export * from './audio-nodes-and-encoder-instance-id';
export * from './blob-event';

@@ -3,0 +3,0 @@ export * from './blob-event-init';

@@ -1,2 +0,2 @@

export * from './audio-nodes-and-encoder-id';
export * from './audio-nodes-and-encoder-instance-id';
export * from './blob-event';

@@ -3,0 +3,0 @@ export * from './blob-event-init';

@@ -26,3 +26,3 @@ export * from './blob-event-factory';

export * from './not-supported-error-factory';
export * from './promised-data-view-element-type-encoder-id-and-port';
export * from './promised-data-view-element-type-encoder-instance-id-and-port';
export * from './read-element-content-factory';

@@ -29,0 +29,0 @@ export * from './read-element-content-function';

@@ -26,3 +26,3 @@ export * from './blob-event-factory';

export * from './not-supported-error-factory';
export * from './promised-data-view-element-type-encoder-id-and-port';
export * from './promised-data-view-element-type-encoder-instance-id-and-port';
export * from './read-element-content-factory';

@@ -29,0 +29,0 @@ export * from './read-element-content-function';

@@ -652,5 +652,5 @@ (function (global, factory) {

// @todo This should live in a separate file.
var createPromisedAudioNodesEncoderIdAndPort = /*#__PURE__*/function () {
var createPromisedAudioNodesEncoderInstanceIdAndPort = /*#__PURE__*/function () {
var _ref = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee(audioBuffer, audioContext, channelCount, mediaStream, mimeType) {
var _yield$instantiate, encoderId, port, audioBufferSourceNode, mediaStreamAudioSourceNode, recorderAudioWorkletNode;
var _yield$instantiate, encoderInstanceId, port, audioBufferSourceNode, mediaStreamAudioSourceNode, recorderAudioWorkletNode;
return _regeneratorRuntime.wrap(function _callee$(_context) {

@@ -663,3 +663,3 @@ while (1) switch (_context.prev = _context.next) {

_yield$instantiate = _context.sent;
encoderId = _yield$instantiate.encoderId;
encoderInstanceId = _yield$instantiate.encoderInstanceId;
port = _yield$instantiate.port;

@@ -683,3 +683,3 @@ if (!(standardizedAudioContext.AudioWorkletNode === undefined)) {

audioBufferSourceNode: audioBufferSourceNode,
encoderId: encoderId,
encoderInstanceId: encoderInstanceId,
mediaStreamAudioSourceNode: mediaStreamAudioSourceNode,

@@ -695,3 +695,3 @@ port: port,

}));
return function createPromisedAudioNodesEncoderIdAndPort(_x, _x2, _x3, _x4, _x5) {
return function createPromisedAudioNodesEncoderInstanceIdAndPort(_x, _x2, _x3, _x4, _x5) {
return _ref.apply(this, arguments);

@@ -722,3 +722,3 @@ };

var intervalId = null;
var promisedAudioNodesAndEncoderId = null;
var promisedAudioNodesAndEncoderInstanceId = null;
var promisedPartialRecording = null;

@@ -734,3 +734,3 @@ var isAudioContextRunning = true;

var requestNextPartialRecording = /*#__PURE__*/function () {
var _ref2 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee2(encoderId, timeslice) {
var _ref2 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee2(encoderInstanceId, timeslice) {
var arrayBuffers;

@@ -741,10 +741,10 @@ return _regeneratorRuntime.wrap(function _callee2$(_context2) {

_context2.next = 2;
return mediaEncoderHost.encode(encoderId, timeslice);
return mediaEncoderHost.encode(encoderInstanceId, timeslice);
case 2:
arrayBuffers = _context2.sent;
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
bufferedArrayBuffers.push.apply(bufferedArrayBuffers, _toConsumableArray(arrayBuffers));
} else {
dispatchDataAvailableEvent(arrayBuffers);
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
}

@@ -766,3 +766,3 @@ case 4:

var stop = function stop() {
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
return;

@@ -777,9 +777,9 @@ }

}
promisedAudioNodesAndEncoderId.then( /*#__PURE__*/function () {
promisedAudioNodesAndEncoderInstanceId.then( /*#__PURE__*/function () {
var _ref4 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee3(_ref3) {
var encoderId, mediaStreamAudioSourceNode, recorderAudioWorkletNode, arrayBuffers;
var encoderInstanceId, mediaStreamAudioSourceNode, recorderAudioWorkletNode, arrayBuffers;
return _regeneratorRuntime.wrap(function _callee3$(_context3) {
while (1) switch (_context3.prev = _context3.next) {
case 0:
encoderId = _ref3.encoderId, mediaStreamAudioSourceNode = _ref3.mediaStreamAudioSourceNode, recorderAudioWorkletNode = _ref3.recorderAudioWorkletNode;
encoderInstanceId = _ref3.encoderInstanceId, mediaStreamAudioSourceNode = _ref3.mediaStreamAudioSourceNode, recorderAudioWorkletNode = _ref3.recorderAudioWorkletNode;
if (promisedPartialRecording !== null) {

@@ -796,6 +796,6 @@ promisedPartialRecording["catch"](function () {

_context3.next = 7;
return mediaEncoderHost.encode(encoderId, null);
return mediaEncoderHost.encode(encoderInstanceId, null);
case 7:
arrayBuffers = _context3.sent;
if (!(promisedAudioNodesAndEncoderId === null)) {
if (!(promisedAudioNodesAndEncoderInstanceId === null)) {
_context3.next = 11;

@@ -820,3 +820,3 @@ break;

}());
promisedAudioNodesAndEncoderId = null;
promisedAudioNodesAndEncoderInstanceId = null;
};

@@ -833,6 +833,6 @@ var suspend = function suspend() {

get state() {
return promisedAudioNodesAndEncoderId === null ? 'inactive' : isAudioContextRunning ? 'recording' : 'paused';
return promisedAudioNodesAndEncoderInstanceId === null ? 'inactive' : isAudioContextRunning ? 'recording' : 'paused';
},
pause: function pause() {
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
throw createInvalidStateError();

@@ -846,3 +846,3 @@ }

resume: function resume() {
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
throw createInvalidStateError();

@@ -857,3 +857,3 @@ }

var _a;
if (promisedAudioNodesAndEncoderId !== null) {
if (promisedAudioNodesAndEncoderInstanceId !== null) {
throw createInvalidStateError();

@@ -867,11 +867,11 @@ }

var channelCount = audioTracks.length === 0 ? 2 : (_a = audioTracks[0].getSettings().channelCount) !== null && _a !== void 0 ? _a : 2;
promisedAudioNodesAndEncoderId = Promise.all([_resume(), promisedAudioWorkletModule.then(function () {
return createPromisedAudioNodesEncoderIdAndPort(audioBuffer, audioContext, channelCount, mediaStream, mimeType);
promisedAudioNodesAndEncoderInstanceId = Promise.all([_resume(), promisedAudioWorkletModule.then(function () {
return createPromisedAudioNodesEncoderInstanceIdAndPort(audioBuffer, audioContext, channelCount, mediaStream, mimeType);
})]).then( /*#__PURE__*/function () {
var _ref6 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee4(_ref5) {
var _ref7, _ref7$, audioBufferSourceNode, encoderId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode;
var _ref7, _ref7$, audioBufferSourceNode, encoderInstanceId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode;
return _regeneratorRuntime.wrap(function _callee4$(_context4) {
while (1) switch (_context4.prev = _context4.next) {
case 0:
_ref7 = _slicedToArray(_ref5, 2), _ref7$ = _ref7[1], audioBufferSourceNode = _ref7$.audioBufferSourceNode, encoderId = _ref7$.encoderId, mediaStreamAudioSourceNode = _ref7$.mediaStreamAudioSourceNode, port = _ref7$.port, recorderAudioWorkletNode = _ref7$.recorderAudioWorkletNode;
_ref7 = _slicedToArray(_ref5, 2), _ref7$ = _ref7[1], audioBufferSourceNode = _ref7$.audioBufferSourceNode, encoderInstanceId = _ref7$.encoderInstanceId, mediaStreamAudioSourceNode = _ref7$.mediaStreamAudioSourceNode, port = _ref7$.port, recorderAudioWorkletNode = _ref7$.recorderAudioWorkletNode;
mediaStreamAudioSourceNode.connect(recorderAudioWorkletNode);

@@ -890,6 +890,6 @@ _context4.next = 4;

if (timeslice !== undefined) {
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
}
return _context4.abrupt("return", {
encoderId: encoderId,
encoderInstanceId: encoderInstanceId,
mediaStreamAudioSourceNode: mediaStreamAudioSourceNode,

@@ -947,3 +947,3 @@ recorderAudioWorkletNode: recorderAudioWorkletNode

var requestNextPartialRecording = /*#__PURE__*/function () {
var _ref = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee(encoderId, timeslice) {
var _ref = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee(encoderInstanceId, timeslice) {
var arrayBuffers;

@@ -954,3 +954,3 @@ return _regeneratorRuntime.wrap(function _callee$(_context) {

_context.next = 2;
return mediaEncoderHost.encode(encoderId, timeslice);
return mediaEncoderHost.encode(encoderInstanceId, timeslice);
case 2:

@@ -962,3 +962,3 @@ arrayBuffers = _context.sent;

dispatchDataAvailableEvent(arrayBuffers);
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
}

@@ -1036,3 +1036,3 @@ case 4:

var pendingInvocations = 0;
var promisedDataViewElementTypeEncoderIdAndPort = mediaEncoderHost.instantiate(mimeType, sampleRate);
var promisedDataViewElementTypeEncoderInstanceIdAndPort = mediaEncoderHost.instantiate(mimeType, sampleRate);
stopRecording = function stopRecording() {

@@ -1045,9 +1045,9 @@ isStopped = true;

var promisedArrayBuffer = data.arrayBuffer();
promisedDataViewElementTypeEncoderIdAndPort = promisedDataViewElementTypeEncoderIdAndPort.then( /*#__PURE__*/function () {
promisedDataViewElementTypeEncoderInstanceIdAndPort = promisedDataViewElementTypeEncoderInstanceIdAndPort.then( /*#__PURE__*/function () {
var _ref4 = _asyncToGenerator( /*#__PURE__*/_regeneratorRuntime.mark(function _callee2(_ref3) {
var _ref3$dataView, dataView, _ref3$elementType, elementType, encoderId, port, arrayBuffer, currentDataView, lengthAndValue, value, _decodeWebMChunk, currentElementType, offset, contents, remainingDataView;
var _ref3$dataView, dataView, _ref3$elementType, elementType, encoderInstanceId, port, arrayBuffer, currentDataView, lengthAndValue, value, _decodeWebMChunk, currentElementType, offset, contents, remainingDataView;
return _regeneratorRuntime.wrap(function _callee2$(_context2) {
while (1) switch (_context2.prev = _context2.next) {
case 0:
_ref3$dataView = _ref3.dataView, dataView = _ref3$dataView === void 0 ? null : _ref3$dataView, _ref3$elementType = _ref3.elementType, elementType = _ref3$elementType === void 0 ? null : _ref3$elementType, encoderId = _ref3.encoderId, port = _ref3.port;
_ref3$dataView = _ref3.dataView, dataView = _ref3$dataView === void 0 ? null : _ref3$dataView, _ref3$elementType = _ref3.elementType, elementType = _ref3$elementType === void 0 ? null : _ref3$elementType, encoderInstanceId = _ref3.encoderInstanceId, port = _ref3.port;
_context2.next = 3;

@@ -1071,3 +1071,3 @@ return promisedArrayBuffer;

elementType: elementType,
encoderId: encoderId,
encoderInstanceId: encoderInstanceId,
port: port

@@ -1084,3 +1084,3 @@ });

elementType: elementType,
encoderId: encoderId,
encoderInstanceId: encoderInstanceId,
port: port

@@ -1100,3 +1100,3 @@ });

if (pendingInvocations === 0 && (nativeMediaRecorder.state === 'inactive' || isStopped)) {
mediaEncoderHost.encode(encoderId, null).then(function (arrayBuffers) {
mediaEncoderHost.encode(encoderInstanceId, null).then(function (arrayBuffers) {
dispatchDataAvailableEvent([].concat(bufferedArrayBuffers, _toConsumableArray(arrayBuffers)));

@@ -1113,3 +1113,3 @@ bufferedArrayBuffers.length = 0;

elementType: currentElementType,
encoderId: encoderId,
encoderInstanceId: encoderInstanceId,
port: port

@@ -1129,5 +1129,5 @@ });

if (timeslice !== undefined) {
promisedDataViewElementTypeEncoderIdAndPort.then(function (_ref6) {
var encoderId = _ref6.encoderId;
return promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
promisedDataViewElementTypeEncoderInstanceIdAndPort.then(function (_ref6) {
var encoderInstanceId = _ref6.encoderInstanceId;
return promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
});

@@ -1134,0 +1134,0 @@ }

@@ -19,3 +19,3 @@ {

"@babel/runtime": "^7.24.4",
"media-encoder-host": "^8.1.0",
"media-encoder-host": "^9.0.0",
"multi-buffer-data-view": "^6.0.4",

@@ -33,4 +33,4 @@ "recorder-audio-worklet": "^6.0.26",

"@babel/preset-env": "^7.24.4",
"@commitlint/cli": "^18.6.1",
"@commitlint/config-angular": "^18.6.1",
"@commitlint/cli": "^19.3.0",
"@commitlint/config-angular": "^19.3.0",
"@rollup/plugin-babel": "^6.0.4",

@@ -41,3 +41,3 @@ "chai": "^4.3.10",

"eslint": "^8.57.0",
"eslint-config-holy-grail": "^59.0.4",
"eslint-config-holy-grail": "^59.0.7",
"extendable-media-recorder-wav-encoder": "^7.0.109",

@@ -61,3 +61,3 @@ "grunt": "^1.6.1",

"rimraf": "^5.0.5",
"rollup": "^4.14.2",
"rollup": "^4.16.4",
"sinon": "^17.0.1",

@@ -102,3 +102,3 @@ "sinon-chai": "^3.7.0",

"types": "build/es2019/module.d.ts",
"version": "9.2.0"
"version": "9.2.1"
}
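
On the dependency side, the bump that drives the rename is media-encoder-host moving from ^8.1.0 to ^9.0.0; the remaining changes (commitlint, eslint-config-holy-grail, rollup) are routine tooling updates. None of these hunks touch the MediaRecorder surface that consumers interact with, so usage along the lines of the package's documented setup should be unaffected; the register()/connect() entry points in this sketch come from the package README rather than from this diff and are quoted here as an assumption:

import { MediaRecorder, register } from 'extendable-media-recorder';
import { connect } from 'extendable-media-recorder-wav-encoder';

// Assumed consumer-side setup: register the WAV encoder once, then use the
// ponyfilled MediaRecorder as usual.
const setUpRecorder = async () => {
    await register(await connect());

    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    const mediaRecorder = new MediaRecorder(stream, { mimeType: 'audio/wav' });

    mediaRecorder.addEventListener('dataavailable', (event) => {
        // event.data is a Blob containing the encoded audio.
    });
    mediaRecorder.start(1000);

    return mediaRecorder;
};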

@@ -13,3 +13,3 @@ import { encode, instantiate } from 'media-encoder-host';

} from 'standardized-audio-context';
import { IAudioNodesAndEncoderId } from '../interfaces';
import { IAudioNodesAndEncoderInstanceId } from '../interfaces';
import { TRecordingState, TWebAudioMediaRecorderFactoryFactory } from '../types';

@@ -20,3 +20,3 @@

// @todo This should live in a separate file.
const createPromisedAudioNodesEncoderIdAndPort = async (
const createPromisedAudioNodesEncoderInstanceIdAndPort = async (
audioBuffer: IAudioBuffer,

@@ -28,3 +28,3 @@ audioContext: IMinimalAudioContext,

) => {
const { encoderId, port } = await instantiate(mimeType, audioContext.sampleRate);
const { encoderInstanceId, port } = await instantiate(mimeType, audioContext.sampleRate);

@@ -39,3 +39,3 @@ if (AudioWorkletNode === undefined) {

return { audioBufferSourceNode, encoderId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode };
return { audioBufferSourceNode, encoderInstanceId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode };
};

@@ -65,3 +65,3 @@

let intervalId: null | number = null;
let promisedAudioNodesAndEncoderId: null | Promise<IAudioNodesAndEncoderId> = null;
let promisedAudioNodesAndEncoderInstanceId: null | Promise<IAudioNodesAndEncoderInstanceId> = null;
let promisedPartialRecording: null | Promise<void> = null;

@@ -74,6 +74,6 @@ let isAudioContextRunning = true;

const requestNextPartialRecording = async (encoderId: number, timeslice: number): Promise<void> => {
const arrayBuffers = await encode(encoderId, timeslice);
const requestNextPartialRecording = async (encoderInstanceId: number, timeslice: number): Promise<void> => {
const arrayBuffers = await encode(encoderInstanceId, timeslice);
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
bufferedArrayBuffers.push(...arrayBuffers);

@@ -83,3 +83,3 @@ } else {

promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
}

@@ -95,3 +95,3 @@ };

const stop = (): void => {
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
return;

@@ -109,28 +109,30 @@ }

promisedAudioNodesAndEncoderId.then(async ({ encoderId, mediaStreamAudioSourceNode, recorderAudioWorkletNode }) => {
if (promisedPartialRecording !== null) {
promisedPartialRecording.catch(() => {
/* @todo Only catch the errors caused by a duplicate call to encode. */
});
promisedPartialRecording = null;
}
promisedAudioNodesAndEncoderInstanceId.then(
async ({ encoderInstanceId, mediaStreamAudioSourceNode, recorderAudioWorkletNode }) => {
if (promisedPartialRecording !== null) {
promisedPartialRecording.catch(() => {
/* @todo Only catch the errors caused by a duplicate call to encode. */
});
promisedPartialRecording = null;
}
await recorderAudioWorkletNode.stop();
await recorderAudioWorkletNode.stop();
mediaStreamAudioSourceNode.disconnect(recorderAudioWorkletNode);
mediaStreamAudioSourceNode.disconnect(recorderAudioWorkletNode);
const arrayBuffers = await encode(encoderId, null);
const arrayBuffers = await encode(encoderInstanceId, null);
if (promisedAudioNodesAndEncoderId === null) {
await suspend();
}
if (promisedAudioNodesAndEncoderInstanceId === null) {
await suspend();
}
dispatchDataAvailableEvent([...bufferedArrayBuffers, ...arrayBuffers]);
dispatchDataAvailableEvent([...bufferedArrayBuffers, ...arrayBuffers]);
bufferedArrayBuffers.length = 0;
bufferedArrayBuffers.length = 0;
eventTarget.dispatchEvent(new Event('stop'));
});
eventTarget.dispatchEvent(new Event('stop'));
}
);
promisedAudioNodesAndEncoderId = null;
promisedAudioNodesAndEncoderInstanceId = null;
};

@@ -152,7 +154,7 @@

get state(): TRecordingState {
return promisedAudioNodesAndEncoderId === null ? 'inactive' : isAudioContextRunning ? 'recording' : 'paused';
return promisedAudioNodesAndEncoderInstanceId === null ? 'inactive' : isAudioContextRunning ? 'recording' : 'paused';
},
pause(): void {
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
throw createInvalidStateError();

@@ -168,3 +170,3 @@ }

resume(): void {
if (promisedAudioNodesAndEncoderId === null) {
if (promisedAudioNodesAndEncoderInstanceId === null) {
throw createInvalidStateError();

@@ -180,3 +182,3 @@ }

start(timeslice?: number): void {
if (promisedAudioNodesAndEncoderId !== null) {
if (promisedAudioNodesAndEncoderInstanceId !== null) {
throw createInvalidStateError();

@@ -194,27 +196,32 @@ }

promisedAudioNodesAndEncoderId = Promise.all([
promisedAudioNodesAndEncoderInstanceId = Promise.all([
resume(),
promisedAudioWorkletModule.then(() =>
createPromisedAudioNodesEncoderIdAndPort(audioBuffer, audioContext, channelCount, mediaStream, mimeType)
createPromisedAudioNodesEncoderInstanceIdAndPort(audioBuffer, audioContext, channelCount, mediaStream, mimeType)
)
]).then(async ([, { audioBufferSourceNode, encoderId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode }]) => {
mediaStreamAudioSourceNode.connect(recorderAudioWorkletNode);
]).then(
async ([
,
{ audioBufferSourceNode, encoderInstanceId, mediaStreamAudioSourceNode, port, recorderAudioWorkletNode }
]) => {
mediaStreamAudioSourceNode.connect(recorderAudioWorkletNode);
await new Promise((resolve) => {
audioBufferSourceNode.onended = resolve;
audioBufferSourceNode.connect(recorderAudioWorkletNode);
audioBufferSourceNode.start(audioContext.currentTime + length / audioContext.sampleRate);
});
await new Promise((resolve) => {
audioBufferSourceNode.onended = resolve;
audioBufferSourceNode.connect(recorderAudioWorkletNode);
audioBufferSourceNode.start(audioContext.currentTime + length / audioContext.sampleRate);
});
audioBufferSourceNode.disconnect(recorderAudioWorkletNode);
audioBufferSourceNode.disconnect(recorderAudioWorkletNode);
await recorderAudioWorkletNode.record(port);
await recorderAudioWorkletNode.record(port);
if (timeslice !== undefined) {
promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
if (timeslice !== undefined) {
promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
}
return { encoderInstanceId, mediaStreamAudioSourceNode, recorderAudioWorkletNode };
}
);
return { encoderId, mediaStreamAudioSourceNode, recorderAudioWorkletNode };
});
const tracks = mediaStream.getTracks();

@@ -221,0 +228,0 @@
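
The state getter in the hunks above derives the recorder state from the same renamed promise plus an isAudioContextRunning flag. Condensed into a standalone sketch (the TRecordingState union is assumed to match the native MediaRecorder states; the declarations are added here only for self-containment):

type TRecordingState = 'inactive' | 'paused' | 'recording';

let promisedAudioNodesAndEncoderInstanceId: null | Promise<unknown> = null;
let isAudioContextRunning = true;

// 'inactive' until start() assigns the promise, afterwards 'recording' or 'paused'
// depending on whether the underlying AudioContext is currently running.
const getState = (): TRecordingState =>
    promisedAudioNodesAndEncoderInstanceId === null ? 'inactive' : isAudioContextRunning ? 'recording' : 'paused';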

import { encode, instantiate } from 'media-encoder-host';
import { MultiBufferDataView } from 'multi-buffer-data-view';
import { on } from 'subscribable-things';
import { TPromisedDataViewElementTypeEncoderIdAndPort, TRecordingState, TWebmPcmMediaRecorderFactoryFactory } from '../types';
import { TPromisedDataViewElementTypeEncoderInstanceIdAndPort, TRecordingState, TWebmPcmMediaRecorderFactoryFactory } from '../types';

@@ -22,4 +22,4 @@ export const createWebmPcmMediaRecorderFactory: TWebmPcmMediaRecorderFactoryFactory = (

const requestNextPartialRecording = async (encoderId: number, timeslice: number): Promise<void> => {
const arrayBuffers = await encode(encoderId, timeslice);
const requestNextPartialRecording = async (encoderInstanceId: number, timeslice: number): Promise<void> => {
const arrayBuffers = await encode(encoderInstanceId, timeslice);

@@ -31,3 +31,3 @@ if (nativeMediaRecorder.state === 'inactive') {

promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice);
promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice);
}

@@ -102,6 +102,4 @@ };

let pendingInvocations = 0;
let promisedDataViewElementTypeEncoderIdAndPort: TPromisedDataViewElementTypeEncoderIdAndPort = instantiate(
mimeType,
sampleRate
);
let promisedDataViewElementTypeEncoderInstanceIdAndPort: TPromisedDataViewElementTypeEncoderInstanceIdAndPort =
instantiate(mimeType, sampleRate);

@@ -120,4 +118,4 @@ stopRecording = () => {

promisedDataViewElementTypeEncoderIdAndPort = promisedDataViewElementTypeEncoderIdAndPort.then(
async ({ dataView = null, elementType = null, encoderId, port }) => {
promisedDataViewElementTypeEncoderInstanceIdAndPort = promisedDataViewElementTypeEncoderInstanceIdAndPort.then(
async ({ dataView = null, elementType = null, encoderInstanceId, port }) => {
const arrayBuffer = await promisedArrayBuffer;

@@ -136,3 +134,3 @@

if (lengthAndValue === null) {
return { dataView: currentDataView, elementType, encoderId, port };
return { dataView: currentDataView, elementType, encoderInstanceId, port };
}

@@ -143,3 +141,3 @@

if (value !== 172351395) {
return { dataView, elementType, encoderId, port };
return { dataView, elementType, encoderInstanceId, port };
}

@@ -168,3 +166,3 @@

if (pendingInvocations === 0 && (nativeMediaRecorder.state === 'inactive' || isStopped)) {
encode(encoderId, null).then((arrayBuffers) => {
encode(encoderInstanceId, null).then((arrayBuffers) => {
dispatchDataAvailableEvent([...bufferedArrayBuffers, ...arrayBuffers]);

@@ -183,3 +181,3 @@

return { dataView: remainingDataView, elementType: currentElementType, encoderId, port };
return { dataView: remainingDataView, elementType: currentElementType, encoderInstanceId, port };
}

@@ -190,4 +188,5 @@ );

if (timeslice !== undefined) {
promisedDataViewElementTypeEncoderIdAndPort.then(
({ encoderId }) => (promisedPartialRecording = requestNextPartialRecording(encoderId, timeslice))
promisedDataViewElementTypeEncoderInstanceIdAndPort.then(
({ encoderInstanceId }) =>
(promisedPartialRecording = requestNextPartialRecording(encoderInstanceId, timeslice))
);

@@ -194,0 +193,0 @@ }

@@ -1,2 +0,2 @@

export * from './audio-nodes-and-encoder-id';
export * from './audio-nodes-and-encoder-instance-id';
export * from './blob-event';

@@ -3,0 +3,0 @@ export * from './blob-event-init';

@@ -26,3 +26,3 @@ export * from './blob-event-factory';

export * from './not-supported-error-factory';
export * from './promised-data-view-element-type-encoder-id-and-port';
export * from './promised-data-view-element-type-encoder-instance-id-and-port';
export * from './read-element-content-factory';

@@ -29,0 +29,0 @@ export * from './read-element-content-function';

