New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in | Demo | Install
Socket

meyda

Package Overview
Dependencies
Maintainers
4
Versions
61
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

meyda - npm Package Compare versions

Comparing version 5.1.8-beta.1 to 5.1.8

18

bin/cli.js

@@ -136,17 +136,11 @@ #! /usr/bin/env node

var wl = new WavLoader(
function(config) {
Meyda.sampleRate = config.sampleRate;
if (!opt.options.p) {
console.log("Sample rate recognized as: " + Meyda.sampleRate);
}
},
function (chunk) {
//convert to normal array so we can concatenate
var _chunk = typedToArray(chunk);
//check if chunk is bigger than frame
if (_chunk.length > FRAME_SIZE) {
// if so, we'll extract stuff from it frame by frame, until we're left with something that's short enough to buffer
while (_chunk.length > FRAME_SIZE) {
var frame = _chunk.slice(0, FRAME_SIZE);
_chunk.splice(0, HOP_SIZE);
extractFeatures(frame);
if (!opt.options.p) process.stdout.write("-");
frameCount++;
}
}
buffer = buffer.concat(_chunk);

@@ -153,0 +147,0 @@ //if we're long enough, splice the frame, and extract features on it

@@ -1,2 +0,2 @@

var WavManager = function (data_callback, end_callback) {
var WavManager = function (open_callback, data_callback, end_callback) {
var source = new Buffer(1);

@@ -33,2 +33,3 @@ var fs = require("fs");

//read wav headers
open_callback({sampleRate: format.sampleRate});
_bitDepth = format.bitDepth;

@@ -35,0 +36,0 @@ _numBytesPerSample = _bitDepth / 8;

(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.Meyda = factory());
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.Meyda = factory());
}(this, (function () { 'use strict';
/**
 * Wrap a CommonJS export object so it looks like an ES-module namespace.
 * If `n` is already flagged `__esModule` it is returned untouched; otherwise
 * a new object flagged `__esModule` is returned whose keys mirror `n` via
 * live getters (existing accessor descriptors are copied verbatim).
 */
function getAugmentedNamespace(n) {
  if (n.__esModule) return n;
  const ns = Object.defineProperty({}, '__esModule', { value: true });
  Object.keys(n).forEach((key) => {
    const desc = Object.getOwnPropertyDescriptor(n, key);
    // Reuse an existing getter; otherwise synthesize a live read-through.
    const mirrored = desc.get
      ? desc
      : {
          enumerable: true,
          get: function () {
            return n[key];
          }
        };
    Object.defineProperty(ns, key, mirrored);
  });
  return ns;
}
/**
 * Build a Blackman window of `size` samples.
 * Formula per http://uk.mathworks.com/help/signal/ref/blackman.html.
 * Only the first half is computed; the second half is mirrored from it.
 * (Original span contained each line twice — a diff-scrape artifact — making
 * it syntactically invalid; this is the de-duplicated definition.)
 * @param {number} size - window length in samples
 * @returns {Float32Array} the window coefficients
 */
function blackman(size) {
  let blackmanBuffer = new Float32Array(size);
  let coeff1 = 2 * Math.PI / (size - 1);
  let coeff2 = 2 * coeff1;
  // first half of the window
  for (let i = 0; i < size / 2; i++) {
    blackmanBuffer[i] = 0.42 - 0.5 * Math.cos(i * coeff1) + 0.08 * Math.cos(i * coeff2);
  }
  // second half of the window, mirrored from the first
  for (let i = Math.ceil(size / 2); i > 0; i--) {
    blackmanBuffer[size - i] = blackmanBuffer[i - 1];
  }
  return blackmanBuffer;
}
/**
 * Build a sine window of `size` samples: sin(pi * i / (size - 1)).
 * (De-duplicated from a diff-scrape artifact.)
 * @param {number} size - window length in samples
 * @returns {Float32Array} the window coefficients
 */
function sine(size) {
  let coeff = Math.PI / (size - 1);
  let sineBuffer = new Float32Array(size);
  for (let i = 0; i < size; i++) {
    sineBuffer[i] = Math.sin(coeff * i);
  }
  return sineBuffer;
}
/**
 * Build a Hanning (Hann) window of `size` samples.
 * Formula per the R documentation:
 * http://ugrad.stat.ubc.ca/R/library/e1071/html/hanning.window.html
 * (De-duplicated from a diff-scrape artifact.)
 * @param {number} size - window length in samples
 * @returns {Float32Array} the window coefficients
 */
function hanning(size) {
  let hanningBuffer = new Float32Array(size);
  for (let i = 0; i < size; i++) {
    hanningBuffer[i] = 0.5 - 0.5 * Math.cos(2 * Math.PI * i / (size - 1));
  }
  return hanningBuffer;
}
/**
 * Build a Hamming window of `size` samples.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {number} size - window length in samples
 * @returns {Float32Array} the window coefficients
 */
function hamming(size) {
  let hammingBuffer = new Float32Array(size);
  for (let i = 0; i < size; i++) {
    // NOTE(review): `(i / size - 1)` parses as `(i/size) - 1`, i.e.
    // cos(2*PI*i/size - 2*PI) === cos(2*PI*i/size). MATLAB's hamming uses
    // 2*PI*i/(size-1) — preserved as-is; confirm against upstream meyda
    // before changing, since it alters feature output.
    hammingBuffer[i] = 0.54 - 0.46 * Math.cos(2 * Math.PI * (i / size - 1));
  }
  return hammingBuffer;
}
// Frozen namespace of window functions, looked up by name in applyWindow().
// (Original span declared this twice — diff-scrape artifact; de-duplicated.)
var windowing = /*#__PURE__*/Object.freeze({
  __proto__: null,
  blackman: blackman,
  sine: sine,
  hanning: hanning,
  hamming: hamming
});
// Cache of generated windows: windows[name][length] -> Float32Array.
let windows = {};

/**
 * True when `num` is an exact power of two (1, 2, 4, 8, ...).
 * Repeatedly halves even numbers; a power of two reduces to exactly 1.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {number} num
 * @returns {boolean}
 */
function isPowerOfTwo(num) {
  while (num % 2 === 0 && num > 1) {
    num /= 2;
  }
  return num === 1;
}
/**
 * Element-wise product of two buffers, truncated to the shorter length.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {ArrayLike<number>} a
 * @param {ArrayLike<number>} b
 * @returns {number[]} plain array of a[i] * b[i]
 */
function pointwiseBufferMult(a, b) {
  let c = [];
  for (let i = 0; i < Math.min(a.length, b.length); i++) {
    c[i] = a[i] * b[i];
  }
  return c;
}
/**
 * Apply a named window function to `signal`.
 * "rect" (rectangular) is a no-op; an empty/falsy name defaults to "hanning".
 * Generated windows are memoized in the module-level `windows` cache keyed by
 * name and signal length. Throws Error("Invalid windowing function") when the
 * name is not in the `windowing` namespace.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {ArrayLike<number>} signal
 * @param {string} windowname
 * @returns {ArrayLike<number>} windowed signal (plain array) or the input for "rect"
 */
function applyWindow(signal, windowname) {
  if (windowname !== "rect") {
    if (windowname === "" || !windowname) windowname = "hanning";
    if (!windows[windowname]) windows[windowname] = {};
    if (!windows[windowname][signal.length]) {
      try {
        windows[windowname][signal.length] = windowing[windowname](signal.length);
      } catch (e) {
        throw new Error("Invalid windowing function");
      }
    }
    signal = pointwiseBufferMult(signal, windows[windowname][signal.length]);
  }
  return signal;
}
/**
 * Precompute the Bark-scale value for each FFT bin.
 * Each bin's center frequency (i * sampleRate / bufferSize) is converted to
 * Bark via Zwicker's approximation: 13*atan(f/1315.8) + 3.5*atan((f/7518)^2).
 * (De-duplicated from a diff-scrape artifact.)
 * @param {number} length - number of bins
 * @param {number} sampleRate
 * @param {number} bufferSize
 * @returns {Float32Array} Bark value per bin
 */
function createBarkScale(length, sampleRate, bufferSize) {
  let barkScale = new Float32Array(length);
  for (var i = 0; i < barkScale.length; i++) {
    barkScale[i] = i * sampleRate / bufferSize;
    barkScale[i] = 13 * Math.atan(barkScale[i] / 1315.8) + 3.5 * Math.atan(Math.pow(barkScale[i] / 7518, 2));
  }
  return barkScale;
}
/**
 * Utility: copy a plain numeric array into a new Float32Array.
 * @param {ArrayLike<number>} t
 * @returns {Float32Array}
 */
function arrayToTyped(t) {
  const typed = Float32Array.from(t);
  return typed;
}
/**
 * Convert a mel value back to Hz: f = 700 * (e^(m/1125) - 1).
 * Inverse of _freqToMel. (De-duplicated from a diff-scrape artifact.)
 * @param {number} melValue
 * @returns {number} frequency in Hz
 */
function _melToFreq(melValue) {
  var freqValue = 700 * (Math.exp(melValue / 1125) - 1);
  return freqValue;
}
/**
 * Convert a frequency in Hz to the mel scale: m = 1125 * ln(1 + f/700).
 * Inverse of _melToFreq. (De-duplicated from a diff-scrape artifact.)
 * @param {number} freqValue - frequency in Hz
 * @returns {number} mel value
 */
function _freqToMel(freqValue) {
  var melValue = 1125 * Math.log(1 + freqValue / 700);
  return melValue;
}
/**
 * Build a bank of triangular mel-spaced filters for MFCC computation.
 * Filter centers are linearly spaced on the mel scale between 0 Hz and the
 * Nyquist frequency, then mapped back to FFT bin indices; each filter rises
 * linearly from its lower bin to its center and falls to its upper bin.
 * (Original span was line-duplicated by the diff scrape; this is the
 * de-duplicated definition.)
 * @param {number} numFilters - number of mel bands
 * @param {number} sampleRate
 * @param {number} bufferSize - FFT size
 * @returns {number[][]} numFilters rows of (bufferSize/2 + 1) weights
 */
function createMelFilterBank(numFilters, sampleRate, bufferSize) {
  //the +2 is the upper and lower limits
  let melValues = new Float32Array(numFilters + 2);
  let melValuesInFreq = new Float32Array(numFilters + 2);
  //Generate limits in Hz - from 0 to the nyquist.
  let lowerLimitFreq = 0;
  let upperLimitFreq = sampleRate / 2;
  //Convert the limits to Mel
  let lowerLimitMel = _freqToMel(lowerLimitFreq);
  let upperLimitMel = _freqToMel(upperLimitFreq);
  //Find the range
  let range = upperLimitMel - lowerLimitMel;
  //Find the range as part of the linear interpolation
  let valueToAdd = range / (numFilters + 1);
  let fftBinsOfFreq = Array(numFilters + 2);
  for (let i = 0; i < melValues.length; i++) {
    // Initialising the mel frequencies
    // They're a linear interpolation between the lower and upper limits.
    melValues[i] = i * valueToAdd;
    // Convert back to Hz
    melValuesInFreq[i] = _melToFreq(melValues[i]);
    // Find the corresponding bins
    fftBinsOfFreq[i] = Math.floor((bufferSize + 1) * melValuesInFreq[i] / sampleRate);
  }
  var filterBank = Array(numFilters);
  for (let j = 0; j < filterBank.length; j++) {
    // Create a two dimensional array of size numFilters * (buffersize/2)+1
    // pre-populating the arrays with 0s.
    filterBank[j] = Array.apply(null, new Array(bufferSize / 2 + 1)).map(Number.prototype.valueOf, 0);
    //creating the lower and upper slopes for each bin
    for (let i = fftBinsOfFreq[j]; i < fftBinsOfFreq[j + 1]; i++) {
      filterBank[j][i] = (i - fftBinsOfFreq[j]) / (fftBinsOfFreq[j + 1] - fftBinsOfFreq[j]);
    }
    for (let i = fftBinsOfFreq[j + 1]; i < fftBinsOfFreq[j + 2]; i++) {
      filterBank[j][i] = (fftBinsOfFreq[j + 2] - i) / (fftBinsOfFreq[j + 2] - fftBinsOfFreq[j + 1]);
    }
  }
  return filterBank;
}
/**
 * Octave number of a frequency, where A440/16 (A-1, ~27.5 Hz for standard
 * tuning) maps to octave 0 and each doubling adds one octave.
 * @param {number} freq - frequency in Hz
 * @param {number} A440 - tuning reference for A4 in Hz
 * @returns {number} octave count (fractional)
 */
function hzToOctaves(freq, A440) {
  const scaled = 16 * freq / A440;
  return Math.log2(scaled);
}
/**
 * Normalize each column of a 2-D array by its Euclidean (L2) norm.
 * Columns whose norm is 0 are divided by 1 instead, leaving them unchanged.
 * @param {number[][]} a - rectangular matrix (rows of equal length)
 * @returns {number[][]} new matrix; input is not mutated
 */
function normalizeByColumn(a) {
  // Accumulate the sum of squares per column, then take square roots.
  const norms = a[0].map(() => 0);
  for (const row of a) {
    row.forEach((cell, j) => {
      norms[j] += Math.pow(cell, 2);
    });
  }
  for (let j = 0; j < norms.length; j++) {
    norms[j] = Math.sqrt(norms[j]);
  }
  return a.map((row) => row.map((v, j) => v / (norms[j] || 1)));
}
/**
 * Build a chroma (pitch-class) filter bank mapping FFT bins to numFilters
 * pitch classes, with Gaussian weighting per bin and optional Gaussian
 * octave emphasis centered on `centerOctave`.
 * (Original span was line-duplicated by the diff scrape; de-duplicated here.)
 * @param {number} numFilters - pitch classes (typically 12)
 * @param {number} sampleRate
 * @param {number} bufferSize - FFT size
 * @param {number} [centerOctave=5] - center of the octave weighting curve
 * @param {number} [octaveWidth=2] - width of the octave weighting (0 disables)
 * @param {boolean} [baseC=true] - rotate so row 0 is C rather than A
 * @param {number} [A440=440] - tuning reference
 * @returns {number[][]} numFilters rows of floor(bufferSize/2)+1 weights
 */
function createChromaFilterBank(numFilters, sampleRate, bufferSize, centerOctave = 5, octaveWidth = 2, baseC = true, A440 = 440) {
  var numOutputBins = Math.floor(bufferSize / 2) + 1;
  var frequencyBins = new Array(bufferSize).fill(0).map((_, i) => numFilters * hzToOctaves(sampleRate * i / bufferSize, A440));
  // Set a value for the 0 Hz bin that is 1.5 octaves below bin 1
  // (so chroma is 50% rotated from bin 1, and bin width is broad)
  frequencyBins[0] = frequencyBins[1] - 1.5 * numFilters;
  // NOTE(review): the trailing `, 1` here is passed as Array.map's thisArg,
  // so Math.max receives a single argument (identity). The intent was likely
  // Math.max(v - frequencyBins[i], 1). Preserved as-is to keep output
  // identical — confirm against upstream meyda before changing.
  var binWidthBins = frequencyBins.slice(1).map((v, i) => Math.max(v - frequencyBins[i]), 1).concat([1]);
  var halfNumFilters = Math.round(numFilters / 2);
  var filterPeaks = new Array(numFilters).fill(0).map((_, i) => frequencyBins.map(frq => (10 * numFilters + halfNumFilters + frq - i) % numFilters - halfNumFilters));
  var weights = filterPeaks.map((row, i) => row.map((_, j) => Math.exp(-0.5 * Math.pow(2 * filterPeaks[i][j] / binWidthBins[j], 2))));
  weights = normalizeByColumn(weights);
  if (octaveWidth) {
    var octaveWeights = frequencyBins.map(v => Math.exp(-0.5 * Math.pow((v / numFilters - centerOctave) / octaveWidth, 2)));
    weights = weights.map(row => row.map((cell, j) => cell * octaveWeights[j]));
  }
  if (baseC) {
    weights = [...weights.slice(3), ...weights.slice(0, 3)];
  }
  return weights.map(row => row.slice(0, numOutputBins));
}
/**
 * Split a buffer into overlapping frames.
 * Produces 1 + floor((buffer.length - frameLength) / hopLength) frames; any
 * trailing samples that don't fill a whole frame are dropped.
 * (De-duplicated from a diff-scrape artifact; also fixes the "less that"
 * typo in the error messages.)
 * @param {Array|TypedArray} buffer
 * @param {number} frameLength - samples per frame (>= 1)
 * @param {number} hopLength - samples between frame starts (>= 1)
 * @returns {Array} array of frames (same slice type as the input buffer)
 * @throws {Error} when the buffer is shorter than one frame or a length is < 1
 */
function frame(buffer, frameLength, hopLength) {
  if (buffer.length < frameLength) {
    throw new Error("Buffer is too short for frame length");
  }
  if (hopLength < 1) {
    throw new Error("Hop length cannot be less than 1");
  }
  if (frameLength < 1) {
    throw new Error("Frame length cannot be less than 1");
  }
  const numFrames = 1 + Math.floor((buffer.length - frameLength) / hopLength);
  return new Array(numFrames).fill(0).map((_, i) => buffer.slice(i * hopLength, i * hopLength + frameLength));
}
/**
 * Root-mean-square level of the time-domain signal.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {{signal: ArrayLike<number>}} args
 * @returns {number} sqrt(mean(signal[i]^2))
 * @throws {TypeError} when args.signal is not an object/array
 */
function rms (args) {
  if (typeof args.signal !== "object") {
    throw new TypeError();
  }
  var rms = 0;
  for (var i = 0; i < args.signal.length; i++) {
    rms += Math.pow(args.signal[i], 2);
  }
  rms = rms / args.signal.length;
  rms = Math.sqrt(rms);
  return rms;
}
/**
 * Total energy of the time-domain signal: sum of squared magnitudes.
 * Takes its options object positionally via `arguments[0]`, matching the
 * other extractors in this bundle. (De-duplicated from a diff-scrape artifact.)
 * @returns {number} sum(|signal[i]|^2)
 * @throws {TypeError} when the signal is not an object/array
 */
function energy () {
  if (typeof arguments[0].signal !== "object") {
    throw new TypeError();
  }
  var energy = 0;
  for (var i = 0; i < arguments[0].signal.length; i++) {
    energy += Math.pow(Math.abs(arguments[0].signal[i]), 2);
  }
  return energy;
}
/**
 * Spectral slope: the gradient of a linear regression of amplitude over
 * bin frequency, normalised by total amplitude.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {{ampSpectrum: ArrayLike<number>, sampleRate: number, bufferSize: number}} args
 * @returns {number} regression slope
 * @throws {TypeError} when args.ampSpectrum is not an object/array
 */
function spectralSlope (args) {
  if (typeof args.ampSpectrum !== "object") {
    throw new TypeError();
  }
  //linear regression
  let ampSum = 0;
  let freqSum = 0;
  let freqs = new Float32Array(args.ampSpectrum.length);
  let powFreqSum = 0;
  let ampFreqSum = 0;
  for (var i = 0; i < args.ampSpectrum.length; i++) {
    ampSum += args.ampSpectrum[i];
    let curFreq = i * args.sampleRate / args.bufferSize;
    freqs[i] = curFreq;
    powFreqSum += curFreq * curFreq;
    freqSum += curFreq;
    ampFreqSum += curFreq * args.ampSpectrum[i];
  }
  return (args.ampSpectrum.length * ampFreqSum - freqSum * ampSum) / (ampSum * (powFreqSum - Math.pow(freqSum, 2)));
}
/**
 * i-th spectral moment of an amplitude spectrum:
 * sum(k^i * |A[k]|) / sum(A[k]). mu(1, ...) is the spectral centroid in bins.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {number} i - moment order
 * @param {ArrayLike<number>} amplitudeSpect
 * @returns {number}
 */
function mu(i, amplitudeSpect) {
  var numerator = 0;
  var denominator = 0;
  for (var k = 0; k < amplitudeSpect.length; k++) {
    numerator += Math.pow(k, i) * Math.abs(amplitudeSpect[k]);
    denominator += amplitudeSpect[k];
  }
  return numerator / denominator;
}
/**
 * Spectral centroid: the first spectral moment (centre of mass of the
 * amplitude spectrum, in bins). Delegates to mu(1, ...).
 * (De-duplicated from a diff-scrape artifact.)
 * @returns {number}
 * @throws {TypeError} when the amplitude spectrum is not an object/array
 */
function spectralCentroid () {
  if (typeof arguments[0].ampSpectrum !== "object") {
    throw new TypeError();
  }
  return mu(1, arguments[0].ampSpectrum);
}
/**
 * Spectral rolloff: the frequency below which 99% of the spectrum's energy
 * (sum of amplitudes here) is contained. Walks down from the top bin,
 * subtracting amplitude until the running total falls to the 99% threshold.
 * (De-duplicated from a diff-scrape artifact.)
 * @returns {number} rolloff frequency in Hz
 * @throws {TypeError} when the amplitude spectrum is not an object/array
 */
function spectralRolloff () {
  if (typeof arguments[0].ampSpectrum !== "object") {
    throw new TypeError();
  }
  var ampspec = arguments[0].ampSpectrum;
  //calculate nyquist bin
  var nyqBin = arguments[0].sampleRate / (2 * (ampspec.length - 1));
  var ec = 0;
  for (var i = 0; i < ampspec.length; i++) {
    ec += ampspec[i];
  }
  var threshold = 0.99 * ec;
  var n = ampspec.length - 1;
  while (ec > threshold && n >= 0) {
    ec -= ampspec[n];
    --n;
  }
  return (n + 1) * nyqBin;
}
/**
 * Spectral flatness: ratio of the geometric mean to the arithmetic mean of
 * the amplitude spectrum (1 for white noise, ->0 for pure tones).
 * Computed as exp(mean(log A)) * N / sum(A).
 * (De-duplicated from a diff-scrape artifact.)
 * @returns {number}
 * @throws {TypeError} when the amplitude spectrum is not an object/array
 */
function spectralFlatness () {
  if (typeof arguments[0].ampSpectrum !== "object") {
    throw new TypeError();
  }
  var numerator = 0;
  var denominator = 0;
  for (var i = 0; i < arguments[0].ampSpectrum.length; i++) {
    numerator += Math.log(arguments[0].ampSpectrum[i]);
    denominator += arguments[0].ampSpectrum[i];
  }
  return Math.exp(numerator / arguments[0].ampSpectrum.length) * arguments[0].ampSpectrum.length / denominator;
}
/**
 * Spectral spread: standard deviation of the spectrum about its centroid,
 * sqrt(mu2 - mu1^2), in bins. (De-duplicated from a diff-scrape artifact.)
 * @param {{ampSpectrum: ArrayLike<number>}} args
 * @returns {number}
 * @throws {TypeError} when args.ampSpectrum is not an object/array
 */
function spectralSpread (args) {
  if (typeof args.ampSpectrum !== "object") {
    throw new TypeError();
  }
  return Math.sqrt(mu(2, args.ampSpectrum) - Math.pow(mu(1, args.ampSpectrum), 2));
}
/**
 * Spectral skewness: third standardized moment of the spectrum,
 * (2*mu1^3 - 3*mu1*mu2 + mu3) / (mu2 - mu1^2)^(3/2).
 * (De-duplicated from a diff-scrape artifact.)
 * @param {{ampSpectrum: ArrayLike<number>}} args
 * @returns {number}
 * @throws {TypeError} when args.ampSpectrum is not an object/array
 */
function spectralSkewness (args) {
  if (typeof args.ampSpectrum !== "object") {
    throw new TypeError();
  }
  var mu1 = mu(1, args.ampSpectrum);
  var mu2 = mu(2, args.ampSpectrum);
  var mu3 = mu(3, args.ampSpectrum);
  var numerator = 2 * Math.pow(mu1, 3) - 3 * mu1 * mu2 + mu3;
  var denominator = Math.pow(Math.sqrt(mu2 - Math.pow(mu1, 2)), 3);
  return numerator / denominator;
}
/**
 * Spectral kurtosis: fourth standardized moment of the spectrum,
 * (-3*mu1^4 + 6*mu1*mu2 - 4*mu1*mu3 + mu4) / (mu2 - mu1^2)^2.
 * (De-duplicated from a diff-scrape artifact.)
 * @returns {number}
 * @throws {TypeError} when the amplitude spectrum is not an object/array
 */
function spectralKurtosis () {
  if (typeof arguments[0].ampSpectrum !== "object") {
    throw new TypeError();
  }
  var ampspec = arguments[0].ampSpectrum;
  var mu1 = mu(1, ampspec);
  var mu2 = mu(2, ampspec);
  var mu3 = mu(3, ampspec);
  var mu4 = mu(4, ampspec);
  var numerator = -3 * Math.pow(mu1, 4) + 6 * mu1 * mu2 - 4 * mu1 * mu3 + mu4;
  var denominator = Math.pow(Math.sqrt(mu2 - Math.pow(mu1, 2)), 4);
  return numerator / denominator;
}
/**
 * Zero-crossing rate: the number of sign changes in the time-domain signal.
 * A sample equal to 0 counts as non-negative.
 * (De-duplicated from a diff-scrape artifact.)
 * @returns {number} count of zero crossings
 * @throws {TypeError} when the signal is not an object/array
 */
function zcr () {
  if (typeof arguments[0].signal !== "object") {
    throw new TypeError();
  }
  var zcr = 0;
  for (var i = 1; i < arguments[0].signal.length; i++) {
    if (arguments[0].signal[i - 1] >= 0 && arguments[0].signal[i] < 0 || arguments[0].signal[i - 1] < 0 && arguments[0].signal[i] >= 0) {
      zcr++;
    }
  }
  return zcr;
}
/**
 * Specific and total loudness over 24 Bark bands.
 * The precomputed per-bin Bark values (args.barkScale) are partitioned into
 * 24 equal Bark-width bands; each band's amplitude sum is compressed with
 * an exponent of 0.23 (Stevens' power law), and `total` is the sum over bands.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {{ampSpectrum: ArrayLike<number>, barkScale: ArrayLike<number>}} args
 * @returns {{specific: Float32Array, total: number}}
 * @throws {TypeError} when either input is not an object/array
 */
function loudness (args) {
  if (typeof args.ampSpectrum !== "object" || typeof args.barkScale !== "object") {
    throw new TypeError();
  }
  var NUM_BARK_BANDS = 24;
  var specific = new Float32Array(NUM_BARK_BANDS);
  var total = 0;
  var normalisedSpectrum = args.ampSpectrum;
  var bbLimits = new Int32Array(NUM_BARK_BANDS + 1);
  bbLimits[0] = 0;
  var currentBandEnd = args.barkScale[normalisedSpectrum.length - 1] / NUM_BARK_BANDS;
  var currentBand = 1;
  // Find the bin index where each Bark band ends.
  for (let i = 0; i < normalisedSpectrum.length; i++) {
    while (args.barkScale[i] > currentBandEnd) {
      bbLimits[currentBand++] = i;
      currentBandEnd = currentBand * args.barkScale[normalisedSpectrum.length - 1] / NUM_BARK_BANDS;
    }
  }
  bbLimits[NUM_BARK_BANDS] = normalisedSpectrum.length - 1;
  //process
  for (let i = 0; i < NUM_BARK_BANDS; i++) {
    let sum = 0;
    for (let j = bbLimits[i]; j < bbLimits[i + 1]; j++) {
      sum += normalisedSpectrum[j];
    }
    specific[i] = Math.pow(sum, 0.23);
  }
  //get total loudness
  for (let i = 0; i < specific.length; i++) {
    total += specific[i];
  }
  return {
    specific: specific,
    total: total
  };
}
/**
 * Perceptual spread: ((total - max specific loudness) / total)^2 — how evenly
 * loudness is distributed across Bark bands (0 = single band, ->1 = spread).
 * (De-duplicated from a diff-scrape artifact.)
 * @returns {number}
 * @throws {TypeError} when the signal is not an object/array
 */
function perceptualSpread () {
  if (typeof arguments[0].signal !== "object") {
    throw new TypeError();
  }
  var loudnessValue = loudness(arguments[0]);
  var max = 0;
  for (var i = 0; i < loudnessValue.specific.length; i++) {
    if (loudnessValue.specific[i] > max) {
      max = loudnessValue.specific[i];
    }
  }
  var spread = Math.pow((loudnessValue.total - max) / loudnessValue.total, 2);
  return spread;
}
/**
 * Perceptual sharpness: weighted sum of specific loudness per Bark band,
 * scaled by 0.11 / total loudness. Bands above index 14 use the exponential
 * weighting 0.066 * e^(0.171 * band).
 * (De-duplicated from a diff-scrape artifact.)
 * @returns {number}
 * @throws {TypeError} when the signal is not an object/array
 */
function perceptualSharpness () {
  if (typeof arguments[0].signal !== "object") {
    throw new TypeError();
  }
  var loudnessValue = loudness(arguments[0]);
  var spec = loudnessValue.specific;
  var output = 0;
  for (var i = 0; i < spec.length; i++) {
    if (i < 15) {
      // NOTE(review): uses spec[i + 1] while weighting with (i + 1) — looks
      // like it may have been intended as spec[i]; preserved as-is to keep
      // output identical. Confirm against upstream meyda.
      output += (i + 1) * spec[i + 1];
    } else {
      output += 0.066 * Math.exp(0.171 * (i + 1));
    }
  }
  output *= 0.11 / loudnessValue.total;
  return output;
}
/**
 * Power spectrum: element-wise square of the amplitude spectrum.
 * (De-duplicated from a diff-scrape artifact.)
 * @returns {Float32Array} ampSpectrum[i]^2 per bin
 * @throws {TypeError} when the amplitude spectrum is not an object/array
 */
function powerSpectrum () {
  if (typeof arguments[0].ampSpectrum !== "object") {
    throw new TypeError();
  }
  var powerSpectrum = new Float32Array(arguments[0].ampSpectrum.length);
  for (var i = 0; i < powerSpectrum.length; i++) {
    powerSpectrum[i] = Math.pow(arguments[0].ampSpectrum[i], 2);
  }
  return powerSpectrum;
}
/*===========================================================================*\
 * Discrete Cosine Transform
 *
 * (c) Vail Systems. Joshua Jung and Ben Bryan. 2015
 *
 * This code is not designed to be highly optimized but as an educational
 * tool to understand the Mel-scale and its related coefficients used in
 * human speech analysis.
\*===========================================================================*/
// Memoized cosine tables keyed by input size (see memoizeCosines below);
// lazily initialised so repeated DCTs of the same length reuse one table.
var cosMap = null;
/**
 * Populate cosMap[N] with the N*N DCT-II basis values
 * cos(PI/N * (n + 0.5) * k), laid out row-major by coefficient index k.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {number} N - transform size
 */
var memoizeCosines = function (N) {
  cosMap = cosMap || {};
  cosMap[N] = new Array(N * N);
  var PI_N = Math.PI / N;
  for (var k = 0; k < N; k++) {
    for (var n = 0; n < N; n++) {
      cosMap[N][n + k * N] = Math.cos(PI_N * (n + 0.5) * k);
    }
  }
};
/**
 * DCT-II of `signal`, scaled by `scale` (default 2), using the memoized
 * cosine table for the signal's length.
 * (De-duplicated from a diff-scrape artifact.)
 * @param {number[]} signal
 * @param {number} [scale=2]
 * @returns {number[]} DCT coefficients, same length as the input
 */
function dct$1(signal, scale) {
  var L = signal.length;
  scale = scale || 2;
  if (!cosMap || !cosMap[L]) memoizeCosines(L);
  var coefficients = signal.map(function () {
    return 0;
  });
  return coefficients.map(function (__, ix) {
    return scale * signal.reduce(function (prev, cur, ix_, arr) {
      return prev + cur * cosMap[L][ix_ + ix * L];
    }, 0);
  });
}
// CommonJS-style re-export produced by the bundler.
var dct_1 = dct$1;
// Alias used by the MFCC extractor below. (De-duplicated declaration.)
var dct = dct_1;
/**
 * Mel-frequency cepstral coefficients.
 * Filters the power spectrum through the mel filter bank, log-compresses
 * each band energy (log(x + 1)), and takes the DCT, returning the first
 * `numberOfMFCCCoefficients` (clamped to [1, 40], default 13).
 * Tutorial: http://practicalcryptography.com/miscellaneous/machine-learning
 * /guide-mel-frequency-cepstral-coefficients-mfccs/
 * (De-duplicated from a diff-scrape artifact.)
 * @param {{ampSpectrum: ArrayLike<number>, melFilterBank: number[][],
 *          bufferSize: number, numberOfMFCCCoefficients?: number}} args
 * @returns {number[]} MFCC coefficients
 * @throws {TypeError|Error} on missing inputs or an undersized filter bank
 */
function mfcc (args) {
  if (typeof args.ampSpectrum !== "object") {
    throw new TypeError("Valid ampSpectrum is required to generate MFCC");
  }
  if (typeof args.melFilterBank !== "object") {
    throw new TypeError("Valid melFilterBank is required to generate MFCC");
  }
  let numberOfMFCCCoefficients = Math.min(40, Math.max(1, args.numberOfMFCCCoefficients || 13));
  let powSpec = powerSpectrum(args);
  let numFilters = args.melFilterBank.length;
  let filtered = Array(numFilters);
  if (numFilters < numberOfMFCCCoefficients) {
    throw new Error("Insufficient filter bank for requested number of coefficients");
  }
  let loggedMelBands = new Float32Array(numFilters);
  for (let i = 0; i < loggedMelBands.length; i++) {
    filtered[i] = new Float32Array(args.bufferSize / 2);
    loggedMelBands[i] = 0;
    for (let j = 0; j < args.bufferSize / 2; j++) {
      //point-wise multiplication between power spectrum and filterbanks.
      filtered[i][j] = args.melFilterBank[i][j] * powSpec[j];
      //summing up all of the coefficients into one array
      loggedMelBands[i] += filtered[i][j];
    }
    //log each coefficient.
    loggedMelBands[i] = Math.log(loggedMelBands[i] + 1);
  }
  //dct
  let loggedMelBandsArray = Array.prototype.slice.call(loggedMelBands);
  let mfccs = dct(loggedMelBandsArray).slice(0, numberOfMFCCCoefficients);
  return mfccs;
}
/**
 * Compute a chromagram by projecting the amplitude spectrum onto each row of
 * the chroma filter bank, then normalizing so the largest bin equals 1.
 * @param {Object} args
 * @param {Array.<number>} args.ampSpectrum - amplitude spectrum of the frame
 * @param {Array.<Array.<number>>} args.chromaFilterBank - one filter row per chroma bin
 * @returns {Array.<number>} normalized chroma bins (untouched when all bins are 0)
 * @throws {TypeError} when ampSpectrum or chromaFilterBank is missing
 */
function chroma (args) {
  if (typeof args.ampSpectrum !== "object") {
    throw new TypeError("Valid ampSpectrum is required to generate chroma");
  }

  if (typeof args.chromaFilterBank !== "object") {
    throw new TypeError("Valid chromaFilterBank is required to generate chroma");
  }

  // Dot product of the spectrum with each chroma filter row.
  var chromagram = args.chromaFilterBank.map((row, i) => args.ampSpectrum.reduce((acc, v, j) => acc + v * row[j], 0));
  var maxVal = Math.max(...chromagram);
  // Peak-normalize; if every bin is zero there is nothing to scale.
  return maxVal ? chromagram.map(v => v / maxVal) : chromagram;
}
/**
 * Spectral flux: sum of the half-wave-rectified magnitude differences between
 * the current and previous frame.
 * @param {Object} args
 * @param {Array.<number>|Float32Array} args.signal - current frame
 * @param {Array.<number>|Float32Array} args.previousSignal - previous frame
 * @param {number} args.bufferSize - frame length
 * @returns {number} the accumulated positive spectral difference
 * @throws {TypeError} when signal or previousSignal is missing
 */
function spectralFlux (args) {
  if (typeof args.signal !== "object" || typeof args.previousSignal != "object") {
    throw new TypeError();
  }

  let sf = 0;

  // NOTE(review): the loop starts at -(bufferSize / 2), so negative indices
  // read `undefined` from the arrays and would contribute NaN; this index
  // range mirrors the original code — confirm the intended bounds upstream.
  // Fixed here: `signal.length` was a bare (undeclared) reference, and `x`
  // leaked as an implicit global.
  for (let i = -(args.bufferSize / 2); i < args.signal.length / 2 - 1; i++) {
    let x = Math.abs(args.signal[i]) - Math.abs(args.previousSignal[i]);
    // (x + |x|) / 2 keeps only positive differences (half-wave rectification).
    sf += (x + Math.abs(x)) / 2;
  }

  return sf;
}
// Passthrough extractor: returns the raw time-domain signal for the frame.
let buffer = function (args) {
  return args.signal;
};
// Passthrough extractor: returns the precomputed complex spectrum.
let complexSpectrum = function (args) {
  return args.complexSpectrum;
};
// Passthrough extractor: returns the precomputed amplitude spectrum.
let amplitudeSpectrum = function (args) {
  return args.ampSpectrum;
};
// Frozen registry of all feature extractors, keyed by feature name.
// __proto__: null makes it a prototype-less dictionary.
var extractors = /*#__PURE__*/Object.freeze({
  __proto__: null,
  buffer: buffer,
  rms: rms,
  energy: energy,
  complexSpectrum: complexSpectrum,
  spectralSlope: spectralSlope,
  spectralCentroid: spectralCentroid,
  spectralRolloff: spectralRolloff,
  spectralFlatness: spectralFlatness,
  spectralSpread: spectralSpread,
  spectralSkewness: spectralSkewness,
  spectralKurtosis: spectralKurtosis,
  amplitudeSpectrum: amplitudeSpectrum,
  zcr: zcr,
  loudness: loudness,
  perceptualSpread: perceptualSpread,
  perceptualSharpness: perceptualSharpness,
  powerSpectrum: powerSpectrum,
  mfcc: mfcc,
  chroma: chroma,
  spectralFlux: spectralFlux
});
/**
 * Babel helper: convert an array or iterable into a plain array
 * (used by spread-operator transpilation).
 * @param {Array|Iterable} arr
 * @returns {Array} a shallow copy of `arr`
 */
function _toConsumableArray(arr) {
  if (Array.isArray(arr)) {
    // Manual element copy for plain arrays.
    for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) {
      arr2[i] = arr[i];
    }

    return arr2;
  } else {
    // Any other iterable (e.g. a string) goes through Array.from.
    return Array.from(arr);
  }
}
// Caches keyed by FFT size: bit-reversal index tables and zero-filled buffers.
var memoizedReversal = {};
var memoizedZeroBuffers = {};
/**
 * Build a complex signal ({real, imag}) from a real-valued signal, reusing a
 * memoized zero buffer for the imaginary part.
 * @param {Array.<number>|{real: Array.<number>}} signal
 * @returns {{real: Array.<number>, imag: Array.<number>}}
 */
var constructComplexArray = function constructComplexArray(signal) {
  var complexSignal = {};
  // Accept either a bare array or an object that already has a .real part.
  complexSignal.real = signal.real === undefined ? signal.slice() : signal.real.slice();
  var bufferSize = complexSignal.real.length;

  if (memoizedZeroBuffers[bufferSize] === undefined) {
    memoizedZeroBuffers[bufferSize] = Array.apply(null, Array(bufferSize)).map(Number.prototype.valueOf, 0);
  }

  // Slice so callers get their own copy, not the shared cached buffer.
  complexSignal.imag = memoizedZeroBuffers[bufferSize].slice();
  return complexSignal;
};
/**
 * Build (and memoize) the bit-reversal permutation table for an FFT of size N.
 * @param {number} N - FFT size
 * @returns {Object.<number, number>} map from index to its bit-reversed index
 */
var bitReverseArray = function bitReverseArray(N) {
  if (memoizedReversal[N] === undefined) {
    var maxBinaryLength = (N - 1).toString(2).length; //get the binary length of the largest index.

    var templateBinary = '0'.repeat(maxBinaryLength); //create a template binary of that length.

    var reversed = {};

    for (var n = 0; n < N; n++) {
      var currBinary = n.toString(2); //get binary value of current index.
      //prepend zeros from template to current binary. This makes binary values of all indices have the same length.

      currBinary = templateBinary.substr(currBinary.length) + currBinary;
      currBinary = [].concat(_toConsumableArray(currBinary)).reverse().join(''); //reverse

      reversed[n] = parseInt(currBinary, 2); //convert to decimal
    }

    memoizedReversal[N] = reversed; //save
  }

  return memoizedReversal[N];
};
// complex multiplication: (a.real + a.imag*i) * (b.real + b.imag*i)
var multiply = function multiply(a, b) {
  return {
    'real': a.real * b.real - a.imag * b.imag,
    'imag': a.real * b.imag + a.imag * b.real
  };
};
// complex addition (component-wise)
var add = function add(a, b) {
  return {
    'real': a.real + b.real,
    'imag': a.imag + b.imag
  };
};
// complex subtraction (component-wise)
var subtract = function subtract(a, b) {
  return {
    'real': a.real - b.real,
    'imag': a.imag - b.imag
  };
};
// Twiddle factor via Euler's formula: e^(-2*pi*i*kn/N) = cos(x) + i*sin(x)
var euler = function euler(kn, N) {
  var x = -2 * Math.PI * kn / N;
  return {
    'real': Math.cos(x),
    'imag': Math.sin(x)
  };
};
// complex conjugate — NOTE: mutates `a` in place and returns it.
var conj = function conj(a) {
  a.imag *= -1;
  return a;
};
// Bundle of complex-arithmetic helpers used by the FFT/IFFT routines.
var utils$1 = {
  bitReverseArray: bitReverseArray,
  multiply: multiply,
  add: add,
  subtract: subtract,
  euler: euler,
  conj: conj,
  constructComplexArray: constructComplexArray
};
// Local alias for the complex-arithmetic helper bundle.
var utils = utils$1;
/**
 * Radix-2 decimation-in-time FFT. Accepts a real array or a {real, imag}
 * complex signal; operates on copies, never the caller's arrays.
 * @param {Array.<number>|{real: Array.<number>, imag: Array.<number>}} signal
 * @returns {{real: Array.<number>, imag: Array.<number>}} the complex spectrum
 * @throws {Error} when the input length is not a power of 2, or real/imag lengths differ
 */
var fft = function fft(signal) {
  var complexSignal = {};

  // Real input gets a zero imaginary part; complex input is copied.
  if (signal.real === undefined || signal.imag === undefined) {
    complexSignal = utils.constructComplexArray(signal);
  } else {
    complexSignal.real = signal.real.slice();
    complexSignal.imag = signal.imag.slice();
  }

  var N = complexSignal.real.length;
  var logN = Math.log2(N);
  if (Math.round(logN) != logN) throw new Error('Input size must be a power of 2.');

  if (complexSignal.real.length != complexSignal.imag.length) {
    throw new Error('Real and imaginary components must have the same length.');
  }

  var bitReversedIndices = utils.bitReverseArray(N);

  // Reorder the input into bit-reversed order before the butterfly stages.
  var ordered = {
    'real': [],
    'imag': []
  };

  for (var i = 0; i < N; i++) {
    ordered.real[bitReversedIndices[i]] = complexSignal.real[i];
    ordered.imag[bitReversedIndices[i]] = complexSignal.imag[i];
  }

  for (var _i = 0; _i < N; _i++) {
    complexSignal.real[_i] = ordered.real[_i];
    complexSignal.imag[_i] = ordered.imag[_i];
  }

  // iterate over the number of stages
  for (var n = 1; n <= logN; n++) {
    var currN = Math.pow(2, n);

    // find twiddle factors
    for (var k = 0; k < currN / 2; k++) {
      var twiddle = utils.euler(k, currN);

      // on each block of FT, implement the butterfly diagram
      for (var m = 0; m < N / currN; m++) {
        var currEvenIndex = currN * m + k;
        var currOddIndex = currN * m + k + currN / 2;
        var currEvenIndexSample = {
          'real': complexSignal.real[currEvenIndex],
          'imag': complexSignal.imag[currEvenIndex]
        };
        var currOddIndexSample = {
          'real': complexSignal.real[currOddIndex],
          'imag': complexSignal.imag[currOddIndex]
        };
        var odd = utils.multiply(twiddle, currOddIndexSample);
        var subtractionResult = utils.subtract(currEvenIndexSample, odd);
        complexSignal.real[currOddIndex] = subtractionResult.real;
        complexSignal.imag[currOddIndex] = subtractionResult.imag;
        var additionResult = utils.add(odd, currEvenIndexSample);
        complexSignal.real[currEvenIndex] = additionResult.real;
        complexSignal.imag[currEvenIndex] = additionResult.imag;
      }
    }
  }

  return complexSignal;
};
/**
 * Inverse FFT implemented via the forward FFT: conjugate the input, run fft,
 * then normalize by N.
 * @param {{real: Array.<number>, imag: Array.<number>}} signal - complex spectrum
 * @returns {{real: Array.<number>, imag: Array.<number>}} the complex time-domain signal
 * @throws {Error} when the input is not a complex {real, imag} pair
 */
var ifft = function ifft(signal) {
  if (signal.real === undefined || signal.imag === undefined) {
    throw new Error("IFFT only accepts a complex input.");
  }

  var N = signal.real.length;
  var complexSignal = {
    'real': [],
    'imag': []
  };

  //take complex conjugate in order to be able to use the regular FFT for IFFT
  for (var i = 0; i < N; i++) {
    var currentSample = {
      'real': signal.real[i],
      'imag': signal.imag[i]
    };
    var conjugateSample = utils.conj(currentSample);
    complexSignal.real[i] = conjugateSample.real;
    complexSignal.imag[i] = conjugateSample.imag;
  }

  //compute
  var X = fft(complexSignal);

  // normalize by N; NOTE(review): the result is not conjugated back, matching
  // the original — for real-valued outputs the imaginary parts are ~0 so this
  // does not matter, but confirm before using with complex outputs.
  complexSignal.real = X.real.map(function (val) {
    return val / N;
  });
  complexSignal.imag = X.imag.map(function (val) {
    return val / N;
  });
  return complexSignal;
};
// Exported FFT module surface.
var fft_1 = {
  fft: fft,
  ifft: ifft
};
/**
 * MeydaAnalyzer
 * @classdesc
 * Meyda's interface to the Web Audio API. MeydaAnalyzer abstracts an API on
 * top of the Web Audio API's ScriptProcessorNode, running the Meyda audio
 * feature extractors inside that context.
 *
 * MeydaAnalyzer's constructor should not be called directly - MeydaAnalyzer
 * objects should be generated using the {@link Meyda.createMeydaAnalyzer}
 * factory function in the main Meyda class.
 *
 * @example
 * const analyzer = Meyda.createMeydaAnalyzer({
 *   "audioContext": audioContext,
 *   "source": source,
 *   "bufferSize": 512,
 *   "featureExtractors": ["rms"],
 *   "inputs": 2,
 *   "numberOfMFCCCoefficients": 20,
 *   "callback": features => {
 *     levelRangeElement.value = features.rms;
 *   }
 * });
 * @hideconstructor
 */
class MeydaAnalyzer {
  /**
   * @param {MeydaOptions} options - configuration for the analyzer
   * @param {Object} _this - the shared Meyda singleton this analyzer writes its state to
   */
  constructor(options, _this) {
    this._m = _this;

    if (!options.audioContext) {
      // NOTE(review): this branch reads `errors` while the two below read
      // `_errors` — confirm which property name the Meyda object exposes.
      throw this._m.errors.noAC;
    } else if (options.bufferSize && !isPowerOfTwo(options.bufferSize)) {
      throw this._m._errors.notPow2;
    } else if (!options.source) {
      throw this._m._errors.noSource;
    }

    this._m.audioContext = options.audioContext; // TODO: validate options

    this._m.bufferSize = options.bufferSize || this._m.bufferSize || 256;
    this._m.hopSize = options.hopSize || this._m.hopSize || this._m.bufferSize;
    this._m.sampleRate = options.sampleRate || this._m.audioContext.sampleRate || 44100;
    this._m.callback = options.callback;
    this._m.windowingFunction = options.windowingFunction || "hanning";
    this._m.featureExtractors = extractors;
    this._m.EXTRACTION_STARTED = options.startImmediately || false;
    this._m.channel = typeof options.channel === "number" ? options.channel : 0;
    this._m.inputs = options.inputs || 1;
    this._m.outputs = options.outputs || 1;
    this._m.numberOfMFCCCoefficients = options.numberOfMFCCCoefficients || this._m.numberOfMFCCCoefficients || 13;

    //create nodes
    this._m.spn = this._m.audioContext.createScriptProcessor(this._m.bufferSize, this._m.inputs, this._m.outputs);
    this._m.spn.connect(this._m.audioContext.destination);

    this._m._featuresToExtract = options.featureExtractors || [];

    //always recalculate BS and MFB when a new Meyda analyzer is created.
    this._m.barkScale = createBarkScale(this._m.bufferSize, this._m.sampleRate, this._m.bufferSize);
    this._m.melFilterBank = createMelFilterBank(Math.max(this._m.melBands, this._m.numberOfMFCCCoefficients), this._m.sampleRate, this._m.bufferSize);
    this._m.inputData = null;
    this._m.previousInputData = null;
    this._m.frame = null;
    this._m.previousFrame = null;
    this.setSource(options.source);

    this._m.spn.onaudioprocess = e => {
      // Keep the previous buffer so overlapping frames can be assembled.
      if (this._m.inputData !== null) {
        this._m.previousInputData = this._m.inputData;
      }

      this._m.inputData = e.inputBuffer.getChannelData(this._m.channel);

      if (!this._m.previousInputData) {
        var buffer = this._m.inputData;
      } else {
        // Stitch the tail of the previous buffer onto the new one so hop
        // sizes smaller than the buffer size see contiguous audio.
        var buffer = new Float32Array(this._m.previousInputData.length + this._m.inputData.length - this._m.hopSize);
        buffer.set(this._m.previousInputData.slice(this._m.hopSize));
        buffer.set(this._m.inputData, this._m.previousInputData.length - this._m.hopSize);
      }

      var frames = frame(buffer, this._m.bufferSize, this._m.hopSize);
      frames.forEach(f => {
        this._m.frame = f;
        var features = this._m.extract(this._m._featuresToExtract, this._m.frame, this._m.previousFrame);

        // call callback if applicable
        if (typeof this._m.callback === "function" && this._m.EXTRACTION_STARTED) {
          this._m.callback(features);
        }

        this._m.previousFrame = this._m.frame;
      });
    };
  }
  /**
   * Start feature extraction
   * The audio features will be passed to the callback function that was defined
   * in the MeydaOptions that were passed to the factory when constructing the
   * MeydaAnalyzer.
   * @param {(string|Array.<string>)} [features]
   * Change the features that Meyda is extracting. Defaults to the features that
   * were set upon construction in the options parameter.
   * @example
   * analyzer.start('chroma');
   */
  start(features) {
    this._m._featuresToExtract = features || this._m._featuresToExtract;
    this._m.EXTRACTION_STARTED = true;
  }
  /**
   * Stop feature extraction.
   * @example
   * analyzer.stop();
   */
  stop() {
    this._m.EXTRACTION_STARTED = false;
  }
  /**
   * Set the Audio Node for Meyda to listen to.
   * @param {AudioNode} source - The Audio Node for Meyda to listen to
   * @example
   * analyzer.setSource(audioSourceNode);
   */
  setSource(source) {
    // Detach any previously-connected source before wiring the new one in.
    this._m.source && this._m.source.disconnect(this._m.spn);
    this._m.source = source;
    this._m.source.connect(this._m.spn);
  }
  /**
   * Set the channel of the audio node for Meyda to listen to
   * @param {number} channel - the index of the channel on the input audio node
   * for Meyda to listen to.
   * @example
   * analyzer.setChannel(0);
   */
  setChannel(channel) {
    if (channel <= this._m.inputs) {
      this._m.channel = channel;
    } else {
      console.error(`Channel ${channel} does not exist. Make sure you've provided a value for 'inputs' that is greater than ${channel} when instantiating the MeydaAnalyzer`);
    }
  }
  /**
   * Get a set of features from the current frame.
   * @param {(string|Array.<string>)} [features]
   * Change the features that Meyda is extracting
   * @example
   * analyzer.get('spectralFlatness');
   */
  get(features) {
    if (this._m.inputData) {
      return this._m.extract(features || this._m._featuresToExtract, this._m.inputData, this._m.previousInputData);
    } else {
      return null;
    }
  }
}
/**
 * Meyda Module
 * @module meyda
 */
/**
 * Options for constructing a MeydaAnalyzer
 * @typedef {Object} MeydaOptions
 * @property {AudioContext} audioContext - The Audio Context for the MeydaAnalyzer to operate in.
 * @property {AudioNode} source - The Audio Node for Meyda to listen to.
 * @property {number} [bufferSize] - The size of the buffer.
 * @property {number} [hopSize] - The hop size between buffers.
 * @property {number} [sampleRate] - The number of samples per second in the audio context.
 * @property {Function} [callback] - A function to receive the frames of audio features
 * @property {string} [windowingFunction] - The Windowing Function to apply to the signal before transformation to the frequency domain
 * @property {string|Array.<string>} [featureExtractors] - Specify the feature extractors you want to run on the audio.
 * @property {boolean} [startImmediately] - Pass `true` to start feature extraction immediately
 * @property {number} [numberOfMFCCCoefficients] - The number of MFCC co-efficients that the MFCC feature extractor should return
 */
/**
 * Web Audio context
 * Either an {@link AudioContext|https://developer.mozilla.org/en-US/docs/Web/API/AudioContext}
 * or an {@link OfflineAudioContext|https://developer.mozilla.org/en-US/docs/Web/API/OfflineAudioContext}
 * @typedef {Object} AudioContext
 */
/**
 * AudioNode
 * A Web AudioNode
 * @typedef {Object} AudioNode
 */
/**
 * ScriptProcessorNode
 * A Web Audio ScriptProcessorNode
 * @typedef {Object} ScriptProcessorNode
 */
/**
 * @class Meyda
 * @hideconstructor
 * @classdesc
 * The schema for the default export of the Meyda library.
 * @example
 * var Meyda = require('meyda');
 */
var Meyda = {
  /**
   * Meyda stores a reference to the relevant audio context here for use inside
   * the Web Audio API.
   * @instance
   * @member {AudioContext}
   */
  audioContext: null,
  /**
   * Meyda keeps an internal ScriptProcessorNode in which it runs audio feature
   * extraction. The ScriptProcessorNode is stored in this member variable.
   * @instance
   * @member {ScriptProcessorNode}
   */
  spn: null,
  /**
   * The length of each buffer that Meyda will extract audio on. When receiving
   * input via the Web Audio API, the Script Processor Node chunks incoming audio
   * into arrays of this length. Longer buffers allow for more precision in the
   * frequency domain, but increase the amount of time it takes for Meyda to
   * output a set of audio features for the buffer. You can calculate how many
   * sets of audio features Meyda will output per second by dividing the
   * buffer size by the sample rate. If you're using Meyda for visualisation,
   * make sure that you're collecting audio features at a rate that's faster
   * than or equal to the video frame rate you expect.
   * @instance
   * @member {number}
   */
  bufferSize: 512,
  /**
   * The number of samples per second of the incoming audio. This affects
   * feature extraction outside of the context of the Web Audio API, and must be
   * set accurately - otherwise calculations will be off.
   * @instance
   * @member {number}
   */
  sampleRate: 44100,
  /**
   * The number of Mel bands to use in the Mel Frequency Cepstral Co-efficients
   * feature extractor
   * @instance
   * @member {number}
   */
  melBands: 26,
  /**
   * The number of bands to divide the spectrum into for the Chroma feature
   * extractor. 12 is the standard number of semitones per octave in the western
   * music tradition, but Meyda can use an arbitrary number of bands, which
   * can be useful for microtonal music.
   * @instance
   * @member {number}
   */
  chromaBands: 12,
  /**
   * A function you can provide that will be called for each buffer that Meyda
   * receives from its source node
   * @instance
   * @member {Function}
   */
  callback: null,
  /**
   * Specify the windowing function to apply to the buffer before the
   * transformation from the time domain to the frequency domain is performed
   *
   * The default windowing function is the hanning window.
   *
   * @instance
   * @member {string}
   */
  windowingFunction: "hanning",
  /**
   * @member {object}
   */
  featureExtractors: extractors,
  EXTRACTION_STARTED: false,
  /**
   * The number of MFCC co-efficients that the MFCC feature extractor should return
   * @instance
   * @member {number}
   */
  numberOfMFCCCoefficients: 13,
  _featuresToExtract: [],
  windowing: applyWindow,
  // Pre-constructed error instances shared by the public API below.
  _errors: {
    notPow2: new Error("Meyda: Buffer size must be a power of 2, e.g. 64 or 512"),
    featureUndef: new Error("Meyda: No features defined."),
    invalidFeatureFmt: new Error("Meyda: Invalid feature format"),
    invalidInput: new Error("Meyda: Invalid input."),
    noAC: new Error("Meyda: No AudioContext specified."),
    noSource: new Error("Meyda: No source node specified.")
  },
  /**
   * @summary
   * Create a MeydaAnalyzer
   *
   * A factory function for creating a MeydaAnalyzer, the interface for using
   * Meyda in the context of Web Audio.
   *
   * @method
   * @param {MeydaOptions} options Options - an object containing configuration
   * @returns {MeydaAnalyzer}
   * @example
   * const analyzer = Meyda.createMeydaAnalyzer({
   *   "audioContext": audioContext,
   *   "source": source,
   *   "bufferSize": 512,
   *   "featureExtractors": ["rms"],
   *   "inputs": 2,
   *   "callback": features => {
   *     levelRangeElement.value = features.rms;
   *   }
   * });
   */
  createMeydaAnalyzer: function (options) {
    // Hand the analyzer a shallow copy so per-analyzer state (buffers,
    // filter banks) does not leak into this shared default object.
    return new MeydaAnalyzer(options, Object.assign({}, Meyda));
  },
  /**
   * Extract an audio feature from a buffer
   *
   * Unless `meyda.windowingFunction` is set otherwise, `extract` will
   * internally apply a hanning window to the buffer prior to conversion into
   * the frequency domain.
   *
   * @function
   * @param {(string|Array.<string>)} feature - the feature you want to extract
   * @param {Array.<number>} signal
   * An array of numbers that represents the signal. It should be of length
   * `meyda.bufferSize`
   * @param {Array.<number>} [previousSignal] - the previous buffer
   * @returns {object} Features
   * @example
   * meyda.bufferSize = 2048;
   * const features = meyda.extract(['zcr', 'spectralCentroid'], signal);
   */
  extract: function (feature, signal, previousSignal) {
    if (!signal) throw this._errors.invalidInput;else if (typeof signal != "object") throw this._errors.invalidInput;else if (!feature) throw this._errors.featureUndef;else if (!isPowerOfTwo(signal.length)) throw this._errors.notPow2;
    // Recalculate bark scale if buffer length changed
    if (typeof this.barkScale == "undefined" || this.barkScale.length != this.bufferSize) {
      this.barkScale = createBarkScale(this.bufferSize, this.sampleRate, this.bufferSize);
    } // Recalculate mel bank if buffer length changed
    if (typeof this.melFilterBank == "undefined" || this.barkScale.length != this.bufferSize || this.melFilterBank.length != this.melBands) {
      this.melFilterBank = createMelFilterBank(Math.max(this.melBands, this.numberOfMFCCCoefficients), this.sampleRate, this.bufferSize);
    } // Recalculate chroma bank if buffer length changed
    if (typeof this.chromaFilterBank == "undefined" || this.chromaFilterBank.length != this.chromaBands) {
      this.chromaFilterBank = createChromaFilterBank(this.chromaBands, this.sampleRate, this.bufferSize);
    }
    if (typeof signal.buffer == "undefined") {
      //signal is a normal array, convert to F32A
      this.signal = arrayToTyped(signal);
    } else {
      this.signal = signal;
    }
    let preparedSignal = prepareSignalWithSpectrum(signal, this.windowingFunction, this.bufferSize);
    this.signal = preparedSignal.windowedSignal;
    this.complexSpectrum = preparedSignal.complexSpectrum;
    this.ampSpectrum = preparedSignal.ampSpectrum;
    // Some extractors (e.g. spectralFlux) compare against the previous frame.
    if (previousSignal) {
      let preparedSignal = prepareSignalWithSpectrum(previousSignal, this.windowingFunction, this.bufferSize);
      this.previousSignal = preparedSignal.windowedSignal;
      this.previousComplexSpectrum = preparedSignal.complexSpectrum;
      this.previousAmpSpectrum = preparedSignal.ampSpectrum;
    }
    const extract = feature => {
      return this.featureExtractors[feature]({
        ampSpectrum: this.ampSpectrum,
        chromaFilterBank: this.chromaFilterBank,
        complexSpectrum: this.complexSpectrum,
        signal: this.signal,
        bufferSize: this.bufferSize,
        sampleRate: this.sampleRate,
        barkScale: this.barkScale,
        melFilterBank: this.melFilterBank,
        previousSignal: this.previousSignal,
        previousAmpSpectrum: this.previousAmpSpectrum,
        previousComplexSpectrum: this.previousComplexSpectrum,
        numberOfMFCCCoefficients: this.numberOfMFCCCoefficients
      });
    };
    if (typeof feature === "object") {
      // An array of feature names: build a {name: value} result object.
      return feature.reduce((acc, el) => Object.assign({}, acc, {
        [el]: extract(el)
      }), {});
    } else if (typeof feature === "string") {
      return extract(feature);
    } else {
      throw this._errors.invalidFeatureFmt;
    }
  }
};
/**
 * Window a time-domain buffer and compute its FFT and amplitude spectrum.
 *
 * @param {Array.<number>|Float32Array} signal - time-domain buffer of length bufferSize
 * @param {string} windowingFunction - name of the window to apply (e.g. "hanning")
 * @param {number} bufferSize - length of the buffer; the amplitude spectrum has bufferSize/2 bins
 * @returns {{signal: Float32Array, windowedSignal: *, complexSpectrum: {real: Array, imag: Array}, ampSpectrum: Float32Array}}
 */
var prepareSignalWithSpectrum = function (signal, windowingFunction, bufferSize) {
  var preparedSignal = {};
  if (typeof signal.buffer == "undefined") {
    //signal is a normal array, convert to F32A
    preparedSignal.signal = arrayToTyped(signal);
  } else {
    preparedSignal.signal = signal;
  }
  preparedSignal.windowedSignal = applyWindow(preparedSignal.signal, windowingFunction);
  preparedSignal.complexSpectrum = fft_1.fft(preparedSignal.windowedSignal);
  preparedSignal.ampSpectrum = new Float32Array(bufferSize / 2);
  // Magnitude of each complex bin: sqrt(re^2 + im^2).
  for (var i = 0; i < bufferSize / 2; i++) {
    preparedSignal.ampSpectrum[i] = Math.sqrt(Math.pow(preparedSignal.complexSpectrum.real[i], 2) + Math.pow(preparedSignal.complexSpectrum.imag[i], 2));
  }
  return preparedSignal;
};
// Expose the singleton globally for script-tag consumers in the browser.
if (typeof window !== "undefined") window.Meyda = Meyda;
// NOTE(review): two different bundle tails appear to have been interleaved
// here by the comparison view. This `return` makes everything below it
// unreachable; confirm which tail belongs to this build before shipping.
return Meyda;
var main = /*#__PURE__*/Object.freeze({
__proto__: null,
'default': Meyda
});
// NOTE(review): `getAugmentedNamespace` is not defined in this visible chunk;
// it presumably comes from earlier in the bundle — verify before relying on it.
var require$$0 = /*@__PURE__*/getAugmentedNamespace(main);
/*
This file is used as the entry point for only the web builds of Meyda. It is
*not* used as the entrypoint for the node build of Meyda. It makes sure that
Meyda directly exports itself on those builds, rather than the es6 default
export. The entrypoint of the node build of Meyda is `main.js`.
*/
var src = require$$0.default;
return src;
})));
//# sourceMappingURL=meyda.js.map

/* @@ -1,2 +0,2 @@ — diff hunk marker left over from the package-comparison view */

!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).Meyda=e()}(this,(function(){"use strict";function t(t){if(t.__esModule)return t;var e=Object.defineProperty({},"__esModule",{value:!0});return Object.keys(t).forEach((function(r){var a=Object.getOwnPropertyDescriptor(t,r);Object.defineProperty(e,r,a.get?a:{enumerable:!0,get:function(){return t[r]}})})),e}var e=Object.freeze({__proto__:null,blackman:function(t){let e=new Float32Array(t),r=2*Math.PI/(t-1),a=2*r;for(let n=0;n<t/2;n++)e[n]=.42-.5*Math.cos(n*r)+.08*Math.cos(n*a);for(let r=Math.ceil(t/2);r>0;r--)e[t-r]=e[r-1];return e},sine:function(t){let e=Math.PI/(t-1),r=new Float32Array(t);for(let a=0;a<t;a++)r[a]=Math.sin(e*a);return r},hanning:function(t){let e=new Float32Array(t);for(let r=0;r<t;r++)e[r]=.5-.5*Math.cos(2*Math.PI*r/(t-1));return e},hamming:function(t){let e=new Float32Array(t);for(let r=0;r<t;r++)e[r]=.54-.46*Math.cos(2*Math.PI*(r/t-1));return e}});let r={};function a(t){for(;t%2==0&&t>1;)t/=2;return 1===t}function n(t,a){if("rect"!==a){if(""!==a&&a||(a="hanning"),r[a]||(r[a]={}),!r[a][t.length])try{r[a][t.length]=e[a](t.length)}catch(t){throw new Error("Invalid windowing function")}t=function(t,e){let r=[];for(let a=0;a<Math.min(t.length,e.length);a++)r[a]=t[a]*e[a];return r}(t,r[a][t.length])}return t}function i(t,e,r){let a=new Float32Array(t);for(var n=0;n<a.length;n++)a[n]=n*e/r,a[n]=13*Math.atan(a[n]/1315.8)+3.5*Math.atan(Math.pow(a[n]/7518,2));return a}function o(t){return Float32Array.from(t)}function l(t){return 1125*Math.log(1+t/700)}function s(t,e,r){let a=new Float32Array(t+2),n=new Float32Array(t+2),i=e/2,o=l(0),s=(l(i)-o)/(t+1),u=Array(t+2);for(let t=0;t<a.length;t++)a[t]=t*s,n[t]=(m=a[t],700*(Math.exp(m/1125)-1)),u[t]=Math.floor((r+1)*n[t]/e);var m,h=Array(t);for(let t=0;t<h.length;t++){h[t]=Array.apply(null,new 
Array(r/2+1)).map(Number.prototype.valueOf,0);for(let e=u[t];e<u[t+1];e++)h[t][e]=(e-u[t])/(u[t+1]-u[t]);for(let e=u[t+1];e<u[t+2];e++)h[t][e]=(u[t+2]-e)/(u[t+2]-u[t+1])}return h}function u(t,e,r,a=5,n=2,i=!0,o=440){var l=Math.floor(r/2)+1,s=new Array(r).fill(0).map(((a,n)=>t*function(t,e){return Math.log2(16*t/e)}(e*n/r,o)));s[0]=s[1]-1.5*t;var u,m,h,p=s.slice(1).map(((t,e)=>Math.max(t-s[e])),1).concat([1]),f=Math.round(t/2),c=new Array(t).fill(0).map(((e,r)=>s.map((e=>(10*t+f+e-r)%t-f)))),g=c.map(((t,e)=>t.map(((t,r)=>Math.exp(-.5*Math.pow(2*c[e][r]/p[r],2))))));if(m=(u=g)[0].map((()=>0)),h=u.reduce(((t,e)=>(e.forEach(((e,r)=>{t[r]+=Math.pow(e,2)})),t)),m).map(Math.sqrt),g=u.map(((t,e)=>t.map(((t,e)=>t/(h[e]||1))))),n){var w=s.map((e=>Math.exp(-.5*Math.pow((e/t-a)/n,2))));g=g.map((t=>t.map(((t,e)=>t*w[e]))))}return i&&(g=[...g.slice(3),...g.slice(0,3)]),g.map((t=>t.slice(0,l)))}function m(t,e){for(var r=0,a=0,n=0;n<e.length;n++)r+=Math.pow(n,t)*Math.abs(e[n]),a+=e[n];return r/a}function h(t){if("object"!=typeof t.ampSpectrum||"object"!=typeof t.barkScale)throw new TypeError;var e=24,r=new Float32Array(e),a=0,n=t.ampSpectrum,i=new Int32Array(25);i[0]=0;var o=t.barkScale[n.length-1]/e,l=1;for(let r=0;r<n.length;r++)for(;t.barkScale[r]>o;)i[l++]=r,o=l*t.barkScale[n.length-1]/e;i[24]=n.length-1;for(let t=0;t<e;t++){let e=0;for(let r=i[t];r<i[t+1];r++)e+=n[r];r[t]=Math.pow(e,.23)}for(let t=0;t<r.length;t++)a+=r[t];return{specific:r,total:a}}function p(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;for(var t=new Float32Array(arguments[0].ampSpectrum.length),e=0;e<t.length;e++)t[e]=Math.pow(arguments[0].ampSpectrum[e],2);return t}var f=null;var c=function(t,e){var r=t.length;return e=e||2,f&&f[r]||function(t){(f=f||{})[t]=new Array(t*t);for(var e=Math.PI/t,r=0;r<t;r++)for(var a=0;a<t;a++)f[t][a+r*t]=Math.cos(e*(a+.5)*r)}(r),t.map((function(){return 0})).map((function(a,n){return e*t.reduce((function(t,e,a,i){return t+e*f[r][a+n*r]}),0)}))};var 
g=Object.freeze({__proto__:null,buffer:function(t){return t.signal},rms:function(t){if("object"!=typeof t.signal)throw new TypeError;for(var e=0,r=0;r<t.signal.length;r++)e+=Math.pow(t.signal[r],2);return e/=t.signal.length,e=Math.sqrt(e)},energy:function(){if("object"!=typeof arguments[0].signal)throw new TypeError;for(var t=0,e=0;e<arguments[0].signal.length;e++)t+=Math.pow(Math.abs(arguments[0].signal[e]),2);return t},complexSpectrum:function(t){return t.complexSpectrum},spectralSlope:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError;let e=0,r=0,a=new Float32Array(t.ampSpectrum.length),n=0,i=0;for(var o=0;o<t.ampSpectrum.length;o++){e+=t.ampSpectrum[o];let l=o*t.sampleRate/t.bufferSize;a[o]=l,n+=l*l,r+=l,i+=l*t.ampSpectrum[o]}return(t.ampSpectrum.length*i-r*e)/(e*(n-Math.pow(r,2)))},spectralCentroid:function(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;return m(1,arguments[0].ampSpectrum)},spectralRolloff:function(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;for(var t=arguments[0].ampSpectrum,e=arguments[0].sampleRate/(2*(t.length-1)),r=0,a=0;a<t.length;a++)r+=t[a];for(var n=.99*r,i=t.length-1;r>n&&i>=0;)r-=t[i],--i;return(i+1)*e},spectralFlatness:function(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;for(var t=0,e=0,r=0;r<arguments[0].ampSpectrum.length;r++)t+=Math.log(arguments[0].ampSpectrum[r]),e+=arguments[0].ampSpectrum[r];return Math.exp(t/arguments[0].ampSpectrum.length)*arguments[0].ampSpectrum.length/e},spectralSpread:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError;return Math.sqrt(m(2,t.ampSpectrum)-Math.pow(m(1,t.ampSpectrum),2))},spectralSkewness:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError;var e=m(1,t.ampSpectrum),r=m(2,t.ampSpectrum),a=m(3,t.ampSpectrum);return(2*Math.pow(e,3)-3*e*r+a)/Math.pow(Math.sqrt(r-Math.pow(e,2)),3)},spectralKurtosis:function(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;var 
t=arguments[0].ampSpectrum,e=m(1,t),r=m(2,t),a=m(3,t),n=m(4,t),i=-3*Math.pow(e,4)+6*e*r-4*e*a+n,o=Math.pow(Math.sqrt(r-Math.pow(e,2)),4);return i/o},amplitudeSpectrum:function(t){return t.ampSpectrum},zcr:function(){if("object"!=typeof arguments[0].signal)throw new TypeError;for(var t=0,e=1;e<arguments[0].signal.length;e++)(arguments[0].signal[e-1]>=0&&arguments[0].signal[e]<0||arguments[0].signal[e-1]<0&&arguments[0].signal[e]>=0)&&t++;return t},loudness:h,perceptualSpread:function(){if("object"!=typeof arguments[0].signal)throw new TypeError;for(var t=h(arguments[0]),e=0,r=0;r<t.specific.length;r++)t.specific[r]>e&&(e=t.specific[r]);var a=Math.pow((t.total-e)/t.total,2);return a},perceptualSharpness:function(){if("object"!=typeof arguments[0].signal)throw new TypeError;for(var t=h(arguments[0]),e=t.specific,r=0,a=0;a<e.length;a++)r+=a<15?(a+1)*e[a+1]:.066*Math.exp(.171*(a+1));return r*=.11/t.total},powerSpectrum:p,mfcc:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError("Valid ampSpectrum is required to generate MFCC");if("object"!=typeof t.melFilterBank)throw new TypeError("Valid melFilterBank is required to generate MFCC");let e=Math.min(40,Math.max(1,t.numberOfMFCCCoefficients||13)),r=p(t),a=t.melFilterBank.length,n=Array(a);if(a<e)throw new Error("Insufficient filter bank for requested number of coefficients");let i=new Float32Array(a);for(let e=0;e<i.length;e++){n[e]=new Float32Array(t.bufferSize/2),i[e]=0;for(let a=0;a<t.bufferSize/2;a++)n[e][a]=t.melFilterBank[e][a]*r[a],i[e]+=n[e][a];i[e]=Math.log(i[e]+1)}let o=Array.prototype.slice.call(i);return c(o).slice(0,e)},chroma:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError("Valid ampSpectrum is required to generate chroma");if("object"!=typeof t.chromaFilterBank)throw new TypeError("Valid chromaFilterBank is required to generate chroma");var e=t.chromaFilterBank.map(((e,r)=>t.ampSpectrum.reduce(((t,r,a)=>t+r*e[a]),0))),r=Math.max(...e);return 
r?e.map((t=>t/r)):e},spectralFlux:function(t){if("object"!=typeof t.signal||"object"!=typeof t.previousSignal)throw new TypeError;let e=0;for(let r=-t.bufferSize/2;r<signal.length/2-1;r++)x=Math.abs(t.signal[r])-Math.abs(t.previousSignal[r]),e+=(x+Math.abs(x))/2;return e}});function w(t){if(Array.isArray(t)){for(var e=0,r=Array(t.length);e<t.length;e++)r[e]=t[e];return r}return Array.from(t)}var S={},_={},d={bitReverseArray:function(t){if(void 0===S[t]){for(var e=(t-1).toString(2).length,r="0".repeat(e),a={},n=0;n<t;n++){var i=n.toString(2);i=r.substr(i.length)+i,i=[].concat(w(i)).reverse().join(""),a[n]=parseInt(i,2)}S[t]=a}return S[t]},multiply:function(t,e){return{real:t.real*e.real-t.imag*e.imag,imag:t.real*e.imag+t.imag*e.real}},add:function(t,e){return{real:t.real+e.real,imag:t.imag+e.imag}},subtract:function(t,e){return{real:t.real-e.real,imag:t.imag-e.imag}},euler:function(t,e){var r=-2*Math.PI*t/e;return{real:Math.cos(r),imag:Math.sin(r)}},conj:function(t){return t.imag*=-1,t},constructComplexArray:function(t){var e={};e.real=void 0===t.real?t.slice():t.real.slice();var r=e.real.length;return void 0===_[r]&&(_[r]=Array.apply(null,Array(r)).map(Number.prototype.valueOf,0)),e.imag=_[r].slice(),e}},y=function(t){var e={};void 0===t.real||void 0===t.imag?e=d.constructComplexArray(t):(e.real=t.real.slice(),e.imag=t.imag.slice());var r=e.real.length,a=Math.log2(r);if(Math.round(a)!=a)throw new Error("Input size must be a power of 2.");if(e.real.length!=e.imag.length)throw new Error("Real and imaginary components must have the same length.");for(var n=d.bitReverseArray(r),i={real:[],imag:[]},o=0;o<r;o++)i.real[n[o]]=e.real[o],i.imag[n[o]]=e.imag[o];for(var l=0;l<r;l++)e.real[l]=i.real[l],e.imag[l]=i.imag[l];for(var s=1;s<=a;s++)for(var u=Math.pow(2,s),m=0;m<u/2;m++)for(var h=d.euler(m,u),p=0;p<r/u;p++){var 
f=u*p+m,c=u*p+m+u/2,g={real:e.real[f],imag:e.imag[f]},w={real:e.real[c],imag:e.imag[c]},S=d.multiply(h,w),_=d.subtract(g,S);e.real[c]=_.real,e.imag[c]=_.imag;var y=d.add(S,g);e.real[f]=y.real,e.imag[f]=y.imag}return e},b=y;class v{constructor(t,e){if(this._m=e,!t.audioContext)throw this._m.errors.noAC;if(t.bufferSize&&!a(t.bufferSize))throw this._m._errors.notPow2;if(!t.source)throw this._m._errors.noSource;this._m.audioContext=t.audioContext,this._m.bufferSize=t.bufferSize||this._m.bufferSize||256,this._m.hopSize=t.hopSize||this._m.hopSize||this._m.bufferSize,this._m.sampleRate=t.sampleRate||this._m.audioContext.sampleRate||44100,this._m.callback=t.callback,this._m.windowingFunction=t.windowingFunction||"hanning",this._m.featureExtractors=g,this._m.EXTRACTION_STARTED=t.startImmediately||!1,this._m.channel="number"==typeof t.channel?t.channel:0,this._m.inputs=t.inputs||1,this._m.outputs=t.outputs||1,this._m.numberOfMFCCCoefficients=t.numberOfMFCCCoefficients||this._m.numberOfMFCCCoefficients||13,this._m.spn=this._m.audioContext.createScriptProcessor(this._m.bufferSize,this._m.inputs,this._m.outputs),this._m.spn.connect(this._m.audioContext.destination),this._m._featuresToExtract=t.featureExtractors||[],this._m.barkScale=i(this._m.bufferSize,this._m.sampleRate,this._m.bufferSize),this._m.melFilterBank=s(Math.max(this._m.melBands,this._m.numberOfMFCCCoefficients),this._m.sampleRate,this._m.bufferSize),this._m.inputData=null,this._m.previousInputData=null,this._m.frame=null,this._m.previousFrame=null,this.setSource(t.source),this._m.spn.onaudioprocess=t=>{if(null!==this._m.inputData&&(this._m.previousInputData=this._m.inputData),this._m.inputData=t.inputBuffer.getChannelData(this._m.channel),this._m.previousInputData)(e=new Float32Array(this._m.previousInputData.length+this._m.inputData.length-this._m.hopSize)).set(this._m.previousInputData.slice(this._m.hopSize)),e.set(this._m.inputData,this._m.previousInputData.length-this._m.hopSize);else var 
e=this._m.inputData;(function(t,e,r){if(t.length<e)throw new Error("Buffer is too short for frame length");if(r<1)throw new Error("Hop length cannot be less that 1");if(e<1)throw new Error("Frame length cannot be less that 1");const a=1+Math.floor((t.length-e)/r);return new Array(a).fill(0).map(((a,n)=>t.slice(n*r,n*r+e)))})(e,this._m.bufferSize,this._m.hopSize).forEach((t=>{this._m.frame=t;var e=this._m.extract(this._m._featuresToExtract,this._m.frame,this._m.previousFrame);"function"==typeof this._m.callback&&this._m.EXTRACTION_STARTED&&this._m.callback(e),this._m.previousFrame=this._m.frame}))}}start(t){this._m._featuresToExtract=t||this._m._featuresToExtract,this._m.EXTRACTION_STARTED=!0}stop(){this._m.EXTRACTION_STARTED=!1}setSource(t){this._m.source&&this._m.source.disconnect(this._m.spn),this._m.source=t,this._m.source.connect(this._m.spn)}setChannel(t){t<=this._m.inputs?this._m.channel=t:console.error(`Channel ${t} does not exist. Make sure you've provided a value for 'inputs' that is greater than ${t} when instantiating the MeydaAnalyzer`)}get(t){return this._m.inputData?this._m.extract(t||this._m._featuresToExtract,this._m.inputData,this._m.previousInputData):null}}var M={audioContext:null,spn:null,bufferSize:512,sampleRate:44100,melBands:26,chromaBands:12,callback:null,windowingFunction:"hanning",featureExtractors:g,EXTRACTION_STARTED:!1,numberOfMFCCCoefficients:13,_featuresToExtract:[],windowing:n,_errors:{notPow2:new Error("Meyda: Buffer size must be a power of 2, e.g. 
64 or 512"),featureUndef:new Error("Meyda: No features defined."),invalidFeatureFmt:new Error("Meyda: Invalid feature format"),invalidInput:new Error("Meyda: Invalid input."),noAC:new Error("Meyda: No AudioContext specified."),noSource:new Error("Meyda: No source node specified.")},createMeydaAnalyzer:function(t){return new v(t,Object.assign({},M))},extract:function(t,e,r){if(!e)throw this._errors.invalidInput;if("object"!=typeof e)throw this._errors.invalidInput;if(!t)throw this._errors.featureUndef;if(!a(e.length))throw this._errors.notPow2;void 0!==this.barkScale&&this.barkScale.length==this.bufferSize||(this.barkScale=i(this.bufferSize,this.sampleRate,this.bufferSize)),void 0!==this.melFilterBank&&this.barkScale.length==this.bufferSize&&this.melFilterBank.length==this.melBands||(this.melFilterBank=s(Math.max(this.melBands,this.numberOfMFCCCoefficients),this.sampleRate,this.bufferSize)),void 0!==this.chromaFilterBank&&this.chromaFilterBank.length==this.chromaBands||(this.chromaFilterBank=u(this.chromaBands,this.sampleRate,this.bufferSize)),void 0===e.buffer?this.signal=o(e):this.signal=e;let n=F(e,this.windowingFunction,this.bufferSize);if(this.signal=n.windowedSignal,this.complexSpectrum=n.complexSpectrum,this.ampSpectrum=n.ampSpectrum,r){let t=F(r,this.windowingFunction,this.bufferSize);this.previousSignal=t.windowedSignal,this.previousComplexSpectrum=t.complexSpectrum,this.previousAmpSpectrum=t.ampSpectrum}const l=t=>this.featureExtractors[t]({ampSpectrum:this.ampSpectrum,chromaFilterBank:this.chromaFilterBank,complexSpectrum:this.complexSpectrum,signal:this.signal,bufferSize:this.bufferSize,sampleRate:this.sampleRate,barkScale:this.barkScale,melFilterBank:this.melFilterBank,previousSignal:this.previousSignal,previousAmpSpectrum:this.previousAmpSpectrum,previousComplexSpectrum:this.previousComplexSpectrum,numberOfMFCCCoefficients:this.numberOfMFCCCoefficients});if("object"==typeof t)return 
t.reduce(((t,e)=>Object.assign({},t,{[e]:l(e)})),{});if("string"==typeof t)return l(t);throw this._errors.invalidFeatureFmt}},F=function(t,e,r){var a={};void 0===t.buffer?a.signal=o(t):a.signal=t,a.windowedSignal=n(a.signal,e),a.complexSpectrum=b(a.windowedSignal),a.ampSpectrum=new Float32Array(r/2);for(var i=0;i<r/2;i++)a.ampSpectrum[i]=Math.sqrt(Math.pow(a.complexSpectrum.real[i],2)+Math.pow(a.complexSpectrum.imag[i],2));return a};return"undefined"!=typeof window&&(window.Meyda=M),t(Object.freeze({__proto__:null,default:M})).default}));
/*
 * meyda.min.js — minified UMD build of the Meyda audio feature
 * extraction library (rollup + terser output; see the sourceMappingURL
 * comment at the end for the matching source map).
 *
 * DO NOT EDIT BY HAND: this is generated code. Make changes in src/
 * and rebuild ("npm run build"). The line breaks below are an artifact
 * of display wrapping and fall mid-statement/mid-string-literal, so
 * inserting or moving anything inside the bundle will break it.
 *
 * Structure (as visible in the minified text): UMD wrapper exporting
 * the `Meyda` object (CommonJS / AMD / global), windowing functions
 * (blackman/sine/hanning/hamming), bark/mel/chroma filter-bank
 * builders, an FFT, the feature extractors (rms, energy, spectral*,
 * mfcc, chroma, loudness, ...), and the MeydaAnalyzer class built on
 * a ScriptProcessorNode.
 *
 * NOTE(review), to be fixed in src/ and rebuilt — not patchable here:
 *  - spectralFlux appears to reference an undeclared global `signal`
 *    (loop bound `r<signal.length/2-1`) and an implicit global `x`;
 *    presumably `t.signal` was intended — confirm against src.
 *  - the hamming window uses cos(2*PI*(r/t-1)), i.e. r/t - 1, where
 *    the textbook form is r/(t-1) — looks like a misplaced paren;
 *    verify against the unminified source before relying on it.
 */
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).Meyda=e()}(this,(function(){"use strict";var t=Object.freeze({__proto__:null,blackman:function(t){let e=new Float32Array(t),r=2*Math.PI/(t-1),a=2*r;for(let i=0;i<t/2;i++)e[i]=.42-.5*Math.cos(i*r)+.08*Math.cos(i*a);for(let r=Math.ceil(t/2);r>0;r--)e[t-r]=e[r-1];return e},sine:function(t){let e=Math.PI/(t-1),r=new Float32Array(t);for(let a=0;a<t;a++)r[a]=Math.sin(e*a);return r},hanning:function(t){let e=new Float32Array(t);for(let r=0;r<t;r++)e[r]=.5-.5*Math.cos(2*Math.PI*r/(t-1));return e},hamming:function(t){let e=new Float32Array(t);for(let r=0;r<t;r++)e[r]=.54-.46*Math.cos(2*Math.PI*(r/t-1));return e}});let e={};function r(t){for(;t%2==0&&t>1;)t/=2;return 1===t}function a(r,a){if("rect"!==a){if(""!==a&&a||(a="hanning"),e[a]||(e[a]={}),!e[a][r.length])try{e[a][r.length]=t[a](r.length)}catch(t){throw new Error("Invalid windowing function")}r=function(t,e){let r=[];for(let a=0;a<Math.min(t.length,e.length);a++)r[a]=t[a]*e[a];return r}(r,e[a][r.length])}return r}function i(t,e,r){let a=new Float32Array(t);for(var i=0;i<a.length;i++)a[i]=i*e/r,a[i]=13*Math.atan(a[i]/1315.8)+3.5*Math.atan(Math.pow(a[i]/7518,2));return a}function n(t){return Float32Array.from(t)}function o(t){return 1125*Math.log(1+t/700)}function s(t,e,r){let a=new Float32Array(t+2),i=new Float32Array(t+2),n=e/2,s=o(0),l=(o(n)-s)/(t+1),u=Array(t+2);for(let t=0;t<a.length;t++)a[t]=t*l,i[t]=(m=a[t],700*(Math.exp(m/1125)-1)),u[t]=Math.floor((r+1)*i[t]/e);var m,h=Array(t);for(let t=0;t<h.length;t++){h[t]=Array.apply(null,new Array(r/2+1)).map(Number.prototype.valueOf,0);for(let e=u[t];e<u[t+1];e++)h[t][e]=(e-u[t])/(u[t+1]-u[t]);for(let e=u[t+1];e<u[t+2];e++)h[t][e]=(u[t+2]-e)/(u[t+2]-u[t+1])}return h}function l(t,e,r,a=5,i=2,n=!0,o=440){var s=Math.floor(r/2)+1,l=new Array(r).fill(0).map(((a,i)=>t*function(t,e){return 
Math.log2(16*t/e)}(e*i/r,o)));l[0]=l[1]-1.5*t;var u,m,h,p=l.slice(1).map(((t,e)=>Math.max(t-l[e])),1).concat([1]),f=Math.round(t/2),c=new Array(t).fill(0).map(((e,r)=>l.map((e=>(10*t+f+e-r)%t-f)))),g=c.map(((t,e)=>t.map(((t,r)=>Math.exp(-.5*Math.pow(2*c[e][r]/p[r],2))))));if(m=(u=g)[0].map((()=>0)),h=u.reduce(((t,e)=>(e.forEach(((e,r)=>{t[r]+=Math.pow(e,2)})),t)),m).map(Math.sqrt),g=u.map(((t,e)=>t.map(((t,e)=>t/(h[e]||1))))),i){var w=l.map((e=>Math.exp(-.5*Math.pow((e/t-a)/i,2))));g=g.map((t=>t.map(((t,e)=>t*w[e]))))}return n&&(g=[...g.slice(3),...g.slice(0,3)]),g.map((t=>t.slice(0,s)))}function u(t,e){for(var r=0,a=0,i=0;i<e.length;i++)r+=Math.pow(i,t)*Math.abs(e[i]),a+=e[i];return r/a}function m(t){if("object"!=typeof t.ampSpectrum||"object"!=typeof t.barkScale)throw new TypeError;var e=24,r=new Float32Array(e),a=0,i=t.ampSpectrum,n=new Int32Array(25);n[0]=0;var o=t.barkScale[i.length-1]/e,s=1;for(let r=0;r<i.length;r++)for(;t.barkScale[r]>o;)n[s++]=r,o=s*t.barkScale[i.length-1]/e;n[24]=i.length-1;for(let t=0;t<e;t++){let e=0;for(let r=n[t];r<n[t+1];r++)e+=i[r];r[t]=Math.pow(e,.23)}for(let t=0;t<r.length;t++)a+=r[t];return{specific:r,total:a}}function h(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;for(var t=new Float32Array(arguments[0].ampSpectrum.length),e=0;e<t.length;e++)t[e]=Math.pow(arguments[0].ampSpectrum[e],2);return t}var p=null;var f=function(t,e){var r=t.length;return e=e||2,p&&p[r]||function(t){(p=p||{})[t]=new Array(t*t);for(var e=Math.PI/t,r=0;r<t;r++)for(var a=0;a<t;a++)p[t][a+r*t]=Math.cos(e*(a+.5)*r)}(r),t.map((function(){return 0})).map((function(a,i){return e*t.reduce((function(t,e,a,n){return t+e*p[r][a+i*r]}),0)}))};var c=Object.freeze({__proto__:null,buffer:function(t){return t.signal},rms:function(t){if("object"!=typeof t.signal)throw new TypeError;for(var e=0,r=0;r<t.signal.length;r++)e+=Math.pow(t.signal[r],2);return e/=t.signal.length,e=Math.sqrt(e)},energy:function(){if("object"!=typeof arguments[0].signal)throw 
new TypeError;for(var t=0,e=0;e<arguments[0].signal.length;e++)t+=Math.pow(Math.abs(arguments[0].signal[e]),2);return t},complexSpectrum:function(t){return t.complexSpectrum},spectralSlope:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError;let e=0,r=0,a=new Float32Array(t.ampSpectrum.length),i=0,n=0;for(var o=0;o<t.ampSpectrum.length;o++){e+=t.ampSpectrum[o];let s=o*t.sampleRate/t.bufferSize;a[o]=s,i+=s*s,r+=s,n+=s*t.ampSpectrum[o]}return(t.ampSpectrum.length*n-r*e)/(e*(i-Math.pow(r,2)))},spectralCentroid:function(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;return u(1,arguments[0].ampSpectrum)},spectralRolloff:function(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;for(var t=arguments[0].ampSpectrum,e=arguments[0].sampleRate/(2*(t.length-1)),r=0,a=0;a<t.length;a++)r+=t[a];for(var i=.99*r,n=t.length-1;r>i&&n>=0;)r-=t[n],--n;return(n+1)*e},spectralFlatness:function(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;for(var t=0,e=0,r=0;r<arguments[0].ampSpectrum.length;r++)t+=Math.log(arguments[0].ampSpectrum[r]),e+=arguments[0].ampSpectrum[r];return Math.exp(t/arguments[0].ampSpectrum.length)*arguments[0].ampSpectrum.length/e},spectralSpread:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError;return Math.sqrt(u(2,t.ampSpectrum)-Math.pow(u(1,t.ampSpectrum),2))},spectralSkewness:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError;var e=u(1,t.ampSpectrum),r=u(2,t.ampSpectrum),a=u(3,t.ampSpectrum);return(2*Math.pow(e,3)-3*e*r+a)/Math.pow(Math.sqrt(r-Math.pow(e,2)),3)},spectralKurtosis:function(){if("object"!=typeof arguments[0].ampSpectrum)throw new TypeError;var t=arguments[0].ampSpectrum,e=u(1,t),r=u(2,t),a=u(3,t),i=u(4,t),n=-3*Math.pow(e,4)+6*e*r-4*e*a+i,o=Math.pow(Math.sqrt(r-Math.pow(e,2)),4);return n/o},amplitudeSpectrum:function(t){return t.ampSpectrum},zcr:function(){if("object"!=typeof arguments[0].signal)throw new TypeError;for(var 
t=0,e=1;e<arguments[0].signal.length;e++)(arguments[0].signal[e-1]>=0&&arguments[0].signal[e]<0||arguments[0].signal[e-1]<0&&arguments[0].signal[e]>=0)&&t++;return t},loudness:m,perceptualSpread:function(){if("object"!=typeof arguments[0].signal)throw new TypeError;for(var t=m(arguments[0]),e=0,r=0;r<t.specific.length;r++)t.specific[r]>e&&(e=t.specific[r]);var a=Math.pow((t.total-e)/t.total,2);return a},perceptualSharpness:function(){if("object"!=typeof arguments[0].signal)throw new TypeError;for(var t=m(arguments[0]),e=t.specific,r=0,a=0;a<e.length;a++)r+=a<15?(a+1)*e[a+1]:.066*Math.exp(.171*(a+1));return r*=.11/t.total},powerSpectrum:h,mfcc:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError("Valid ampSpectrum is required to generate MFCC");if("object"!=typeof t.melFilterBank)throw new TypeError("Valid melFilterBank is required to generate MFCC");let e=Math.min(40,Math.max(1,t.numberOfMFCCCoefficients||13)),r=h(t),a=t.melFilterBank.length,i=Array(a);if(a<e)throw new Error("Insufficient filter bank for requested number of coefficients");let n=new Float32Array(a);for(let e=0;e<n.length;e++){i[e]=new Float32Array(t.bufferSize/2),n[e]=0;for(let a=0;a<t.bufferSize/2;a++)i[e][a]=t.melFilterBank[e][a]*r[a],n[e]+=i[e][a];n[e]=Math.log(n[e]+1)}let o=Array.prototype.slice.call(n);return f(o).slice(0,e)},chroma:function(t){if("object"!=typeof t.ampSpectrum)throw new TypeError("Valid ampSpectrum is required to generate chroma");if("object"!=typeof t.chromaFilterBank)throw new TypeError("Valid chromaFilterBank is required to generate chroma");var e=t.chromaFilterBank.map(((e,r)=>t.ampSpectrum.reduce(((t,r,a)=>t+r*e[a]),0))),r=Math.max(...e);return r?e.map((t=>t/r)):e},spectralFlux:function(t){if("object"!=typeof t.signal||"object"!=typeof t.previousSignal)throw new TypeError;let e=0;for(let r=-t.bufferSize/2;r<signal.length/2-1;r++)x=Math.abs(t.signal[r])-Math.abs(t.previousSignal[r]),e+=(x+Math.abs(x))/2;return e}});function g(t){if(Array.isArray(t)){for(var 
e=0,r=Array(t.length);e<t.length;e++)r[e]=t[e];return r}return Array.from(t)}var w={},S={},_={bitReverseArray:function(t){if(void 0===w[t]){for(var e=(t-1).toString(2).length,r="0".repeat(e),a={},i=0;i<t;i++){var n=i.toString(2);n=r.substr(n.length)+n,n=[].concat(g(n)).reverse().join(""),a[i]=parseInt(n,2)}w[t]=a}return w[t]},multiply:function(t,e){return{real:t.real*e.real-t.imag*e.imag,imag:t.real*e.imag+t.imag*e.real}},add:function(t,e){return{real:t.real+e.real,imag:t.imag+e.imag}},subtract:function(t,e){return{real:t.real-e.real,imag:t.imag-e.imag}},euler:function(t,e){var r=-2*Math.PI*t/e;return{real:Math.cos(r),imag:Math.sin(r)}},conj:function(t){return t.imag*=-1,t},constructComplexArray:function(t){var e={};e.real=void 0===t.real?t.slice():t.real.slice();var r=e.real.length;return void 0===S[r]&&(S[r]=Array.apply(null,Array(r)).map(Number.prototype.valueOf,0)),e.imag=S[r].slice(),e}},d=function(t){var e={};void 0===t.real||void 0===t.imag?e=_.constructComplexArray(t):(e.real=t.real.slice(),e.imag=t.imag.slice());var r=e.real.length,a=Math.log2(r);if(Math.round(a)!=a)throw new Error("Input size must be a power of 2.");if(e.real.length!=e.imag.length)throw new Error("Real and imaginary components must have the same length.");for(var i=_.bitReverseArray(r),n={real:[],imag:[]},o=0;o<r;o++)n.real[i[o]]=e.real[o],n.imag[i[o]]=e.imag[o];for(var s=0;s<r;s++)e.real[s]=n.real[s],e.imag[s]=n.imag[s];for(var l=1;l<=a;l++)for(var u=Math.pow(2,l),m=0;m<u/2;m++)for(var h=_.euler(m,u),p=0;p<r/u;p++){var f=u*p+m,c=u*p+m+u/2,g={real:e.real[f],imag:e.imag[f]},w={real:e.real[c],imag:e.imag[c]},S=_.multiply(h,w),d=_.subtract(g,S);e.real[c]=d.real,e.imag[c]=d.imag;var y=_.add(S,g);e.real[f]=y.real,e.imag[f]=y.imag}return e},y=d;class b{constructor(t,e){if(this._m=e,!t.audioContext)throw this._m.errors.noAC;if(t.bufferSize&&!r(t.bufferSize))throw this._m._errors.notPow2;if(!t.source)throw 
this._m._errors.noSource;this._m.audioContext=t.audioContext,this._m.bufferSize=t.bufferSize||this._m.bufferSize||256,this._m.hopSize=t.hopSize||this._m.hopSize||this._m.bufferSize,this._m.sampleRate=t.sampleRate||this._m.audioContext.sampleRate||44100,this._m.callback=t.callback,this._m.windowingFunction=t.windowingFunction||"hanning",this._m.featureExtractors=c,this._m.EXTRACTION_STARTED=t.startImmediately||!1,this._m.channel="number"==typeof t.channel?t.channel:0,this._m.inputs=t.inputs||1,this._m.outputs=t.outputs||1,this._m.numberOfMFCCCoefficients=t.numberOfMFCCCoefficients||this._m.numberOfMFCCCoefficients||13,this._m.spn=this._m.audioContext.createScriptProcessor(this._m.bufferSize,this._m.inputs,this._m.outputs),this._m.spn.connect(this._m.audioContext.destination),this._m._featuresToExtract=t.featureExtractors||[],this._m.barkScale=i(this._m.bufferSize,this._m.sampleRate,this._m.bufferSize),this._m.melFilterBank=s(Math.max(this._m.melBands,this._m.numberOfMFCCCoefficients),this._m.sampleRate,this._m.bufferSize),this._m.inputData=null,this._m.previousInputData=null,this._m.frame=null,this._m.previousFrame=null,this.setSource(t.source),this._m.spn.onaudioprocess=t=>{if(null!==this._m.inputData&&(this._m.previousInputData=this._m.inputData),this._m.inputData=t.inputBuffer.getChannelData(this._m.channel),this._m.previousInputData)(e=new Float32Array(this._m.previousInputData.length+this._m.inputData.length-this._m.hopSize)).set(this._m.previousInputData.slice(this._m.hopSize)),e.set(this._m.inputData,this._m.previousInputData.length-this._m.hopSize);else var e=this._m.inputData;(function(t,e,r){if(t.length<e)throw new Error("Buffer is too short for frame length");if(r<1)throw new Error("Hop length cannot be less that 1");if(e<1)throw new Error("Frame length cannot be less that 1");const a=1+Math.floor((t.length-e)/r);return new Array(a).fill(0).map(((a,i)=>t.slice(i*r,i*r+e)))})(e,this._m.bufferSize,this._m.hopSize).forEach((t=>{this._m.frame=t;var 
e=this._m.extract(this._m._featuresToExtract,this._m.frame,this._m.previousFrame);"function"==typeof this._m.callback&&this._m.EXTRACTION_STARTED&&this._m.callback(e),this._m.previousFrame=this._m.frame}))}}start(t){this._m._featuresToExtract=t||this._m._featuresToExtract,this._m.EXTRACTION_STARTED=!0}stop(){this._m.EXTRACTION_STARTED=!1}setSource(t){this._m.source&&this._m.source.disconnect(this._m.spn),this._m.source=t,this._m.source.connect(this._m.spn)}setChannel(t){t<=this._m.inputs?this._m.channel=t:console.error(`Channel ${t} does not exist. Make sure you've provided a value for 'inputs' that is greater than ${t} when instantiating the MeydaAnalyzer`)}get(t){return this._m.inputData?this._m.extract(t||this._m._featuresToExtract,this._m.inputData,this._m.previousInputData):null}}var v={audioContext:null,spn:null,bufferSize:512,sampleRate:44100,melBands:26,chromaBands:12,callback:null,windowingFunction:"hanning",featureExtractors:c,EXTRACTION_STARTED:!1,numberOfMFCCCoefficients:13,_featuresToExtract:[],windowing:a,_errors:{notPow2:new Error("Meyda: Buffer size must be a power of 2, e.g. 
64 or 512"),featureUndef:new Error("Meyda: No features defined."),invalidFeatureFmt:new Error("Meyda: Invalid feature format"),invalidInput:new Error("Meyda: Invalid input."),noAC:new Error("Meyda: No AudioContext specified."),noSource:new Error("Meyda: No source node specified.")},createMeydaAnalyzer:function(t){return new b(t,Object.assign({},v))},extract:function(t,e,a){if(!e)throw this._errors.invalidInput;if("object"!=typeof e)throw this._errors.invalidInput;if(!t)throw this._errors.featureUndef;if(!r(e.length))throw this._errors.notPow2;void 0!==this.barkScale&&this.barkScale.length==this.bufferSize||(this.barkScale=i(this.bufferSize,this.sampleRate,this.bufferSize)),void 0!==this.melFilterBank&&this.barkScale.length==this.bufferSize&&this.melFilterBank.length==this.melBands||(this.melFilterBank=s(Math.max(this.melBands,this.numberOfMFCCCoefficients),this.sampleRate,this.bufferSize)),void 0!==this.chromaFilterBank&&this.chromaFilterBank.length==this.chromaBands||(this.chromaFilterBank=l(this.chromaBands,this.sampleRate,this.bufferSize)),void 0===e.buffer?this.signal=n(e):this.signal=e;let o=M(e,this.windowingFunction,this.bufferSize);if(this.signal=o.windowedSignal,this.complexSpectrum=o.complexSpectrum,this.ampSpectrum=o.ampSpectrum,a){let t=M(a,this.windowingFunction,this.bufferSize);this.previousSignal=t.windowedSignal,this.previousComplexSpectrum=t.complexSpectrum,this.previousAmpSpectrum=t.ampSpectrum}const u=t=>this.featureExtractors[t]({ampSpectrum:this.ampSpectrum,chromaFilterBank:this.chromaFilterBank,complexSpectrum:this.complexSpectrum,signal:this.signal,bufferSize:this.bufferSize,sampleRate:this.sampleRate,barkScale:this.barkScale,melFilterBank:this.melFilterBank,previousSignal:this.previousSignal,previousAmpSpectrum:this.previousAmpSpectrum,previousComplexSpectrum:this.previousComplexSpectrum,numberOfMFCCCoefficients:this.numberOfMFCCCoefficients});if("object"==typeof t)return 
t.reduce(((t,e)=>Object.assign({},t,{[e]:u(e)})),{});if("string"==typeof t)return u(t);throw this._errors.invalidFeatureFmt}},M=function(t,e,r){var i={};void 0===t.buffer?i.signal=n(t):i.signal=t,i.windowedSignal=a(i.signal,e),i.complexSpectrum=y(i.windowedSignal),i.ampSpectrum=new Float32Array(r/2);for(var o=0;o<r/2;o++)i.ampSpectrum[o]=Math.sqrt(Math.pow(i.complexSpectrum.real[o],2)+Math.pow(i.complexSpectrum.imag[o],2));return i};return"undefined"!=typeof window&&(window.Meyda=v),v}));
//# sourceMappingURL=meyda.min.js.map
{
"name": "meyda",
"version": "5.1.8-beta.1",
"version": "5.1.8",
"description": "Real-time feature extraction for the web audio api",

@@ -12,8 +12,5 @@ "main": "./dist/node/main.js",

"scripts": {
"pretest": "npm run lint && npm run build",
"test": "jest",
"build": "NODE_ENV=production; rollup -c rollup.config.js",
"default": "npm test && npm run lint",
"lint": "eslint src __tests__",
"clean": "rm -rf dist node_modules",
"lint": "eslint -f compact src __tests__",
"generatereferencedocs": "jsdoc src/meyda-wa.js src/main.js -d docs/reference -R docs/README.md",

@@ -47,27 +44,20 @@ "semantic-release": "semantic-release"

"devDependencies": {
"@babel/core": "^7.7.7",
"@babel/preset-env": "^7.7.7",
"@commitlint/cli": "^12.0.1",
"@commitlint/config-conventional": "^12.0.1",
"@commitlint/travis-cli": "^12.0.1",
"@babel/core": "^7.14.3",
"@babel/preset-env": "^7.14.4",
"@commitlint/cli": "^12.1.4",
"@commitlint/config-conventional": "^12.1.4",
"@rollup/plugin-babel": "^5.3.0",
"@rollup/plugin-commonjs": "^19.0.0",
"@rollup/plugin-node-resolve": "^13.0.0",
"babel-loader": "^8.1.0",
"babel-plugin-add-module-exports": "^1.0.2",
"commitizen": "^4.0.3",
"eslint": "^7.1.0",
"babel-plugin-add-module-exports": "^1.0.4",
"commitizen": "^4.2.4",
"eslint": "^7.27.0",
"glob": "^7.1.7",
"husky": "^6.0.0",
"jest": "^27.0.1",
"jest-github-reporter": "^1.0.0",
"jsdoc": "^3.6.3",
"rollup": "^2.49.0",
"jest": "^27.0.3",
"jsdoc": "^3.6.7",
"rollup": "^2.50.5",
"rollup-plugin-node-polyfills": "^0.2.1",
"rollup-plugin-terser": "^7.0.2",
"semantic-release": "^17.0.7",
"terser-webpack-plugin": "^5.1.1",
"webpack": "^5.11.1",
"webpack-cli": "^4.3.1",
"webpack-stream": "^6.0.0"
"semantic-release": "^17.4.3"
},

@@ -78,3 +68,3 @@ "dependencies": {

"node-getopt": "^0.3.2",
"wav": "^1.0.0"
"wav": "^1.0.2"
},

@@ -86,4 +76,3 @@ "engines": {

"reporters": [
"default",
"jest-github-reporter"
"default"
],

@@ -90,0 +79,0 @@ "testPathIgnorePatterns": [

# meyda
[![Build Status][build-status-image]][meyda-travis]
![Build Status][build-status-image]
Meyda is a JavaScript audio feature extraction library. Meyda supports both offline feature extraction as well as real-time feature extraction using the [Web Audio API][web-audio-api]. We wrote a paper about it, which is available [here][meyda-paper].

@@ -11,2 +12,3 @@

### Usage
Please see [the documentation][docs] for setup and usage instructions.

@@ -18,4 +20,3 @@

[build-status-image]: https://travis-ci.org/meyda/meyda.svg?branch=master
[meyda-travis]: https://travis-ci.org/meyda/meyda
[build-status-image]: https://img.shields.io/github/checks-status/meyda/meyda/master
[web-audio-api]: https://github.com/WebAudio/web-audio-api

@@ -22,0 +23,0 @@ [meyda-paper]: http://doc.gold.ac.uk/~mu202hr/publications/RawlinsonSegalFiala_WAC2015.pdf

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc