audio-node-suite
Advanced tools
Comparing version 1.0.0 to 1.1.0
@@ -32,3 +32,3 @@ /* | ||
outDir: "lib", | ||
minify: !(mode === "development"), | ||
minify: false, | ||
target: "es2022", | ||
@@ -35,0 +35,0 @@ sourcemap: (mode === "development"), |
@@ -1,1 +0,681 @@ | ||
"use strict";class m extends GainNode{constructor(i,e=i){if(super(i.context),this._bypass=!1,this._targets=[],typeof i!="object"||!(i instanceof AudioNode))throw new Error("input has to be a valid AudioNode");const n=i.context;let t;if(i.numberOfInputs>0)t=n.createGain(),t.connect(i);else{const o=n.createBufferSource();o.buffer=null,t=o}t._targets=[],t._bypass=!1,t._connect=t.connect;const s=(...o)=>{t._targets.push(o);let l;return t._bypass?i.numberOfInputs>0?l=t._connect(...o):l=i.connect(...o):l=e.connect(...o),l};return t.connect=s,t._disconnect=t.disconnect,t.disconnect=(...o)=>{let l;return t._bypass?i.numberOfInputs>0?l=t._disconnect(...o):l=i.connect(...o):l=e.disconnect(...o),t._targets=t._targets.filter(c=>{if(c.length!==o.length)return!0;for(let u=0;u<o.length;u++)if(c[u]!==o[u])return!0;return!1}),l},t.bypass=o=>{if(t._bypass!==o)if(t._bypass=o,t._bypass){i.numberOfInputs>0&&t._disconnect(i);for(const l of t._targets)e.disconnect(...l),t._connect(...l)}else{for(const l of t._targets)t._disconnect.apply(null,l),e.connect(...l);i.numberOfInputs>0&&t._connect(i)}},t.input=i,t.output=e,t}static factory(i){if(i.length<1)throw new Error("at least one node has to be given");for(let e=0;e<i.length-1;e++)i[e].connect(i[e+1]);return new m(i[0],i[i.length-1])}}const N=d=>Math.pow(10,d/20),C=d=>20*Math.log10(d),V=(d,i,e)=>(d<i?d=i:d>e&&(d=e),d),v=(d,i,e,n)=>{const t=d.length<n?d.length:n;let s=0,o=0;for(let l=0;l<=e;l++){const c=l+(t-e);s+=c*d[l],o+=c}if(!i)for(let l=e+1;l<t;l++){const c=l-(e+1);s+=c*d[l],o+=c}return s/=o,s};class _{constructor(i){if(this.timer=NaN,this.timerStop=!1,window!==void 0){const e=()=>{i(),this.timerStop||window.requestAnimationFrame(e)};window.requestAnimationFrame(e)}else this.timer=setInterval(()=>i(),1e3/60)}clear(){window!==void 0?this.timerStop=!0:clearTimeout(this.timer)}}class G{constructor(i,e={}){e.type??="pink",e.channels??=1;const n=5*i.sampleRate,t=i.createBuffer(e.channels,n,i.sampleRate);if(e.type==="white")for(let 
o=0;o<n;o++){const l=Math.random()*2-1;for(let c=0;c<e.channels;c++){const u=t.getChannelData(c);u[o]=l}}else if(e.type==="pink"){const o=[];for(let h=0;h<e.channels;h++){o[h]=new Float32Array(n);const r=[0,0,0,0,0,0,0];for(let b=0;b<n;b++){const a=Math.random()*2-1;r[0]=.99886*r[0]+a*.0555179,r[1]=.99332*r[1]+a*.0750759,r[2]=.969*r[2]+a*.153852,r[3]=.8665*r[3]+a*.3104856,r[4]=.55*r[4]+a*.5329522,r[5]=-.7616*r[5]-a*.016898,o[h][b]=r[0]+r[1]+r[2]+r[3]+r[4]+r[5]+r[6]+a*.5362,r[6]=a*.115926}}const l=[],c=[];for(let h=0;h<o.length;h++)l.push(Math.min(...o[h])),c.push(Math.max(...o[h]));const u=Math.min(...l),g=Math.max(...c),f=2147483647/2147483648/Math.max(Math.abs(u),g);for(let h=0;h<e.channels;h++)for(let r=0;r<n;r++)t.getChannelData(h)[r]=o[h][r]*f}const s=i.createBufferSource();return s.buffer=t,s.loop=!0,s.start(0),new m(s)}}class j{constructor(i,e={}){e.muted??=!1;const n=i.createGain();n.gain.setValueAtTime(e.muted?0:1,i.currentTime);const t=new m(n);return t.mute=(s,o=10)=>{const l=s?0:1;console.log("FUCK",s,l),n.gain.linearRampToValueAtTime(l,i.currentTime+o/1e3)},t}}class B{constructor(i,e={}){e.gain??=0;const n=i.createGain();n.gain.setValueAtTime(N(e.gain),i.currentTime);const t=new m(n);return t.adjustGainDecibel=(s,o=10)=>{t.input.gain.linearRampToValueAtTime(N(s),i.currentTime+o/1e3)},t}}class R{constructor(i,e={}){e.threshold??=-16,e.attack??=.003,e.release??=.4,e.knee??=3,e.ratio??=2;const n=i.createDynamicsCompressor();return n.threshold.setValueAtTime(e.threshold,i.currentTime),n.knee.setValueAtTime(e.knee,i.currentTime),n.ratio.setValueAtTime(e.ratio,i.currentTime),n.attack.setValueAtTime(e.attack,i.currentTime),n.release.setValueAtTime(e.release,i.currentTime),new m(n)}}class z{constructor(i,e={}){e.threshold??=-3,e.attack??=.001,e.release??=.05,e.knee??=0,e.ratio??=20;const n=i.createDynamicsCompressor();return 
n.threshold.setValueAtTime(e.threshold,i.currentTime),n.knee.setValueAtTime(e.knee,i.currentTime),n.ratio.setValueAtTime(e.ratio,i.currentTime),n.attack.setValueAtTime(e.attack,i.currentTime),n.release.setValueAtTime(e.release,i.currentTime),new m(n)}}class M{constructor(i,e={}){e.bands??=[];const n=[];if(e.bands.length<1)throw new Error("at least one band has to be specified");for(let t=0;t<e.bands.length;t++){const s={type:"peaking",freq:64*Math.pow(2,t),q:1,gain:1,...e.bands[t]},o=i.createBiquadFilter();o.type=s.type,o.frequency.setValueAtTime(s.freq,i.currentTime),o.Q.setValueAtTime(s.q,i.currentTime),o.gain.setValueAtTime(s.gain,i.currentTime),n.push(o),t>0&&n[t-1].connect(n[t])}return e.bands.length===1?new m(n[0]):new m(n[0],n[n.length-1])}}class k{constructor(i,e={}){e.fftSize??=512,e.minDecibels??=-94,e.maxDecibels??=0,e.smoothingTimeConstant??=.8,e.intervalTime??=3,e.intervalCount??=100;const n=i.createAnalyser();n.fftSize=e.fftSize,n.minDecibels=e.minDecibels,n.maxDecibels=e.maxDecibels,n.smoothingTimeConstant=e.smoothingTimeConstant;const t={peak:-1/0,rms:-1/0,rmsM:-1/0,rmsS:-1/0},s=e.intervalCount;let o=!0,l=0;const c=[],u=new Float32Array(n.fftSize),g=new Float32Array(n.frequencyBinCount),f=()=>{n.getFloatTimeDomainData(u),n.getFloatFrequencyData(g);let r=0,b=-1/0;for(let a=0;a<u.length;a++){const w=u[a]*u[a];r+=w,b<w&&(b=w)}t.rms=V(C(Math.sqrt(r/u.length)),e.minDecibels,e.maxDecibels),t.peak=V(C(Math.sqrt(b)),e.minDecibels,e.maxDecibels),s>0&&(l===s-1&&o&&(o=!1),l=(l+1)%s,c[l]=t.rms,t.rmsM=v(c,o,l,s))};setInterval(f,e.intervalTime),f();const h=new m(n);return h.dataT=()=>u,h.dataF=()=>g,h.stat=()=>t,h}}class F{constructor(i,e={}){e.threshold??=-45,e.hysteresis??=-3,e.reduction??=-30,e.interval??=2,e.attack??=4,e.hold??=40,e.release??=200;const n=new k(i,{fftSize:512,minDecibels:-94,maxDecibels:0,smoothingTimeConstant:.8,intervalTime:2,intervalCount:25}),t=i.createGain();n.connect(t);let s="open",o=NaN;const l=1,c=N(e.reduction),u=()=>{const 
g=n.stat().rmsM;s==="closed"?g>=e.threshold&&(s="attack",t.gain.cancelScheduledValues(i.currentTime),t.gain.linearRampToValueAtTime(l,i.currentTime+e.attack/1e3),Number.isNaN(o)||clearTimeout(o),o=setTimeout(()=>{s="open"},e.attack)):s==="attack"?g<e.threshold+e.hysteresis&&(s="release",t.gain.cancelScheduledValues(i.currentTime),t.gain.linearRampToValueAtTime(c,i.currentTime+e.release/1e3),Number.isNaN(o)||clearTimeout(o),o=setTimeout(()=>{s="closed"},e.release)):s==="open"?g<e.threshold+e.hysteresis&&(s="hold",Number.isNaN(o)||clearTimeout(o),o=setTimeout(()=>{s="release",t.gain.cancelScheduledValues(i.currentTime),t.gain.linearRampToValueAtTime(c,i.currentTime+e.release/1e3),o=setTimeout(()=>{s="closed"},e.release)},e.hold)):s==="hold"?g>=e.threshold&&(s="open",Number.isNaN(o)||clearTimeout(o)):s==="release"&&g>=e.threshold&&(s="attack",t.gain.cancelScheduledValues(i.currentTime),t.gain.linearRampToValueAtTime(c,i.currentTime+e.attack/1e3),Number.isNaN(o)||clearTimeout(o),o=setTimeout(()=>{s="open"},e.attack)),setTimeout(u,e.interval)};return setTimeout(u,e.interval),new m(n,t)}}class E{constructor(i,e={}){e.fftSize??=512,e.minDecibels??=-60,e.maxDecibels??=0,e.smoothingTimeConstant??=.8,e.intervalTime??=1e3/60,e.intervalCount??=Math.round(300/(1e3/60)),e.decibelBars??=[-60,-45,-21,-6],e.colorBars??=["#306090","#00b000","#e0d000","#e03030"],e.colorBarsDeactive??=["#606060","#808080","#a0a0a0","#c0c0c0"],e.colorRMS??="#ffffff",e.colorBackground??="#000000",e.horizontal??=!1;const n=new k(i,{fftSize:e.fftSize,minDecibels:e.minDecibels,maxDecibels:e.maxDecibels,smoothingTimeConstant:e.smoothingTimeConstant,intervalTime:e.intervalTime,intervalCount:e.intervalCount});let t=[],s,o=!1;n.deactive=c=>{o=c};const l=c=>{const u=n.stat().peak,g=n.stat().rmsM,f=c.getContext("2d");f.fillStyle=e.colorBackground,f.fillRect(0,0,c.width,c.height);const 
h=o?e.colorBarsDeactive:e.colorBars,r=T=>e.horizontal?T/(e.maxDecibels-e.minDecibels)*c.width:T/(e.maxDecibels-e.minDecibels)*c.height,b=(T,A,I)=>{const q=r(Math.abs(A-e.minDecibels)),S=r(Math.abs(A-T));f.fillStyle=I,e.horizontal?f.fillRect(q-S,0,S,c.height):f.fillRect(0,c.height-q,c.width,S)},a=Math.min(e.decibelBars.length,h.length);let w=e.minDecibels,y=h[0];for(let T=0;T<a&&!(u<e.decibelBars[T]);T++){const A=e.decibelBars[T];b(w,A,y),y=h[T],w=A}b(w,u,y);const D=r(Math.abs(g-e.minDecibels));f.fillStyle=e.colorRMS,e.horizontal?f.fillRect(D-1,0,1,c.height):f.fillRect(0,c.height-D,c.width,1)};return n.draw=function(c){t.push(c),t.length===1&&(s=new _(()=>{for(const u of t)l(u)}))},n.undraw=function(c){t=t.filter(u=>u!==c),t.length===0&&s.clear()},n}}class O{constructor(i,e={}){e.fftSize??=8192,e.minDecibels??=-144,e.maxDecibels??=0,e.smoothingTimeConstant??=.8,e.intervalTime??=1e3/60,e.layers??=[-120,-90,-60,-50,-40,-30,-20,-10],e.slices??=[40,80,160,320,640,1280,2560,5120,10240,20480],e.colorBackground??="#000000",e.colorBars??="#00cc00",e.colorLayers??="#009900",e.colorSlices??="#009900",e.logarithmic??=!0;const n=new k(i,{fftSize:e.fftSize,minDecibels:e.minDecibels,maxDecibels:e.maxDecibels,smoothingTimeConstant:e.smoothingTimeConstant,intervalTime:e.intervalTime,intervalCount:0});let t=[],s;const o=l=>{const c=n.dataF(),u=l.getContext("2d");u.fillStyle=e.colorBackground,u.fillRect(0,0,l.width,l.height);const g=f=>f/(e.maxDecibels-e.minDecibels)*l.height;u.fillStyle=e.colorLayers;for(const f of e.layers){const h=g(Math.abs(f-e.minDecibels));u.fillRect(0,l.height-h,l.width,1)}u.fillStyle=e.colorSlices;for(const f of e.slices){const h=Math.log2(f/20)*(l.width/10);u.fillRect(h,0,1,l.height)}if(u.fillStyle=e.colorBars,e.logarithmic)for(let f=0;f<l.width;f++){const r=20*Math.pow(2,f*10/l.width),b=20*Math.pow(2,(f+1)*10/l.width),a=Math.round(r*(c.length/(20*Math.pow(2,10))));let w=Math.round(b*(c.length/(20*Math.pow(2,10))))-1;w<a&&(w=a);let y=0;for(let 
T=a;T<=w;T++)y+=c[T];y/=w+1-a;const D=g(y-e.minDecibels);u.fillRect(f,l.height-D,1,D)}else{let f=0;const h=l.width/c.length;for(let r=0;r<c.length;r++){const b=c[r],a=g(b-e.minDecibels);u.fillRect(f,l.height-a,h-.5,a),f+=h}}};return n.draw=function(l){t.push(l),t.length===1&&(s=new _(()=>{for(const c of t)o(c)}))},n.undraw=function(l){t=t.filter(c=>c!==l),t.length===0&&s.clear()},n}}class L{constructor(i,e={}){e.equalizer??=!0,e.noisegate??=!0,e.compressor??=!0,e.limiter??=!0,e.gain??=0;const n=[];let t=0;const s=new j(i);if(n.push(s),e.equalizer){const c=new M(i,{bands:[{type:"highpass",freq:80,q:.25},{type:"highpass",freq:80,q:.5},{type:"notch",freq:50,q:.25},{type:"notch",freq:960,q:4},{type:"lowpass",freq:20480,q:.5},{type:"lowpass",freq:20480,q:.25}]});n.push(c)}if(e.noisegate){const c=new F(i);n.push(c)}if(e.compressor){const c=new R(i,{threshold:-16,attack:.003,release:.4,knee:3,ratio:2});n.push(c),t+=-2}if(e.equalizer){const c=new M(i,{bands:[{type:"peaking",freq:240,q:.75,gain:3},{type:"highshelf",freq:3840,q:.75,gain:6}]});n.push(c),t+=-1}const o=new B(i);if(n.push(o),e.limiter){const c=new z(i,{threshold:-3,attack:.001,release:.05,knee:0,ratio:20});n.push(c),t+=-1}const l=m.factory(n);return l.mute=c=>s.mute(c),l.adjustGainDecibel=(c,u=10)=>o.adjustGainDecibel(t+c,u),l.adjustGainDecibel(t+e.gain,0),l}}const W={AudioNodeComposite:m,AudioNodeNoise:G,AudioNodeGain:B,AudioNodeCompressor:R,AudioNodeLimiter:z,AudioNodeEqualizer:M,AudioNodeMeter:k,AudioNodeGate:F,AudioNodeAmplitude:E,AudioNodeSpectrum:O,AudioNodeVoice:L};module.exports=W; | ||
"use strict"; | ||
/*  AudioNodeComposite: wrap a chain of AudioNodes (an "input" head node and
    an "output" tail node, possibly the same node) behind a single GainNode
    facade, so the whole chain can be connected, disconnected and bypassed
    as if it were one node.  */
class AudioNodeComposite extends GainNode {
    /*  pass-through construction: the wrapped chain is attached later via chain()  */
    constructor(context) {
        super(context);
        this.input = null;     /* head node of the wrapped chain (receives the facade's signal) */
        this.output = null;    /* tail node of the wrapped chain (feeds the connected targets) */
        this._bypass = false;  /* whether the wrapped chain is currently bypassed */
        this._targets = [];    /* tracked connect() argument lists, kept for later re-wiring */
    }
    /*  configure the input/output chain  */
    chain(input, output = input) {
        if (typeof input !== "object" || !(input instanceof AudioNode))
            throw new Error("input has to be a valid AudioNode");
        this.input = input;
        this.output = output;
        if (this._bypass) {
            /* bypassed: targets keep receiving the facade's unprocessed signal */
            for (const _target of this._targets)
                super.connect(..._target);
        } else {
            /* re-route every previously tracked target through the new chain */
            for (const _target of this._targets) {
                super.disconnect(..._target);
                this.output.connect(..._target);
            }
            /* feed the facade's incoming signal into the chain head — only
               possible if the head accepts input (i.e. is not a pure source) */
            if (this.input.numberOfInputs > 0)
                super.connect(this.input);
        }
    }
    /*  overloaded Web API "connect" method: track the target and attach it to
        the chain output (or to the facade itself while bypassed/unchained)  */
    connect(...args) {
        this._targets.push(args);
        let result;
        if (this._bypass || this.output === null)
            result = super.connect(...args);
        else
            result = this.output.connect(...args);
        return result;
    }
    /*  overloaded Web API "disconnect" method: detach the target and stop
        tracking the exactly-matching argument lists  */
    disconnect(...args) {
        let result;
        if (this._bypass || this.output === null)
            result = super.disconnect(...args);
        else
            result = this.output.disconnect(...args);
        /* keep only tracked argument lists that do NOT match this call */
        this._targets = this._targets.filter((_target) => {
            if (_target.length !== args.length)
                return true;
            for (let i = 0; i < args.length; i++)
                if (_target[i] !== args[i])
                    return true;
            return false;
        });
        return result;
    }
    /*  custom "bypass" method: re-wire all tracked targets either directly to
        the facade (bypass on) or back through the wrapped chain (bypass off)  */
    bypass(bypass) {
        if (this._bypass === bypass)
            return;
        this._bypass = bypass;
        if (this._bypass) {
            /* detach the chain and short-circuit facade -> targets */
            if (this.input !== null && this.input.numberOfInputs > 0)
                super.disconnect(this.input);
            for (const _target of this._targets) {
                if (this.output !== null)
                    this.output.disconnect(..._target);
                super.connect(..._target);
            }
        } else {
            /* restore facade -> chain -> targets routing */
            for (const _target of this._targets) {
                super.disconnect(..._target);
                if (this.output !== null)
                    this.output.connect(..._target);
            }
            if (this.input !== null && this.input.numberOfInputs > 0)
                super.connect(this.input);
        }
    }
    /*  convenience factory: connect the given nodes in sequence and wrap them
        as a composite (first node = input, last node = output)  */
    static factory(context, nodes) {
        if (nodes.length < 1)
            throw new Error("at least one node has to be given");
        for (let i = 0; i < nodes.length - 1; i++)
            nodes[i].connect(nodes[i + 1]);
        const composite = new AudioNodeComposite(context);
        composite.chain(nodes[0], nodes[nodes.length - 1]);
        return composite;
    }
}
/*  convert a dBFS level into a linear gain factor  */
const dBFSToGain = (dbfs) => 10 ** (dbfs / 20);
/*  convert a linear gain factor into a dBFS level  */
const gainTodBFS = (gain) => Math.log10(gain) * 20;
/*  clamp a value into the inclusive range [min, max]  */
const ensureWithin = (val, min, max) => {
    if (val < min)
        return min;
    if (val > max)
        return max;
    return val;
};
/*  Weighted average over a ring buffer of measurements: "pos" is the index
    of the newest sample, "len" the nominal window size, and "init" indicates
    the buffer has not wrapped yet (so entries after "pos" are unused).
    Newer samples receive larger weights than older ones.  */
const weightedAverage = (arr, init, pos, len) => {
    const used = Math.min(arr.length, len);
    let weightedSum = 0;
    let weightTotal = 0;
    /* samples from the buffer start up to the newest one */
    for (let i = 0; i <= pos; i++) {
        const weight = i + (used - pos);
        weightedSum += weight * arr[i];
        weightTotal += weight;
    }
    /* once wrapped, also fold in the older samples after "pos" */
    if (!init) {
        for (let i = pos + 1; i < used; i++) {
            const weight = i - (pos + 1);
            weightedSum += weight * arr[i];
            weightTotal += weight;
        }
    }
    return weightedSum / weightTotal;
};
/*  AnimationFrameTimer: periodically invoke a callback — via
    requestAnimationFrame when running inside a browser, or via a ~60Hz
    interval timer otherwise (e.g. under Node.js or a worker).  */
class AnimationFrameTimer {
    /*  start invoking "cb" periodically  */
    constructor(cb) {
        this.timer = NaN;       /* interval handle (non-browser path only) */
        this.timerStop = false; /* stop flag (browser rAF path only) */
        /*  BUGFIX: the check has to use "typeof" — a bare "window"
            reference throws a ReferenceError outside the browser  */
        if (typeof window !== "undefined") {
            const once = () => {
                cb();
                if (!this.timerStop)
                    window.requestAnimationFrame(once);
            };
            window.requestAnimationFrame(once);
        } else
            this.timer = setInterval(() => cb(), 1e3 / 60);
    }
    /*  stop the periodic callback invocation  */
    clear() {
        if (typeof window !== "undefined")
            this.timerStop = true;
        else
            clearInterval(this.timer); /* was clearTimeout — pair intervals with clearInterval */
    }
}
/*  AudioNodeNoise: looping noise source ("white" or "pink"),
    exposed as a composite node.  */
class AudioNodeNoise extends AudioNodeComposite {
    constructor(context, params = {}) {
        super(context);
        params.type ??= "pink";
        params.channels ??= 1;
        /* pre-render five seconds of noise into a buffer */
        const samples = 5 * context.sampleRate;
        const buffer = context.createBuffer(params.channels, samples, context.sampleRate);
        if (params.type === "white") {
            /* white noise: uniform random samples, identical on every channel */
            for (let pos = 0; pos < samples; pos++) {
                const value = Math.random() * 2 - 1;
                for (let ch = 0; ch < params.channels; ch++) {
                    const data = buffer.getChannelData(ch);
                    data[pos] = value;
                }
            }
        } else if (params.type === "pink") {
            /* pink noise: filter white noise (Paul Kellet style coefficients) */
            const pink = [];
            for (let ch = 0; ch < params.channels; ch++) {
                pink[ch] = new Float32Array(samples);
                const b = [0, 0, 0, 0, 0, 0, 0];
                for (let pos = 0; pos < samples; pos++) {
                    const white = Math.random() * 2 - 1;
                    b[0] = 0.99886 * b[0] + white * 0.0555179;
                    b[1] = 0.99332 * b[1] + white * 0.0750759;
                    b[2] = 0.969 * b[2] + white * 0.153852;
                    b[3] = 0.8665 * b[3] + white * 0.3104856;
                    b[4] = 0.55 * b[4] + white * 0.5329522;
                    b[5] = -0.7616 * b[5] - white * 0.016898;
                    pink[ch][pos] = b[0] + b[1] + b[2] + b[3] + b[4] + b[5] + b[6] + white * 0.5362;
                    b[6] = white * 0.115926;
                }
            }
            /* normalize all channels by the global extremum */
            const mins = [];
            const maxs = [];
            for (let ch = 0; ch < pink.length; ch++) {
                mins.push(Math.min(...pink[ch]));
                maxs.push(Math.max(...pink[ch]));
            }
            const lowest = Math.min(...mins);
            const highest = Math.max(...maxs);
            const coefficient = 2147483647 / 2147483648 / Math.max(Math.abs(lowest), highest);
            for (let ch = 0; ch < params.channels; ch++)
                for (let pos = 0; pos < samples; pos++)
                    buffer.getChannelData(ch)[pos] = pink[ch][pos] * coefficient;
        }
        /* play the rendered buffer in an endless loop */
        const source = context.createBufferSource();
        source.channelCount = params.channels;
        source.buffer = buffer;
        source.loop = true;
        source.start(0);
        this.chain(source);
    }
}
/*  AudioNodeMute: a mute switch implemented on the facade GainNode itself
    (gain 1 = unmuted, gain 0 = muted).  */
class AudioNodeMute extends AudioNodeComposite {
    constructor(context, params = {}) {
        super(context);
        params.muted ??= false;
        const initial = params.muted ? 0 : 1;
        this.gain.setValueAtTime(initial, this.context.currentTime);
    }
    /*  mute/unmute with a short linear ramp of "ms" milliseconds  */
    mute(_mute, ms = 10) {
        const target = _mute ? 0 : 1;
        this.gain.linearRampToValueAtTime(target, this.context.currentTime + ms / 1e3);
    }
}
/*  AudioNodeGain: a gain stage controlled in decibels (dBFS),
    implemented on the facade GainNode itself.  */
class AudioNodeGain extends AudioNodeComposite {
    constructor(context, params = {}) {
        super(context);
        params.gain ??= 0;
        const initial = dBFSToGain(params.gain);
        this.gain.setValueAtTime(initial, this.context.currentTime);
    }
    /*  ramp the gain to "db" decibels over "ms" milliseconds  */
    adjustGainDecibel(db, ms = 10) {
        const when = this.context.currentTime + ms / 1e3;
        this.gain.linearRampToValueAtTime(dBFSToGain(db), when);
    }
}
/*  AudioNodeCompressor: a DynamicsCompressorNode pre-configured for
    gentle compression (times in seconds, levels in decibels).  */
class AudioNodeCompressor extends AudioNodeComposite {
    constructor(context, params = {}) {
        super(context);
        params.threshold ??= -16;
        params.attack ??= 3e-3;
        params.release ??= 0.4;
        params.knee ??= 3;
        params.ratio ??= 2;
        const compressor = context.createDynamicsCompressor();
        /* transfer each parameter onto the corresponding AudioParam */
        for (const key of ["threshold", "knee", "ratio", "attack", "release"])
            compressor[key].setValueAtTime(params[key], context.currentTime);
        this.chain(compressor);
    }
}
/*  AudioNodeLimiter: a DynamicsCompressorNode pre-configured as a hard
    limiter — high ratio, no knee, fast attack/release.  */
class AudioNodeLimiter extends AudioNodeComposite {
    constructor(context, params = {}) {
        super(context);
        params.threshold ??= -3;
        params.attack ??= 1e-3;
        params.release ??= 0.05;
        params.knee ??= 0;
        params.ratio ??= 20;
        const limiter = context.createDynamicsCompressor();
        /* transfer each parameter onto the corresponding AudioParam */
        for (const key of ["threshold", "knee", "ratio", "attack", "release"])
            limiter[key].setValueAtTime(params[key], context.currentTime);
        this.chain(limiter);
    }
}
/*  AudioNodeEqualizer: a series chain of BiquadFilterNodes, one per
    configured band (band defaults: peaking filter at octave-spaced
    frequencies starting at 64Hz, Q 1, gain 1).  */
class AudioNodeEqualizer extends AudioNodeComposite {
    /* global BiquadFilterType */
    constructor(context, params = {}) {
        super(context);
        params.bands ??= [];
        if (params.bands.length < 1)
            throw new Error("at least one band has to be specified");
        /* build one biquad filter per band specification */
        const bands = params.bands.map((spec, i) => {
            const options = {
                type: "peaking",
                freq: 64 * Math.pow(2, i),
                q: 1,
                gain: 1,
                ...spec
            };
            const band = context.createBiquadFilter();
            band.type = options.type;
            band.frequency.setValueAtTime(options.freq, context.currentTime);
            band.Q.setValueAtTime(options.q, context.currentTime);
            band.gain.setValueAtTime(options.gain, context.currentTime);
            return band;
        });
        /* wire the filters in series and expose head/tail as the chain */
        for (let i = 1; i < bands.length; i++)
            bands[i - 1].connect(bands[i]);
        this.chain(bands[0], bands[bands.length - 1]);
    }
}
/*  AudioNodeMeter: an AnalyserNode-based level meter. Periodically samples
    the time/frequency domain data and derives level statistics (in dBFS):
    "peak" (largest sample magnitude), "rms" (instantaneous RMS) and "rmsM"
    (weighted moving average of the last "intervalCount" RMS values).
    Note: "rmsS" is initialized but never written by this class.  */
class AudioNodeMeter extends AudioNodeComposite {
    constructor(context, params = {}) {
        super(context);
        params.fftSize ??= 512;
        params.minDecibels ??= -94;
        params.maxDecibels ??= 0;
        params.smoothingTimeConstant ??= 0.8;
        params.intervalTime ??= 3;       /* sampling period in ms */
        params.intervalCount ??= 100;    /* moving-average window size */
        const analyser = context.createAnalyser();
        analyser.fftSize = params.fftSize;
        analyser.minDecibels = params.minDecibels;
        analyser.maxDecibels = params.maxDecibels;
        analyser.smoothingTimeConstant = params.smoothingTimeConstant;
        this.chain(analyser);
        const stat = { peak: -Infinity, rms: -Infinity, rmsM: -Infinity, rmsS: -Infinity };
        /* ring buffer of recent RMS values for the moving average */
        const rmsLen = params.intervalCount;
        let rmsInit = true;   /* true until the ring buffer has filled once */
        let rmsPos = 0;       /* index of the newest entry */
        const rmsArr = [];
        const dataT = new Float32Array(analyser.fftSize);
        const dataF = new Float32Array(analyser.frequencyBinCount);
        const measure = () => {
            analyser.getFloatTimeDomainData(dataT);
            analyser.getFloatFrequencyData(dataF);
            /* accumulate squared samples: sum for RMS, maximum for peak */
            let rms = 0;
            let peak = -Infinity;
            for (let i = 0; i < dataT.length; i++) {
                const square = dataT[i] * dataT[i];
                rms += square;
                if (peak < square)
                    peak = square;
            }
            /* convert to dBFS and clamp into the analyser's decibel range */
            stat.rms = ensureWithin(
                gainTodBFS(Math.sqrt(rms / dataT.length)),
                params.minDecibels,
                params.maxDecibels
            );
            stat.peak = ensureWithin(
                gainTodBFS(Math.sqrt(peak)),
                params.minDecibels,
                params.maxDecibels
            );
            /* fold the instantaneous RMS into the moving average */
            if (rmsLen > 0) {
                if (rmsPos === rmsLen - 1 && rmsInit)
                    rmsInit = false;
                rmsPos = (rmsPos + 1) % rmsLen;
                rmsArr[rmsPos] = stat.rms;
                stat.rmsM = weightedAverage(rmsArr, rmsInit, rmsPos, rmsLen);
            }
        };
        /* NOTE(review): the interval id is discarded, so the sampling can
           never be stopped and the meter keeps running for the context's
           lifetime — confirm this is acceptable for callers */
        setInterval(measure, params.intervalTime);
        measure();
        /* accessors for the latest raw data and derived statistics */
        this.dataT = () => dataT;
        this.dataF = () => dataF;
        this.stat = () => stat;
    }
}
/*  AudioNodeGate: a noise gate driven by the windowed RMS level of an
    internal meter. Implements the state machine
        closed -> attack -> open -> hold -> release -> closed
    where the gain is ramped between 1 (open) and dBFSToGain(reduction)
    (closed). Levels are in dBFS, times in milliseconds.  */
class AudioNodeGate extends AudioNodeComposite {
    constructor(context, params = {}) {
        super(context);
        params.threshold ??= -45;   /* level above which the gate opens (dBFS) */
        params.hysteresis ??= -3;   /* extra margin before the gate starts closing */
        params.reduction ??= -30;   /* attenuation applied while closed (dBFS) */
        params.interval ??= 2;      /* polling period of the control loop (ms) */
        params.attack ??= 4;        /* ramp time to open (ms) */
        params.hold ??= 40;         /* time to stay open below threshold (ms) */
        params.release ??= 200;     /* ramp time to closed (ms) */
        /* level meter feeding a controllable gain stage */
        const meter = new AudioNodeMeter(context, {
            fftSize: 512,
            minDecibels: -94,
            maxDecibels: 0,
            smoothingTimeConstant: 0.8,
            intervalTime: 2,
            intervalCount: 25
        });
        const gain = context.createGain();
        meter.connect(gain);
        let state = "open";
        let timer = NaN;
        const gainOpen = 1;
        const gainClosed = dBFSToGain(params.reduction);
        const controlGain = () => {
            const level = meter.stat().rmsM;
            if (state === "closed") {
                /* closed: start re-opening once the level reaches the threshold */
                if (level >= params.threshold) {
                    state = "attack";
                    gain.gain.cancelScheduledValues(context.currentTime);
                    gain.gain.linearRampToValueAtTime(gainOpen, context.currentTime + params.attack / 1e3);
                    if (!Number.isNaN(timer))
                        clearTimeout(timer);
                    timer = setTimeout(() => {
                        state = "open";
                    }, params.attack);
                }
            } else if (state === "attack") {
                /* attack: abort opening if the level drops again */
                if (level < params.threshold + params.hysteresis) {
                    state = "release";
                    gain.gain.cancelScheduledValues(context.currentTime);
                    gain.gain.linearRampToValueAtTime(gainClosed, context.currentTime + params.release / 1e3);
                    if (!Number.isNaN(timer))
                        clearTimeout(timer);
                    timer = setTimeout(() => {
                        state = "closed";
                    }, params.release);
                }
            } else if (state === "open") {
                /* open: once the level drops, hold for a while before releasing */
                if (level < params.threshold + params.hysteresis) {
                    state = "hold";
                    if (!Number.isNaN(timer))
                        clearTimeout(timer);
                    timer = setTimeout(() => {
                        state = "release";
                        gain.gain.cancelScheduledValues(context.currentTime);
                        gain.gain.linearRampToValueAtTime(gainClosed, context.currentTime + params.release / 1e3);
                        timer = setTimeout(() => {
                            state = "closed";
                        }, params.release);
                    }, params.hold);
                }
            } else if (state === "hold") {
                /* hold: level recovered, go straight back to open */
                if (level >= params.threshold) {
                    state = "open";
                    if (!Number.isNaN(timer))
                        clearTimeout(timer);
                }
            } else if (state === "release") {
                /* release: level recovered, ramp back up to the open gain.
                   BUGFIX: the original ramped to gainClosed here, so a gate
                   re-opening out of "release" stayed at the reduced gain. */
                if (level >= params.threshold) {
                    state = "attack";
                    gain.gain.cancelScheduledValues(context.currentTime);
                    gain.gain.linearRampToValueAtTime(gainOpen, context.currentTime + params.attack / 1e3);
                    if (!Number.isNaN(timer))
                        clearTimeout(timer);
                    timer = setTimeout(() => {
                        state = "open";
                    }, params.attack);
                }
            }
            /* re-arm the control loop (note: it runs for the context's lifetime) */
            setTimeout(controlGain, params.interval);
        };
        setTimeout(controlGain, params.interval);
        this.chain(meter, gain);
    }
}
/*  AudioNodeAmplitude: a meter that renders peak/RMS amplitude as colored
    level bars onto one or more HTML canvases (vertical by default,
    horizontal on request).  */
class AudioNodeAmplitude extends AudioNodeMeter {
    constructor(context, params = {}) {
        super(context, {
            fftSize: params.fftSize ??= 512,
            minDecibels: params.minDecibels ??= -60,
            maxDecibels: params.maxDecibels ??= 0,
            smoothingTimeConstant: params.smoothingTimeConstant ??= 0.8,
            intervalTime: params.intervalTime ??= 1e3 / 60,
            intervalCount: params.intervalCount ??= Math.round(300 / (1e3 / 60))
            /* for 300ms RMS/m */
        });
        this._canvases = [];    /* canvases currently being rendered to */
        this._timer = null;     /* animation timer, active while canvases exist */
        this._deactive = false; /* render with the "deactive" color set */
        params.decibelBars ??= [-60, -45, -21, -6];
        params.colorBars ??= ["#306090", "#00b000", "#e0d000", "#e03030"];
        params.colorBarsDeactive ??= ["#606060", "#808080", "#a0a0a0", "#c0c0c0"];
        params.colorRMS ??= "#ffffff";
        params.colorBackground ??= "#000000";
        params.horizontal ??= false;
        this._params = params;
    }
    /*  draw amplitude into a single canvas  */
    _draw(canvas) {
        const peak = this.stat().peak;
        const rms = this.stat().rmsM;
        const canvasCtx = canvas.getContext("2d");
        canvasCtx.fillStyle = this._params.colorBackground;
        canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
        const colorBars = this._deactive ? this._params.colorBarsDeactive : this._params.colorBars;
        /* map a decibel span onto canvas pixels along the meter axis */
        const scaleToCanvasUnits = (value) => {
            if (this._params.horizontal)
                return value / (this._params.maxDecibels - this._params.minDecibels) * canvas.width;
            else
                return value / (this._params.maxDecibels - this._params.minDecibels) * canvas.height;
        };
        /* fill the segment between two decibel levels with one color */
        const drawSeg = (from2, to, color2) => {
            const b = scaleToCanvasUnits(Math.abs(to - this._params.minDecibels));
            const h2 = scaleToCanvasUnits(Math.abs(to - from2));
            canvasCtx.fillStyle = color2;
            if (this._params.horizontal)
                canvasCtx.fillRect(b - h2, 0, h2, canvas.height);
            else
                canvasCtx.fillRect(0, canvas.height - b, canvas.width, h2);
        };
        /* draw stacked segments up to the current peak; note the color is
           updated AFTER each segment, so segment i uses the color chosen
           at threshold i-1 (colors trail the thresholds by one band) */
        const len = Math.min(this._params.decibelBars.length, colorBars.length);
        let from = this._params.minDecibels;
        let color = colorBars[0];
        for (let i = 0; i < len; i++) {
            if (peak < this._params.decibelBars[i])
                break;
            else {
                const to = this._params.decibelBars[i];
                drawSeg(from, to, color);
                color = colorBars[i];
                from = to;
            }
        }
        drawSeg(from, peak, color);
        /* overlay a one-pixel marker at the moving-average RMS level */
        const h = scaleToCanvasUnits(Math.abs(rms - this._params.minDecibels));
        canvasCtx.fillStyle = this._params.colorRMS;
        if (this._params.horizontal)
            canvasCtx.fillRect(h - 1, 0, 1, canvas.height);
        else
            canvasCtx.fillRect(0, canvas.height - h, canvas.width, 1);
    }
    /*  add a canvas for amplitude visualization (starts the render timer
        with the first canvas)  */
    draw(canvas) {
        this._canvases.push(canvas);
        if (this._canvases.length === 1) {
            this._timer = new AnimationFrameTimer(() => {
                for (const canvas2 of this._canvases)
                    this._draw(canvas2);
            });
        }
    }
    /*  remove a canvas again (stops the render timer with the last canvas).
        BUGFIX: guard against a null timer — calling undraw() before any
        draw() used to throw a TypeError.  */
    undraw(canvas) {
        this._canvases = this._canvases.filter((c) => c !== canvas);
        if (this._canvases.length === 0 && this._timer !== null) {
            this._timer.clear();
            this._timer = null;
        }
    }
    /*  switch between active and deactive color sets  */
    deactive(_deactive) {
        this._deactive = _deactive;
    }
}
/*  AudioNodeSpectrum: a meter that renders the frequency spectrum onto one
    or more HTML canvases, with horizontal decibel layer lines, vertical
    frequency slice lines, and either a logarithmic (octave-based, 20Hz to
    20.48kHz over 10 octaves) or linear frequency axis.  */
class AudioNodeSpectrum extends AudioNodeMeter {
    constructor(context, params = {}) {
        super(context, {
            fftSize: params.fftSize ??= 8192,
            minDecibels: params.minDecibels ??= -144,
            maxDecibels: params.maxDecibels ??= 0,
            smoothingTimeConstant: params.smoothingTimeConstant ??= 0.8,
            intervalTime: params.intervalTime ??= 1e3 / 60,
            intervalCount: 0
        });
        this._canvases = []; /* canvases currently being rendered to */
        this._timer = null;  /* animation timer, active while canvases exist */
        params.layers ??= [-120, -90, -60, -50, -40, -30, -20, -10];
        params.slices ??= [40, 80, 160, 320, 640, 1280, 2560, 5120, 10240, 20480];
        params.colorBackground ??= "#000000";
        params.colorBars ??= "#00cc00";
        params.colorLayers ??= "#009900";
        params.colorSlices ??= "#009900";
        params.logarithmic ??= true;
        this._params = params;
    }
    /*  draw spectrum into a single canvas  */
    _draw(canvas) {
        const data = this.dataF();
        const canvasCtx = canvas.getContext("2d");
        canvasCtx.fillStyle = this._params.colorBackground;
        canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
        /* map a decibel span onto canvas pixels (vertical axis) */
        const scaleToCanvasUnits = (value) => value / (this._params.maxDecibels - this._params.minDecibels) * canvas.height;
        /* horizontal decibel layer lines */
        canvasCtx.fillStyle = this._params.colorLayers;
        for (const layer of this._params.layers) {
            const barHeight = scaleToCanvasUnits(Math.abs(layer - this._params.minDecibels));
            canvasCtx.fillRect(0, canvas.height - barHeight, canvas.width, 1);
        }
        /* vertical frequency slice lines (octave-positioned, 20Hz base) */
        canvasCtx.fillStyle = this._params.colorSlices;
        for (const slice of this._params.slices) {
            const x = Math.log2(slice / 20) * (canvas.width / 10);
            canvasCtx.fillRect(x, 0, 1, canvas.height);
        }
        canvasCtx.fillStyle = this._params.colorBars;
        if (this._params.logarithmic) {
            /* one bar per pixel column: average the FFT bins falling into the
               octave-spaced frequency range [f1, f2) of that column */
            for (let posX = 0; posX < canvas.width; posX++) {
                const barWidth = 1;
                const f1 = 20 * Math.pow(2, posX * 10 / canvas.width);
                const f2 = 20 * Math.pow(2, (posX + 1) * 10 / canvas.width);
                const k1 = Math.round(f1 * (data.length / (20 * Math.pow(2, 10))));
                let k2 = Math.round(f2 * (data.length / (20 * Math.pow(2, 10)))) - 1;
                if (k2 < k1)
                    k2 = k1;
                let db = 0;
                for (let k = k1; k <= k2; k++)
                    db += data[k];
                db /= k2 + 1 - k1;
                const barHeight = scaleToCanvasUnits(db - this._params.minDecibels);
                canvasCtx.fillRect(posX, canvas.height - barHeight, barWidth, barHeight);
            }
        } else {
            /* one bar per FFT bin, spread linearly across the width */
            let posX = 0;
            const barWidth = canvas.width / data.length;
            for (let i = 0; i < data.length; i++) {
                const db = data[i];
                const barHeight = scaleToCanvasUnits(db - this._params.minDecibels);
                canvasCtx.fillRect(posX, canvas.height - barHeight, barWidth - 0.5, barHeight);
                posX += barWidth;
            }
        }
    }
    /*  add a canvas for spectrum visualization (starts the render timer
        with the first canvas)  */
    draw(canvas) {
        this._canvases.push(canvas);
        if (this._canvases.length === 1) {
            this._timer = new AnimationFrameTimer(() => {
                for (const canvas2 of this._canvases)
                    this._draw(canvas2);
            });
        }
    }
    /*  remove a canvas again (stops the render timer with the last canvas).
        BUGFIX: guard against a null timer — calling undraw() before any
        draw() used to throw a TypeError.  */
    undraw(canvas) {
        this._canvases = this._canvases.filter((c) => c !== canvas);
        if (this._canvases.length === 0 && this._timer !== null) {
            this._timer.clear();
            this._timer = null;
        }
    }
}
class AudioNodeVoice extends AudioNodeComposite {
  /*  ready-made voice processing chain:
      mute -> [cut EQ] -> [noise gate] -> [compressor] -> [boost EQ] -> gain -> [limiter]
      (bracketed stages are optional via params)  */
  constructor(context, params = {}) {
    super(context);
    this._compensate = 0;  /* accumulated gain compensation (dB) for level-raising stages */
    params.equalizer ??= true;
    params.noisegate ??= true;
    params.compressor ??= true;
    params.limiter ??= true;
    params.gain ??= 0;
    const nodes = [];
    this._mute = new AudioNodeMute(context);
    nodes.push(this._mute);
    if (params.equalizer) {
      /* cut EQ: remove rumble (highpass), mains hum (50Hz notch),
         harshness (960Hz notch) and ultrasonics (lowpass) before dynamics */
      const cutEQ = new AudioNodeEqualizer(context, {
        bands: [
          { type: "highpass", freq: 80, q: 0.25 },
          { type: "highpass", freq: 80, q: 0.5 },
          { type: "notch", freq: 50, q: 0.25 },
          { type: "notch", freq: 960, q: 4 },
          { type: "lowpass", freq: 20480, q: 0.5 },
          { type: "lowpass", freq: 20480, q: 0.25 }
        ]
      });
      nodes.push(cutEQ);
    }
    if (params.noisegate) {
      const gate = new AudioNodeGate(context);
      nodes.push(gate);
    }
    if (params.compressor) {
      const comp = new AudioNodeCompressor(context, {
        threshold: -16,
        attack: 3e-3,
        release: 0.4,
        knee: 3,
        ratio: 2
      });
      nodes.push(comp);
      this._compensate += -2;  /* compressor raises perceived level by ~2dB */
    }
    if (params.equalizer) {
      /* boost EQ: restore warmth and presence after compression */
      const boostEQ = new AudioNodeEqualizer(context, {
        bands: [
          { type: "peaking", freq: 240, q: 0.75, gain: 3 },
          { type: "highshelf", freq: 3840, q: 0.75, gain: 6 }
        ]
      });
      nodes.push(boostEQ);
      this._compensate += -1;
    }
    this._gain = new AudioNodeGain(context);
    nodes.push(this._gain);
    if (params.limiter) {
      const limiter = new AudioNodeLimiter(context, {
        threshold: -3,
        attack: 1e-3,
        release: 0.05,
        knee: 0,
        ratio: 20
      });
      nodes.push(limiter);
      this._compensate += -1;
    }
    /* wire the stages serially and expose the whole chain */
    for (let i = 0; i < nodes.length - 1; i++)
      nodes[i].connect(nodes[i + 1]);
    this.chain(nodes[0], nodes[nodes.length - 1]);
    this.adjustGainDecibel(params.gain, 0);
  }
  /* provide mute control */
  mute(mute) {
    this._mute.mute(mute);
  }
  /* provide gain adjustment (dB), applied on top of the internal compensation */
  adjustGainDecibel(db, ms = 10) {
    this._gain.adjustGainDecibel(this._compensate + db, ms);
  }
}
/* the public library API (CommonJS export) */
const audioNodeSuite = {
  AudioNodeComposite,
  AudioNodeNoise,
  AudioNodeGain,
  AudioNodeCompressor,
  AudioNodeLimiter,
  AudioNodeEqualizer,
  AudioNodeMeter,
  AudioNodeGate,
  AudioNodeAmplitude,
  AudioNodeSpectrum,
  AudioNodeVoice
};
module.exports = audioNodeSuite;
@@ -1,203 +0,347 @@ | ||
class AudioNodeComposite extends GainNode {
  /*  Composite Web Audio API AudioNode: presents a single GainNode facade
      in front of an internal input/output node chain, with bypass support.
      While no chain is configured (output === null) or while bypassed,
      the facade connects directly to its targets.  */
  /* just pass-through construction */
  constructor(context) {
    super(context);
    this.input = null;      /* first node of the wrapped chain */
    this.output = null;     /* last node of the wrapped chain */
    this._bypass = false;   /* whether the chain is currently bypassed */
    this._targets = [];     /* tracked connect() argument lists */
  }
  /* configure the input/output chain (output defaults to input) */
  chain(input, output = input) {
    if (typeof input !== "object" || !(input instanceof AudioNode))
      throw new Error("input has to be a valid AudioNode");
    this.input = input;
    this.output = output;
    if (this._bypass) {
      /* bypassed: keep routing targets directly from the facade */
      for (const target of this._targets)
        super.connect(...target);
    } else {
      /* active: re-route already-connected targets through the chain output */
      for (const target of this._targets) {
        super.disconnect(...target);
        this.output.connect(...target);
      }
      /* feed the facade into the chain (only possible if input accepts input) */
      if (this.input.numberOfInputs > 0)
        super.connect(this.input);
    }
  }
  /* provide an overloaded Web API "connect" method */
  connect(...args) {
    this._targets.push(args);
    let result;
    if (this._bypass || this.output === null)
      result = super.connect(...args);
    else
      result = this.output.connect(...args);
    return result;
  }
  /* provide an overloaded Web API "disconnect" method */
  disconnect(...args) {
    let result;
    if (this._bypass || this.output === null)
      result = super.disconnect(...args);
    else
      result = this.output.disconnect(...args);
    /* forget every tracked target whose argument list matches exactly */
    this._targets = this._targets.filter((target) => {
      if (target.length !== args.length)
        return true;
      for (let i = 0; i < args.length; i++)
        if (target[i] !== args[i])
          return true;
      return false;
    });
    return result;
  }
  /* provide a custom "bypass" method: route targets around the chain */
  bypass(bypass) {
    if (this._bypass === bypass)
      return;
    this._bypass = bypass;
    if (this._bypass) {
      if (this.input !== null && this.input.numberOfInputs > 0)
        super.disconnect(this.input);
      for (const target of this._targets) {
        if (this.output !== null)
          this.output.disconnect(...target);
        super.connect(...target);
      }
    } else {
      for (const target of this._targets) {
        super.disconnect(...target);
        if (this.output !== null)
          this.output.connect(...target);
      }
      if (this.input !== null && this.input.numberOfInputs > 0)
        super.connect(this.input);
    }
  }
  /* provide convenient factory method: wire the given nodes serially
     and wrap them into a single composite */
  static factory(context, nodes) {
    if (nodes.length < 1)
      throw new Error("at least one node has to be given");
    for (let i = 0; i < nodes.length - 1; i++)
      nodes[i].connect(nodes[i + 1]);
    const composite = new AudioNodeComposite(context);
    composite.chain(nodes[0], nodes[nodes.length - 1]);
    return composite;
  }
}
/* convert a dBFS value into a linear gain multiplier */
const dBFSToGain = (dbfs) => Math.pow(10, dbfs / 20);

/* convert a linear gain multiplier into a dBFS value */
const gainTodBFS = (gain) => 20 * Math.log10(gain);

/* clamp a value into the inclusive range [min, max] */
const ensureWithin = (val, min, max) => {
  if (val < min)
    val = min;
  else if (val > max)
    val = max;
  return val;
};

/*  weighted average over a ring buffer "arr" of logical length "len",
    whose most recent entry is at index "pos": newer samples get larger
    weights. While "init" is true the buffer has not wrapped yet, so the
    entries after "pos" are unwritten and must be skipped.  */
const weightedAverage = (arr, init, pos, len) => {
  const max = arr.length < len ? arr.length : len;
  let avg = 0;
  let num = 0;
  /* entries 0..pos are the newest (weight grows toward pos) */
  for (let i = 0; i <= pos; i++) {
    const w = i + (max - pos);
    avg += w * arr[i];
    num += w;
  }
  /* entries pos+1..max-1 are the oldest (only valid after first wrap) */
  if (!init) {
    for (let i = pos + 1; i < max; i++) {
      const w = i - (pos + 1);
      avg += w * arr[i];
      num += w;
    }
  }
  avg /= num;
  return avg;
};
class AnimationFrameTimer {
  /*  run a callback once per display frame: uses requestAnimationFrame
      in browsers and falls back to a ~60Hz interval elsewhere (Node)  */
  constructor(cb) {
    this.timer = NaN;        /* interval id (non-browser fallback only) */
    this.timerStop = false;  /* stop flag for the rAF loop */
    /* fix: use a typeof check -- a bare "window !== void 0" throws a
       ReferenceError in environments where "window" is not declared */
    if (typeof window !== "undefined") {
      const once = () => {
        cb();
        if (!this.timerStop)
          window.requestAnimationFrame(once);
      };
      window.requestAnimationFrame(once);
    } else
      this.timer = setInterval(() => cb(), 1e3 / 60);
  }
  /* stop the timer */
  clear() {
    if (typeof window !== "undefined")
      this.timerStop = true;
    else
      clearInterval(this.timer);  /* fix: pair setInterval with clearInterval */
  }
}
class AudioNodeNoise extends AudioNodeComposite {
  /*  noise generator node: produces a 5-second looped buffer of white
      or pink noise (Paul Kellet's refined filter method for pink)  */
  constructor(context, params = {}) {
    super(context);
    params.type ??= "pink";
    params.channels ??= 1;
    const lengthInSamples = 5 * context.sampleRate;
    const buffer = context.createBuffer(params.channels, lengthInSamples, context.sampleRate);
    if (params.type === "white") {
      /* white noise: uniform samples, identical across channels */
      for (let i = 0; i < lengthInSamples; i++) {
        const rand = Math.random() * 2 - 1;
        for (let j = 0; j < params.channels; j++) {
          const data = buffer.getChannelData(j);
          data[i] = rand;
        }
      }
    } else if (params.type === "pink") {
      const pink = [];
      for (let i = 0; i < params.channels; i++) {
        pink[i] = new Float32Array(lengthInSamples);
        /* filter state of Kellet's 7-pole pinking filter */
        const b = [0, 0, 0, 0, 0, 0, 0];
        for (let j = 0; j < lengthInSamples; j++) {
          const white = Math.random() * 2 - 1;
          b[0] = 0.99886 * b[0] + white * 0.0555179;
          b[1] = 0.99332 * b[1] + white * 0.0750759;
          b[2] = 0.969 * b[2] + white * 0.153852;
          b[3] = 0.8665 * b[3] + white * 0.3104856;
          b[4] = 0.55 * b[4] + white * 0.5329522;
          b[5] = -0.7616 * b[5] - white * 0.016898;
          pink[i][j] = b[0] + b[1] + b[2] + b[3] + b[4] + b[5] + b[6] + white * 0.5362;
          b[6] = white * 0.115926;
        }
      }
      /* fix: compute min/max with loops -- spreading a ~220500-element
         Float32Array into Math.min/Math.max exceeds the engine's
         argument-count limit and throws a RangeError */
      let min = Infinity;
      let max = -Infinity;
      for (let i = 0; i < pink.length; i++) {
        for (let j = 0; j < lengthInSamples; j++) {
          const v = pink[i][j];
          if (v < min)
            min = v;
          if (v > max)
            max = v;
        }
      }
      /* normalize all channels into (-1, 1) */
      const coefficient = 2147483647 / 2147483648 / Math.max(Math.abs(min), max);
      for (let i = 0; i < params.channels; i++)
        for (let j = 0; j < lengthInSamples; j++)
          buffer.getChannelData(i)[j] = pink[i][j] * coefficient;
    }
    const bs = context.createBufferSource();
    bs.channelCount = params.channels;
    bs.buffer = buffer;
    bs.loop = true;
    bs.start(0);
    this.chain(bs);
  }
}
class AudioNodeMute extends AudioNodeComposite {
  /*  mute node: the facade GainNode itself is ramped between 0 and 1  */
  constructor(context, params = {}) {
    super(context);
    params.muted ??= false;
    this.gain.setValueAtTime(params.muted ? 0 : 1, this.context.currentTime);
  }
  /* mute (true) or unmute (false), ramped over "ms" milliseconds */
  mute(_mute, ms = 10) {
    const value = _mute ? 0 : 1;
    this.gain.linearRampToValueAtTime(value, this.context.currentTime + ms / 1e3);
  }
}
} | ||
class B { | ||
constructor(i, e = {}) { | ||
e.gain ??= 0; | ||
const n = i.createGain(); | ||
n.gain.setValueAtTime(N(e.gain), i.currentTime); | ||
const t = new m(n); | ||
return t.adjustGainDecibel = (s, o = 10) => { | ||
t.input.gain.linearRampToValueAtTime(N(s), i.currentTime + o / 1e3); | ||
}, t; | ||
class AudioNodeGain extends AudioNodeComposite { | ||
constructor(context, params = {}) { | ||
super(context); | ||
params.gain ??= 0; | ||
this.gain.setValueAtTime(dBFSToGain(params.gain), this.context.currentTime); | ||
} | ||
adjustGainDecibel(db, ms = 10) { | ||
this.gain.linearRampToValueAtTime(dBFSToGain(db), this.context.currentTime + ms / 1e3); | ||
} | ||
} | ||
class AudioNodeCompressor extends AudioNodeComposite {
  /*  compressor node: thin wrapper around DynamicsCompressorNode
      with speech-friendly defaults  */
  constructor(context, params = {}) {
    super(context);
    params.threshold ??= -16;  /* dB */
    params.attack ??= 3e-3;    /* s  */
    params.release ??= 0.4;    /* s  */
    params.knee ??= 3;         /* dB */
    params.ratio ??= 2;
    const compressor = context.createDynamicsCompressor();
    compressor.threshold.setValueAtTime(params.threshold, context.currentTime);
    compressor.knee.setValueAtTime(params.knee, context.currentTime);
    compressor.ratio.setValueAtTime(params.ratio, context.currentTime);
    compressor.attack.setValueAtTime(params.attack, context.currentTime);
    compressor.release.setValueAtTime(params.release, context.currentTime);
    this.chain(compressor);
  }
}
class AudioNodeLimiter extends AudioNodeComposite {
  /*  limiter node: a DynamicsCompressorNode configured as a hard
      limiter (high ratio, fast attack, no knee)  */
  constructor(context, params = {}) {
    super(context);
    params.threshold ??= -3;   /* dB */
    params.attack ??= 1e-3;    /* s  */
    params.release ??= 0.05;   /* s  */
    params.knee ??= 0;         /* dB */
    params.ratio ??= 20;
    const limiter = context.createDynamicsCompressor();
    limiter.threshold.setValueAtTime(params.threshold, context.currentTime);
    limiter.knee.setValueAtTime(params.knee, context.currentTime);
    limiter.ratio.setValueAtTime(params.ratio, context.currentTime);
    limiter.attack.setValueAtTime(params.attack, context.currentTime);
    limiter.release.setValueAtTime(params.release, context.currentTime);
    this.chain(limiter);
  }
}
class AudioNodeEqualizer extends AudioNodeComposite {
  /*  equalizer node: a serial chain of BiquadFilterNode bands  */
  /* global BiquadFilterType */
  constructor(context, params = {}) {
    super(context);
    params.bands ??= [];
    if (params.bands.length < 1)
      throw new Error("at least one band has to be specified");
    const bands = [];
    for (let i = 0; i < params.bands.length; i++) {
      /* per-band defaults: peaking filters at octave-spaced frequencies */
      const options = {
        type: "peaking",
        freq: 64 * Math.pow(2, i),
        q: 1,
        gain: 1,
        ...params.bands[i]
      };
      const band = context.createBiquadFilter();
      band.type = options.type;
      band.frequency.setValueAtTime(options.freq, context.currentTime);
      band.Q.setValueAtTime(options.q, context.currentTime);
      band.gain.setValueAtTime(options.gain, context.currentTime);
      bands.push(band);
      if (i > 0)
        bands[i - 1].connect(bands[i]);
    }
    /* a single band is covered too: chain(input, output) with
       output === input is exactly the one-band configuration */
    this.chain(bands[0], bands[bands.length - 1]);
  }
}
class AudioNodeMeter extends AudioNodeComposite {
  /*  meter node: passes audio through an AnalyserNode and continuously
      derives peak/RMS statistics (dBFS), plus a weighted moving average
      of the RMS over the last "intervalCount" measurements  */
  constructor(context, params = {}) {
    super(context);
    params.fftSize ??= 512;
    params.minDecibels ??= -94;
    params.maxDecibels ??= 0;
    params.smoothingTimeConstant ??= 0.8;
    params.intervalTime ??= 3;     /* ms between measurements */
    params.intervalCount ??= 100;  /* ring buffer size for RMS average */
    const analyser = context.createAnalyser();
    analyser.fftSize = params.fftSize;
    analyser.minDecibels = params.minDecibels;
    analyser.maxDecibels = params.maxDecibels;
    analyser.smoothingTimeConstant = params.smoothingTimeConstant;
    this.chain(analyser);
    const stat = { peak: -Infinity, rms: -Infinity, rmsM: -Infinity, rmsS: -Infinity };
    const rmsLen = params.intervalCount;
    let rmsInit = true;   /* ring buffer has not wrapped yet */
    let rmsPos = 0;       /* current ring buffer write position */
    const rmsArr = [];    /* ring buffer of recent RMS values */
    const dataT = new Float32Array(analyser.fftSize);
    const dataF = new Float32Array(analyser.frequencyBinCount);
    const measure = () => {
      analyser.getFloatTimeDomainData(dataT);
      analyser.getFloatFrequencyData(dataF);
      /* derive squared-sample sum (for RMS) and squared peak */
      let rms = 0;
      let peak = -Infinity;
      for (let i = 0; i < dataT.length; i++) {
        const square = dataT[i] * dataT[i];
        rms += square;
        if (peak < square)
          peak = square;
      }
      stat.rms = ensureWithin(
        gainTodBFS(Math.sqrt(rms / dataT.length)),
        params.minDecibels,
        params.maxDecibels
      );
      stat.peak = ensureWithin(
        gainTodBFS(Math.sqrt(peak)),
        params.minDecibels,
        params.maxDecibels
      );
      /* fold the new RMS into the weighted moving average */
      if (rmsLen > 0) {
        if (rmsPos === rmsLen - 1 && rmsInit)
          rmsInit = false;
        rmsPos = (rmsPos + 1) % rmsLen;
        rmsArr[rmsPos] = stat.rms;
        stat.rmsM = weightedAverage(rmsArr, rmsInit, rmsPos, rmsLen);
      }
    };
    /* NOTE(review): this interval is never cleared -- the meter keeps
       measuring for the lifetime of the context/page; confirm whether a
       teardown API is needed */
    setInterval(measure, params.intervalTime);
    measure();
    /* expose accessors for raw analyser data and derived statistics */
    this.dataT = () => dataT;
    this.dataF = () => dataF;
    this.stat = () => stat;
  }
}
class AudioNodeGate extends AudioNodeComposite {
  /*  noise-gate node: a meter drives a GainNode through a small state
      machine (closed -> attack -> open -> hold -> release)  */
  constructor(context, params = {}) {
    super(context);
    params.threshold ??= -45;   /* dB: open at/above this level */
    params.hysteresis ??= -3;   /* dB: close below threshold+hysteresis */
    params.reduction ??= -30;   /* dB: attenuation while closed */
    params.interval ??= 2;      /* ms: level polling interval */
    params.attack ??= 4;        /* ms: opening ramp time */
    params.hold ??= 40;         /* ms: hold time before releasing */
    params.release ??= 200;     /* ms: closing ramp time */
    const meter = new AudioNodeMeter(context, {
      fftSize: 512,
      minDecibels: -94,
      /* NOTE(review): further meter options were unreadable in this
         chunk (diff residue) -- confirm against the upstream source;
         AudioNodeMeter defaults apply for any omitted option */
      intervalCount: 25
    });
    const gain = context.createGain();
    meter.connect(gain);
    let state = "open";
    let timer = NaN;
    const gainOpen = 1;
    const gainClosed = dBFSToGain(params.reduction);
    const controlGain = () => {
      const level = meter.stat().rmsM;
      if (state === "closed") {
        /* signal returned: start opening */
        if (level >= params.threshold) {
          state = "attack";
          gain.gain.cancelScheduledValues(context.currentTime);
          gain.gain.linearRampToValueAtTime(gainOpen, context.currentTime + params.attack / 1e3);
          if (!Number.isNaN(timer))
            clearTimeout(timer);
          timer = setTimeout(() => {
            state = "open";
          }, params.attack);
        }
      } else if (state === "attack") {
        /* signal dropped again while opening: start closing */
        if (level < params.threshold + params.hysteresis) {
          state = "release";
          gain.gain.cancelScheduledValues(context.currentTime);
          gain.gain.linearRampToValueAtTime(gainClosed, context.currentTime + params.release / 1e3);
          if (!Number.isNaN(timer))
            clearTimeout(timer);
          timer = setTimeout(() => {
            state = "closed";
          }, params.release);
        }
      } else if (state === "open") {
        /* signal dropped: hold first, then start closing */
        if (level < params.threshold + params.hysteresis) {
          state = "hold";
          if (!Number.isNaN(timer))
            clearTimeout(timer);
          timer = setTimeout(() => {
            state = "release";
            gain.gain.cancelScheduledValues(context.currentTime);
            gain.gain.linearRampToValueAtTime(gainClosed, context.currentTime + params.release / 1e3);
            timer = setTimeout(() => {
              state = "closed";
            }, params.release);
          }, params.hold);
        }
      } else if (state === "hold") {
        /* signal returned during hold: stay open */
        if (level >= params.threshold) {
          state = "open";
          if (!Number.isNaN(timer))
            clearTimeout(timer);
        }
      } else if (state === "release") {
        /* signal returned while closing: re-open */
        if (level >= params.threshold) {
          state = "attack";
          gain.gain.cancelScheduledValues(context.currentTime);
          /* fix: ramp to the OPEN gain when re-attacking (previously
             ramped to gainClosed, leaving the gate attenuated although
             the state machine considered it open) */
          gain.gain.linearRampToValueAtTime(gainOpen, context.currentTime + params.attack / 1e3);
          if (!Number.isNaN(timer))
            clearTimeout(timer);
          timer = setTimeout(() => {
            state = "open";
          }, params.attack);
        }
      }
      setTimeout(controlGain, params.interval);
    };
    setTimeout(controlGain, params.interval);
    this.chain(meter, gain);
  }
}
class AudioNodeAmplitude extends AudioNodeMeter {
  /*  amplitude visualizer node: renders peak-level bars and an RMS
      marker onto one or more canvases, once per animation frame  */
  constructor(context, params = {}) {
    super(context, {
      fftSize: params.fftSize ??= 512,
      minDecibels: params.minDecibels ??= -60,
      maxDecibels: params.maxDecibels ??= 0,
      smoothingTimeConstant: params.smoothingTimeConstant ??= 0.8,
      intervalTime: params.intervalTime ??= 1e3 / 60,
      intervalCount: params.intervalCount ??= Math.round(300 / (1e3 / 60))
      /* for 300ms RMS/m */
    });
    this._canvases = [];
    this._timer = null;
    this._deactive = false;
    params.decibelBars ??= [-60, -45, -21, -6];
    params.colorBars ??= ["#306090", "#00b000", "#e0d000", "#e03030"];
    params.colorBarsDeactive ??= ["#606060", "#808080", "#a0a0a0", "#c0c0c0"];
    params.colorRMS ??= "#ffffff";
    params.colorBackground ??= "#000000";
    params.horizontal ??= false;
    this._params = params;
  }
  /* draw the amplitude bars into a canvas */
  _draw(canvas) {
    const peak = this.stat().peak;
    const rms = this.stat().rmsM;
    const canvasCtx = canvas.getContext("2d");
    canvasCtx.fillStyle = this._params.colorBackground;
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
    const colorBars = this._deactive ? this._params.colorBarsDeactive : this._params.colorBars;
    /* map a decibel span onto canvas pixels along the bar axis */
    const scaleToCanvasUnits = (value) => {
      if (this._params.horizontal)
        return value / (this._params.maxDecibels - this._params.minDecibels) * canvas.width;
      else
        return value / (this._params.maxDecibels - this._params.minDecibels) * canvas.height;
    };
    /* draw one colored segment of the level bar */
    const drawSeg = (from2, to, color2) => {
      const b = scaleToCanvasUnits(Math.abs(to - this._params.minDecibels));
      const h2 = scaleToCanvasUnits(Math.abs(to - from2));
      canvasCtx.fillStyle = color2;
      if (this._params.horizontal)
        canvasCtx.fillRect(b - h2, 0, h2, canvas.height);
      else
        canvasCtx.fillRect(0, canvas.height - b, canvas.width, h2);
    };
    /* render full segments up to the last threshold below the peak,
       then a partial segment up to the peak itself */
    const len = Math.min(this._params.decibelBars.length, colorBars.length);
    let from = this._params.minDecibels;
    let color = colorBars[0];
    for (let i = 0; i < len; i++) {
      if (peak < this._params.decibelBars[i])
        break;
      else {
        const to = this._params.decibelBars[i];
        drawSeg(from, to, color);
        color = colorBars[i];
        from = to;
      }
    }
    drawSeg(from, peak, color);
    /* overlay the moving RMS marker line */
    const h = scaleToCanvasUnits(Math.abs(rms - this._params.minDecibels));
    canvasCtx.fillStyle = this._params.colorRMS;
    if (this._params.horizontal)
      canvasCtx.fillRect(h - 1, 0, 1, canvas.height);
    else
      canvasCtx.fillRect(0, canvas.height - h, canvas.width, 1);
  }
  /* add a canvas for amplitude visualization
     (lazily starts one shared render timer on the first canvas) */
  draw(canvas) {
    this._canvases.push(canvas);
    if (this._canvases.length === 1) {
      this._timer = new AnimationFrameTimer(() => {
        for (const c of this._canvases)
          this._draw(c);
      });
    }
  }
  /* remove a canvas from amplitude visualization
     (stops the render timer once no canvas is left) */
  undraw(canvas) {
    this._canvases = this._canvases.filter((c) => c !== canvas);
    /* guard: _timer is null until draw() was called at least once */
    if (this._canvases.length === 0 && this._timer !== null) {
      this._timer.clear();
      this._timer = null;
    }
  }
  /* allow deactivation control (renders with the muted color set) */
  deactive(_deactive) {
    this._deactive = _deactive;
  }
}
class O { | ||
constructor(i, e = {}) { | ||
e.fftSize ??= 8192, e.minDecibels ??= -144, e.maxDecibels ??= 0, e.smoothingTimeConstant ??= 0.8, e.intervalTime ??= 1e3 / 60, e.layers ??= [-120, -90, -60, -50, -40, -30, -20, -10], e.slices ??= [40, 80, 160, 320, 640, 1280, 2560, 5120, 10240, 20480], e.colorBackground ??= "#000000", e.colorBars ??= "#00cc00", e.colorLayers ??= "#009900", e.colorSlices ??= "#009900", e.logarithmic ??= !0; | ||
const n = new k(i, { | ||
fftSize: e.fftSize, | ||
minDecibels: e.minDecibels, | ||
maxDecibels: e.maxDecibels, | ||
smoothingTimeConstant: e.smoothingTimeConstant, | ||
intervalTime: e.intervalTime, | ||
class AudioNodeSpectrum extends AudioNodeMeter { | ||
constructor(context, params = {}) { | ||
super(context, { | ||
fftSize: params.fftSize ??= 8192, | ||
minDecibels: params.minDecibels ??= -144, | ||
maxDecibels: params.maxDecibels ??= 0, | ||
smoothingTimeConstant: params.smoothingTimeConstant ??= 0.8, | ||
intervalTime: params.intervalTime ??= 1e3 / 60, | ||
intervalCount: 0 | ||
}); | ||
let t = [], s; | ||
const o = (l) => { | ||
const c = n.dataF(), u = l.getContext("2d"); | ||
u.fillStyle = e.colorBackground, u.fillRect(0, 0, l.width, l.height); | ||
const g = (f) => f / (e.maxDecibels - e.minDecibels) * l.height; | ||
u.fillStyle = e.colorLayers; | ||
for (const f of e.layers) { | ||
const h = g(Math.abs(f - e.minDecibels)); | ||
u.fillRect(0, l.height - h, l.width, 1); | ||
this._canvases = []; | ||
this._timer = null; | ||
params.layers ??= [-120, -90, -60, -50, -40, -30, -20, -10]; | ||
params.slices ??= [40, 80, 160, 320, 640, 1280, 2560, 5120, 10240, 20480]; | ||
params.colorBackground ??= "#000000"; | ||
params.colorBars ??= "#00cc00"; | ||
params.colorLayers ??= "#009900"; | ||
params.colorSlices ??= "#009900"; | ||
params.logarithmic ??= true; | ||
this._params = params; | ||
} | ||
/* draw spectrum into canvas */ | ||
_draw(canvas) { | ||
const data = this.dataF(); | ||
const canvasCtx = canvas.getContext("2d"); | ||
canvasCtx.fillStyle = this._params.colorBackground; | ||
canvasCtx.fillRect(0, 0, canvas.width, canvas.height); | ||
const scaleToCanvasUnits = (value) => value / (this._params.maxDecibels - this._params.minDecibels) * canvas.height; | ||
canvasCtx.fillStyle = this._params.colorLayers; | ||
for (const layer of this._params.layers) { | ||
const barHeight = scaleToCanvasUnits(Math.abs(layer - this._params.minDecibels)); | ||
canvasCtx.fillRect(0, canvas.height - barHeight, canvas.width, 1); | ||
} | ||
canvasCtx.fillStyle = this._params.colorSlices; | ||
for (const slice of this._params.slices) { | ||
const x = Math.log2(slice / 20) * (canvas.width / 10); | ||
canvasCtx.fillRect(x, 0, 1, canvas.height); | ||
} | ||
canvasCtx.fillStyle = this._params.colorBars; | ||
if (this._params.logarithmic) { | ||
for (let posX = 0; posX < canvas.width; posX++) { | ||
const barWidth = 1; | ||
const f1 = 20 * Math.pow(2, posX * 10 / canvas.width); | ||
const f2 = 20 * Math.pow(2, (posX + 1) * 10 / canvas.width); | ||
const k1 = Math.round(f1 * (data.length / (20 * Math.pow(2, 10)))); | ||
let k2 = Math.round(f2 * (data.length / (20 * Math.pow(2, 10)))) - 1; | ||
if (k2 < k1) | ||
k2 = k1; | ||
let db = 0; | ||
for (let k = k1; k <= k2; k++) | ||
db += data[k]; | ||
db /= k2 + 1 - k1; | ||
const barHeight = scaleToCanvasUnits(db - this._params.minDecibels); | ||
canvasCtx.fillRect(posX, canvas.height - barHeight, barWidth, barHeight); | ||
} | ||
u.fillStyle = e.colorSlices; | ||
for (const f of e.slices) { | ||
const h = Math.log2(f / 20) * (l.width / 10); | ||
u.fillRect(h, 0, 1, l.height); | ||
} else { | ||
let posX = 0; | ||
const barWidth = canvas.width / data.length; | ||
for (let i = 0; i < data.length; i++) { | ||
const db = data[i]; | ||
const barHeight = scaleToCanvasUnits(db - this._params.minDecibels); | ||
canvasCtx.fillRect(posX, canvas.height - barHeight, barWidth - 0.5, barHeight); | ||
posX += barWidth; | ||
} | ||
if (u.fillStyle = e.colorBars, e.logarithmic) | ||
for (let f = 0; f < l.width; f++) { | ||
const r = 20 * Math.pow(2, f * 10 / l.width), b = 20 * Math.pow(2, (f + 1) * 10 / l.width), a = Math.round(r * (c.length / (20 * Math.pow(2, 10)))); | ||
let w = Math.round(b * (c.length / (20 * Math.pow(2, 10)))) - 1; | ||
w < a && (w = a); | ||
let y = 0; | ||
for (let T = a; T <= w; T++) | ||
y += c[T]; | ||
y /= w + 1 - a; | ||
const D = g(y - e.minDecibels); | ||
u.fillRect(f, l.height - D, 1, D); | ||
} | ||
else { | ||
let f = 0; | ||
const h = l.width / c.length; | ||
for (let r = 0; r < c.length; r++) { | ||
const b = c[r], a = g(b - e.minDecibels); | ||
u.fillRect(f, l.height - a, h - 0.5, a), f += h; | ||
} | ||
} | ||
}; | ||
return n.draw = function(l) { | ||
t.push(l), t.length === 1 && (s = new _(() => { | ||
for (const c of t) | ||
o(c); | ||
})); | ||
}, n.undraw = function(l) { | ||
t = t.filter((c) => c !== l), t.length === 0 && s.clear(); | ||
}, n; | ||
} | ||
} | ||
/* add/remove canvas for spectrum visualization */ | ||
draw(canvas) { | ||
this._canvases.push(canvas); | ||
if (this._canvases.length === 1) { | ||
this._timer = new AnimationFrameTimer(() => { | ||
for (const canvas2 of this._canvases) | ||
this._draw(canvas2); | ||
}); | ||
} | ||
} | ||
undraw(canvas) { | ||
this._canvases = this._canvases.filter((c) => c !== canvas); | ||
if (this._canvases.length === 0) | ||
this._timer.clear(); | ||
} | ||
} | ||
class L { | ||
constructor(i, e = {}) { | ||
e.equalizer ??= !0, e.noisegate ??= !0, e.compressor ??= !0, e.limiter ??= !0, e.gain ??= 0; | ||
const n = []; | ||
let t = 0; | ||
const s = new j(i); | ||
if (n.push(s), e.equalizer) { | ||
const c = new M(i, { | ||
class AudioNodeVoice extends AudioNodeComposite { | ||
constructor(context, params = {}) { | ||
super(context); | ||
this._compensate = 0; | ||
params.equalizer ??= true; | ||
params.noisegate ??= true; | ||
params.compressor ??= true; | ||
params.limiter ??= true; | ||
params.gain ??= 0; | ||
const nodes = []; | ||
this._mute = new AudioNodeMute(context); | ||
nodes.push(this._mute); | ||
if (params.equalizer) { | ||
const cutEQ = new AudioNodeEqualizer(context, { | ||
bands: [ | ||
@@ -345,10 +614,10 @@ { type: "highpass", freq: 80, q: 0.25 }, | ||
}); | ||
n.push(c); | ||
nodes.push(cutEQ); | ||
} | ||
if (e.noisegate) { | ||
const c = new F(i); | ||
n.push(c); | ||
if (params.noisegate) { | ||
const gate = new AudioNodeGate(context); | ||
nodes.push(gate); | ||
} | ||
if (e.compressor) { | ||
const c = new R(i, { | ||
if (params.compressor) { | ||
const comp = new AudioNodeCompressor(context, { | ||
threshold: -16, | ||
@@ -360,6 +629,7 @@ attack: 3e-3, | ||
}); | ||
n.push(c), t += -2; | ||
nodes.push(comp); | ||
this._compensate += -2; | ||
} | ||
if (e.equalizer) { | ||
const c = new M(i, { | ||
if (params.equalizer) { | ||
const boostEQ = new AudioNodeEqualizer(context, { | ||
bands: [ | ||
@@ -370,7 +640,9 @@ { type: "peaking", freq: 240, q: 0.75, gain: 3 }, | ||
}); | ||
n.push(c), t += -1; | ||
nodes.push(boostEQ); | ||
this._compensate += -1; | ||
} | ||
const o = new B(i); | ||
if (n.push(o), e.limiter) { | ||
const c = new z(i, { | ||
this._gain = new AudioNodeGain(context); | ||
nodes.push(this._gain); | ||
if (params.limiter) { | ||
const limiter = new AudioNodeLimiter(context, { | ||
threshold: -3, | ||
@@ -382,23 +654,34 @@ attack: 1e-3, | ||
}); | ||
n.push(c), t += -1; | ||
nodes.push(limiter); | ||
this._compensate += -1; | ||
} | ||
const l = m.factory(n); | ||
return l.mute = (c) => s.mute(c), l.adjustGainDecibel = (c, u = 10) => o.adjustGainDecibel(t + c, u), l.adjustGainDecibel(t + e.gain, 0), l; | ||
for (let i = 0; i < nodes.length - 1; i++) | ||
nodes[i].connect(nodes[i + 1]); | ||
this.chain(nodes[0], nodes[nodes.length - 1]); | ||
this.adjustGainDecibel(params.gain, 0); | ||
} | ||
/* provide mute control */ | ||
mute(mute) { | ||
this._mute.mute(mute); | ||
} | ||
/* provide gain adjustment */ | ||
adjustGainDecibel(db, ms = 10) { | ||
this._gain.adjustGainDecibel(this._compensate + db, ms); | ||
} | ||
} | ||
const W = { | ||
AudioNodeComposite: m, | ||
AudioNodeNoise: G, | ||
AudioNodeGain: B, | ||
AudioNodeCompressor: R, | ||
AudioNodeLimiter: z, | ||
AudioNodeEqualizer: M, | ||
AudioNodeMeter: k, | ||
AudioNodeGate: F, | ||
AudioNodeAmplitude: E, | ||
AudioNodeSpectrum: O, | ||
AudioNodeVoice: L | ||
const audioNodeSuite = { | ||
AudioNodeComposite, | ||
AudioNodeNoise, | ||
AudioNodeGain, | ||
AudioNodeCompressor, | ||
AudioNodeLimiter, | ||
AudioNodeEqualizer, | ||
AudioNodeMeter, | ||
AudioNodeGate, | ||
AudioNodeAmplitude, | ||
AudioNodeSpectrum, | ||
AudioNodeVoice | ||
}; | ||
export { | ||
W as default | ||
audioNodeSuite as default | ||
}; |
@@ -1,1 +0,685 @@ | ||
(function(g,y){typeof exports=="object"&&typeof module<"u"?module.exports=y():typeof define=="function"&&define.amd?define(y):(g=typeof globalThis<"u"?globalThis:g||self,g.AudioNodeSuite=y())})(this,function(){"use strict";class g extends GainNode{constructor(i,e=i){if(super(i.context),this._bypass=!1,this._targets=[],typeof i!="object"||!(i instanceof AudioNode))throw new Error("input has to be a valid AudioNode");const n=i.context;let t;if(i.numberOfInputs>0)t=n.createGain(),t.connect(i);else{const o=n.createBufferSource();o.buffer=null,t=o}t._targets=[],t._bypass=!1,t._connect=t.connect;const s=(...o)=>{t._targets.push(o);let l;return t._bypass?i.numberOfInputs>0?l=t._connect(...o):l=i.connect(...o):l=e.connect(...o),l};return t.connect=s,t._disconnect=t.disconnect,t.disconnect=(...o)=>{let l;return t._bypass?i.numberOfInputs>0?l=t._disconnect(...o):l=i.connect(...o):l=e.disconnect(...o),t._targets=t._targets.filter(c=>{if(c.length!==o.length)return!0;for(let u=0;u<o.length;u++)if(c[u]!==o[u])return!0;return!1}),l},t.bypass=o=>{if(t._bypass!==o)if(t._bypass=o,t._bypass){i.numberOfInputs>0&&t._disconnect(i);for(const l of t._targets)e.disconnect(...l),t._connect(...l)}else{for(const l of t._targets)t._disconnect.apply(null,l),e.connect(...l);i.numberOfInputs>0&&t._connect(i)}},t.input=i,t.output=e,t}static factory(i){if(i.length<1)throw new Error("at least one node has to be given");for(let e=0;e<i.length-1;e++)i[e].connect(i[e+1]);return new g(i[0],i[i.length-1])}}const y=d=>Math.pow(10,d/20),q=d=>20*Math.log10(d),C=(d,i,e)=>(d<i?d=i:d>e&&(d=e),d),I=(d,i,e,n)=>{const t=d.length<n?d.length:n;let s=0,o=0;for(let l=0;l<=e;l++){const c=l+(t-e);s+=c*d[l],o+=c}if(!i)for(let l=e+1;l<t;l++){const c=l-(e+1);s+=c*d[l],o+=c}return s/=o,s};class V{constructor(i){if(this.timer=NaN,this.timerStop=!1,window!==void 0){const e=()=>{i(),this.timerStop||window.requestAnimationFrame(e)};window.requestAnimationFrame(e)}else 
this.timer=setInterval(()=>i(),1e3/60)}clear(){window!==void 0?this.timerStop=!0:clearTimeout(this.timer)}}class v{constructor(i,e={}){e.type??="pink",e.channels??=1;const n=5*i.sampleRate,t=i.createBuffer(e.channels,n,i.sampleRate);if(e.type==="white")for(let o=0;o<n;o++){const l=Math.random()*2-1;for(let c=0;c<e.channels;c++){const u=t.getChannelData(c);u[o]=l}}else if(e.type==="pink"){const o=[];for(let f=0;f<e.channels;f++){o[f]=new Float32Array(n);const r=[0,0,0,0,0,0,0];for(let T=0;T<n;T++){const a=Math.random()*2-1;r[0]=.99886*r[0]+a*.0555179,r[1]=.99332*r[1]+a*.0750759,r[2]=.969*r[2]+a*.153852,r[3]=.8665*r[3]+a*.3104856,r[4]=.55*r[4]+a*.5329522,r[5]=-.7616*r[5]-a*.016898,o[f][T]=r[0]+r[1]+r[2]+r[3]+r[4]+r[5]+r[6]+a*.5362,r[6]=a*.115926}}const l=[],c=[];for(let f=0;f<o.length;f++)l.push(Math.min(...o[f])),c.push(Math.max(...o[f]));const u=Math.min(...l),b=Math.max(...c),h=2147483647/2147483648/Math.max(Math.abs(u),b);for(let f=0;f<e.channels;f++)for(let r=0;r<n;r++)t.getChannelData(f)[r]=o[f][r]*h}const s=i.createBufferSource();return s.buffer=t,s.loop=!0,s.start(0),new g(s)}}class G{constructor(i,e={}){e.muted??=!1;const n=i.createGain();n.gain.setValueAtTime(e.muted?0:1,i.currentTime);const t=new g(n);return t.mute=(s,o=10)=>{const l=s?0:1;console.log("FUCK",s,l),n.gain.linearRampToValueAtTime(l,i.currentTime+o/1e3)},t}}class _{constructor(i,e={}){e.gain??=0;const n=i.createGain();n.gain.setValueAtTime(y(e.gain),i.currentTime);const t=new g(n);return t.adjustGainDecibel=(s,o=10)=>{t.input.gain.linearRampToValueAtTime(y(s),i.currentTime+o/1e3)},t}}class B{constructor(i,e={}){e.threshold??=-16,e.attack??=.003,e.release??=.4,e.knee??=3,e.ratio??=2;const n=i.createDynamicsCompressor();return n.threshold.setValueAtTime(e.threshold,i.currentTime),n.knee.setValueAtTime(e.knee,i.currentTime),n.ratio.setValueAtTime(e.ratio,i.currentTime),n.attack.setValueAtTime(e.attack,i.currentTime),n.release.setValueAtTime(e.release,i.currentTime),new g(n)}}class 
R{constructor(i,e={}){e.threshold??=-3,e.attack??=.001,e.release??=.05,e.knee??=0,e.ratio??=20;const n=i.createDynamicsCompressor();return n.threshold.setValueAtTime(e.threshold,i.currentTime),n.knee.setValueAtTime(e.knee,i.currentTime),n.ratio.setValueAtTime(e.ratio,i.currentTime),n.attack.setValueAtTime(e.attack,i.currentTime),n.release.setValueAtTime(e.release,i.currentTime),new g(n)}}class N{constructor(i,e={}){e.bands??=[];const n=[];if(e.bands.length<1)throw new Error("at least one band has to be specified");for(let t=0;t<e.bands.length;t++){const s={type:"peaking",freq:64*Math.pow(2,t),q:1,gain:1,...e.bands[t]},o=i.createBiquadFilter();o.type=s.type,o.frequency.setValueAtTime(s.freq,i.currentTime),o.Q.setValueAtTime(s.q,i.currentTime),o.gain.setValueAtTime(s.gain,i.currentTime),n.push(o),t>0&&n[t-1].connect(n[t])}return e.bands.length===1?new g(n[0]):new g(n[0],n[n.length-1])}}class S{constructor(i,e={}){e.fftSize??=512,e.minDecibels??=-94,e.maxDecibels??=0,e.smoothingTimeConstant??=.8,e.intervalTime??=3,e.intervalCount??=100;const n=i.createAnalyser();n.fftSize=e.fftSize,n.minDecibels=e.minDecibels,n.maxDecibels=e.maxDecibels,n.smoothingTimeConstant=e.smoothingTimeConstant;const t={peak:-1/0,rms:-1/0,rmsM:-1/0,rmsS:-1/0},s=e.intervalCount;let o=!0,l=0;const c=[],u=new Float32Array(n.fftSize),b=new Float32Array(n.frequencyBinCount),h=()=>{n.getFloatTimeDomainData(u),n.getFloatFrequencyData(b);let r=0,T=-1/0;for(let a=0;a<u.length;a++){const w=u[a]*u[a];r+=w,T<w&&(T=w)}t.rms=C(q(Math.sqrt(r/u.length)),e.minDecibels,e.maxDecibels),t.peak=C(q(Math.sqrt(T)),e.minDecibels,e.maxDecibels),s>0&&(l===s-1&&o&&(o=!1),l=(l+1)%s,c[l]=t.rms,t.rmsM=I(c,o,l,s))};setInterval(h,e.intervalTime),h();const f=new g(n);return f.dataT=()=>u,f.dataF=()=>b,f.stat=()=>t,f}}class z{constructor(i,e={}){e.threshold??=-45,e.hysteresis??=-3,e.reduction??=-30,e.interval??=2,e.attack??=4,e.hold??=40,e.release??=200;const n=new 
S(i,{fftSize:512,minDecibels:-94,maxDecibels:0,smoothingTimeConstant:.8,intervalTime:2,intervalCount:25}),t=i.createGain();n.connect(t);let s="open",o=NaN;const l=1,c=y(e.reduction),u=()=>{const b=n.stat().rmsM;s==="closed"?b>=e.threshold&&(s="attack",t.gain.cancelScheduledValues(i.currentTime),t.gain.linearRampToValueAtTime(l,i.currentTime+e.attack/1e3),Number.isNaN(o)||clearTimeout(o),o=setTimeout(()=>{s="open"},e.attack)):s==="attack"?b<e.threshold+e.hysteresis&&(s="release",t.gain.cancelScheduledValues(i.currentTime),t.gain.linearRampToValueAtTime(c,i.currentTime+e.release/1e3),Number.isNaN(o)||clearTimeout(o),o=setTimeout(()=>{s="closed"},e.release)):s==="open"?b<e.threshold+e.hysteresis&&(s="hold",Number.isNaN(o)||clearTimeout(o),o=setTimeout(()=>{s="release",t.gain.cancelScheduledValues(i.currentTime),t.gain.linearRampToValueAtTime(c,i.currentTime+e.release/1e3),o=setTimeout(()=>{s="closed"},e.release)},e.hold)):s==="hold"?b>=e.threshold&&(s="open",Number.isNaN(o)||clearTimeout(o)):s==="release"&&b>=e.threshold&&(s="attack",t.gain.cancelScheduledValues(i.currentTime),t.gain.linearRampToValueAtTime(c,i.currentTime+e.attack/1e3),Number.isNaN(o)||clearTimeout(o),o=setTimeout(()=>{s="open"},e.attack)),setTimeout(u,e.interval)};return setTimeout(u,e.interval),new g(n,t)}}class j{constructor(i,e={}){e.fftSize??=512,e.minDecibels??=-60,e.maxDecibels??=0,e.smoothingTimeConstant??=.8,e.intervalTime??=1e3/60,e.intervalCount??=Math.round(300/(1e3/60)),e.decibelBars??=[-60,-45,-21,-6],e.colorBars??=["#306090","#00b000","#e0d000","#e03030"],e.colorBarsDeactive??=["#606060","#808080","#a0a0a0","#c0c0c0"],e.colorRMS??="#ffffff",e.colorBackground??="#000000",e.horizontal??=!1;const n=new S(i,{fftSize:e.fftSize,minDecibels:e.minDecibels,maxDecibels:e.maxDecibels,smoothingTimeConstant:e.smoothingTimeConstant,intervalTime:e.intervalTime,intervalCount:e.intervalCount});let t=[],s,o=!1;n.deactive=c=>{o=c};const l=c=>{const 
u=n.stat().peak,b=n.stat().rmsM,h=c.getContext("2d");h.fillStyle=e.colorBackground,h.fillRect(0,0,c.width,c.height);const f=o?e.colorBarsDeactive:e.colorBars,r=m=>e.horizontal?m/(e.maxDecibels-e.minDecibels)*c.width:m/(e.maxDecibels-e.minDecibels)*c.height,T=(m,k,L)=>{const F=r(Math.abs(k-e.minDecibels)),M=r(Math.abs(k-m));h.fillStyle=L,e.horizontal?h.fillRect(F-M,0,M,c.height):h.fillRect(0,c.height-F,c.width,M)},a=Math.min(e.decibelBars.length,f.length);let w=e.minDecibels,D=f[0];for(let m=0;m<a&&!(u<e.decibelBars[m]);m++){const k=e.decibelBars[m];T(w,k,D),D=f[m],w=k}T(w,u,D);const A=r(Math.abs(b-e.minDecibels));h.fillStyle=e.colorRMS,e.horizontal?h.fillRect(A-1,0,1,c.height):h.fillRect(0,c.height-A,c.width,1)};return n.draw=function(c){t.push(c),t.length===1&&(s=new V(()=>{for(const u of t)l(u)}))},n.undraw=function(c){t=t.filter(u=>u!==c),t.length===0&&s.clear()},n}}class E{constructor(i,e={}){e.fftSize??=8192,e.minDecibels??=-144,e.maxDecibels??=0,e.smoothingTimeConstant??=.8,e.intervalTime??=1e3/60,e.layers??=[-120,-90,-60,-50,-40,-30,-20,-10],e.slices??=[40,80,160,320,640,1280,2560,5120,10240,20480],e.colorBackground??="#000000",e.colorBars??="#00cc00",e.colorLayers??="#009900",e.colorSlices??="#009900",e.logarithmic??=!0;const n=new S(i,{fftSize:e.fftSize,minDecibels:e.minDecibels,maxDecibels:e.maxDecibels,smoothingTimeConstant:e.smoothingTimeConstant,intervalTime:e.intervalTime,intervalCount:0});let t=[],s;const o=l=>{const c=n.dataF(),u=l.getContext("2d");u.fillStyle=e.colorBackground,u.fillRect(0,0,l.width,l.height);const b=h=>h/(e.maxDecibels-e.minDecibels)*l.height;u.fillStyle=e.colorLayers;for(const h of e.layers){const f=b(Math.abs(h-e.minDecibels));u.fillRect(0,l.height-f,l.width,1)}u.fillStyle=e.colorSlices;for(const h of e.slices){const f=Math.log2(h/20)*(l.width/10);u.fillRect(f,0,1,l.height)}if(u.fillStyle=e.colorBars,e.logarithmic)for(let h=0;h<l.width;h++){const 
r=20*Math.pow(2,h*10/l.width),T=20*Math.pow(2,(h+1)*10/l.width),a=Math.round(r*(c.length/(20*Math.pow(2,10))));let w=Math.round(T*(c.length/(20*Math.pow(2,10))))-1;w<a&&(w=a);let D=0;for(let m=a;m<=w;m++)D+=c[m];D/=w+1-a;const A=b(D-e.minDecibels);u.fillRect(h,l.height-A,1,A)}else{let h=0;const f=l.width/c.length;for(let r=0;r<c.length;r++){const T=c[r],a=b(T-e.minDecibels);u.fillRect(h,l.height-a,f-.5,a),h+=f}}};return n.draw=function(l){t.push(l),t.length===1&&(s=new V(()=>{for(const c of t)o(c)}))},n.undraw=function(l){t=t.filter(c=>c!==l),t.length===0&&s.clear()},n}}class O{constructor(i,e={}){e.equalizer??=!0,e.noisegate??=!0,e.compressor??=!0,e.limiter??=!0,e.gain??=0;const n=[];let t=0;const s=new G(i);if(n.push(s),e.equalizer){const c=new N(i,{bands:[{type:"highpass",freq:80,q:.25},{type:"highpass",freq:80,q:.5},{type:"notch",freq:50,q:.25},{type:"notch",freq:960,q:4},{type:"lowpass",freq:20480,q:.5},{type:"lowpass",freq:20480,q:.25}]});n.push(c)}if(e.noisegate){const c=new z(i);n.push(c)}if(e.compressor){const c=new B(i,{threshold:-16,attack:.003,release:.4,knee:3,ratio:2});n.push(c),t+=-2}if(e.equalizer){const c=new N(i,{bands:[{type:"peaking",freq:240,q:.75,gain:3},{type:"highshelf",freq:3840,q:.75,gain:6}]});n.push(c),t+=-1}const o=new _(i);if(n.push(o),e.limiter){const c=new R(i,{threshold:-3,attack:.001,release:.05,knee:0,ratio:20});n.push(c),t+=-1}const l=g.factory(n);return l.mute=c=>s.mute(c),l.adjustGainDecibel=(c,u=10)=>o.adjustGainDecibel(t+c,u),l.adjustGainDecibel(t+e.gain,0),l}}return{AudioNodeComposite:g,AudioNodeNoise:v,AudioNodeGain:_,AudioNodeCompressor:B,AudioNodeLimiter:R,AudioNodeEqualizer:N,AudioNodeMeter:S,AudioNodeGate:z,AudioNodeAmplitude:j,AudioNodeSpectrum:E,AudioNodeVoice:O}}); | ||
(function(global, factory) { | ||
typeof exports === "object" && typeof module !== "undefined" ? module.exports = factory() : typeof define === "function" && define.amd ? define(factory) : (global = typeof globalThis !== "undefined" ? globalThis : global || self, global.AudioNodeSuite = factory()); | ||
})(this, function() { | ||
"use strict"; | ||
/* a composite node: wraps an input/output pair of AudioNodes behind a
   single GainNode facade, tracking every connect/disconnect so the whole
   chain can be transparently bypassed (the facade itself carries the
   signal while bypassed) */
class AudioNodeComposite extends GainNode {
  /* tracked connected targets */
  /* just pass-through construction */
  constructor(context) {
    super(context);
    this.input = null;     /* first node of the wrapped chain (null until chain()) */
    this.output = null;    /* last node of the wrapped chain (null until chain()) */
    this._bypass = false;  /* whether the wrapped chain is currently bypassed */
    this._targets = [];    /* connect() argument lists, replayed on chain()/bypass() */
  }
  /* configure input/output chain */
  chain(input, output = input) {
    if (typeof input !== "object" || !(input instanceof AudioNode))
      throw new Error("input has to be a valid AudioNode");
    this.input = input;
    this.output = output;
    if (this._bypass) {
      /* bypassed: route the facade straight to all recorded targets */
      for (const _target of this._targets)
        super.connect(..._target);
    } else {
      /* active: move every recorded target from the facade to the chain output */
      for (const _target of this._targets) {
        super.disconnect(..._target);
        this.output.connect(..._target);
      }
      /* feed the facade into the chain (only if the input accepts connections,
         i.e. is not a source node like AudioBufferSourceNode) */
      if (this.input.numberOfInputs > 0)
        super.connect(this.input);
    }
  }
  /* provide an overloaded Web API "connect" method */
  connect(...args) {
    this._targets.push(args);
    let result;
    /* before chain() is called (output === null) or while bypassed,
       connect from the facade; otherwise from the chain output */
    if (this._bypass || this.output === null)
      result = super.connect(...args);
    else
      result = this.output.connect(...args);
    return result;
  }
  /* provide an overloaded Web API "disconnect" method */
  disconnect(...args) {
    let result;
    if (this._bypass || this.output === null)
      result = super.disconnect(...args);
    else
      result = this.output.disconnect(...args);
    /* drop the recorded target whose argument list matches exactly */
    this._targets = this._targets.filter((_target) => {
      if (_target.length !== args.length)
        return true;
      for (let i = 0; i < args.length; i++)
        if (_target[i] !== args[i])
          return true;
      return false;
    });
    return result;
  }
  /* provide a custom "bypass" method */
  bypass(bypass) {
    if (this._bypass === bypass)
      return;
    this._bypass = bypass;
    if (this._bypass) {
      /* engage bypass: detach the chain and wire the facade to the targets */
      if (this.input !== null && this.input.numberOfInputs > 0)
        super.disconnect(this.input);
      for (const _target of this._targets) {
        if (this.output !== null)
          this.output.disconnect(..._target);
        super.connect(..._target);
      }
    } else {
      /* release bypass: wire the chain output back to the targets */
      for (const _target of this._targets) {
        super.disconnect(..._target);
        if (this.output !== null)
          this.output.connect(..._target);
      }
      if (this.input !== null && this.input.numberOfInputs > 0)
        super.connect(this.input);
    }
  }
  /* provide convenient factory method */
  static factory(context, nodes) {
    if (nodes.length < 1)
      throw new Error("at least one node has to be given");
    /* link the given nodes in series, then wrap first/last as a composite */
    for (let i = 0; i < nodes.length - 1; i++)
      nodes[i].connect(nodes[i + 1]);
    const composite = new AudioNodeComposite(context);
    composite.chain(nodes[0], nodes[nodes.length - 1]);
    return composite;
  }
}
/* convert a dBFS level into a linear gain factor */
const dBFSToGain = (dbfs) => 10 ** (dbfs / 20);
/* convert a linear gain factor into a dBFS level */
const gainTodBFS = (gain) => Math.log10(gain) * 20;
/* clamp a value into the inclusive range [min, max] */
const ensureWithin = (val, min, max) => {
  if (val < min)
    return min;
  return val > max ? max : val;
};
/* compute a position-weighted average over a ring buffer: entries up to
   the current write position "pos" receive the highest weights, the older
   entries after it the lowest; while the buffer is still filling for the
   first time ("init") the unwritten tail is ignored entirely */
const weightedAverage = (arr, init, pos, len) => {
  const max = Math.min(arr.length, len);
  let sum = 0;
  let weights = 0;
  const accumulate = (value, weight) => {
    sum += weight * value;
    weights += weight;
  };
  for (let i = 0; i <= pos; i++)
    accumulate(arr[i], i + (max - pos));
  if (!init)
    for (let i = pos + 1; i < max; i++)
      accumulate(arr[i], i - (pos + 1));
  return sum / weights;
};
/* run a callback periodically: via requestAnimationFrame in browsers,
   or via a ~60Hz interval timer in non-browser environments (Node.js) */
class AnimationFrameTimer {
  /* start the periodic callback "cb" immediately */
  constructor(cb) {
    this.timer = NaN;       /* interval handle (non-browser path only) */
    this.timerStop = false; /* stop flag (browser rAF path only) */
    /* BUGFIX: the original tested "window !== void 0", which throws a
       ReferenceError where "window" is not declared at all (e.g. Node.js);
       probing with typeof is the only safe existence check */
    if (typeof window !== "undefined") {
      const once = () => {
        cb();
        if (!this.timerStop)
          window.requestAnimationFrame(once);
      };
      window.requestAnimationFrame(once);
    } else
      this.timer = setInterval(() => cb(), 1e3 / 60);
  }
  /* stop the periodic callback */
  clear() {
    if (typeof window !== "undefined")
      this.timerStop = true;
    else
      clearInterval(this.timer); /* BUGFIX: was clearTimeout — pair with setInterval */
  }
}
/* a noise generator: pre-renders 5 seconds of white or pink noise into an
   AudioBuffer and exposes it as a looping buffer source */
class AudioNodeNoise extends AudioNodeComposite {
  constructor(context, params = {}) {
    super(context);
    params.type ??= "pink";  /* noise flavor: "white" | "pink" */
    params.channels ??= 1;   /* number of output channels */
    /* pre-render 5 seconds of audio which is then looped forever */
    const lengthInSamples = 5 * context.sampleRate;
    const buffer = context.createBuffer(params.channels, lengthInSamples, context.sampleRate);
    if (params.type === "white") {
      /* white noise: uniform samples in [-1, 1), the same sample value
         written to every channel */
      for (let i = 0; i < lengthInSamples; i++) {
        const rand = Math.random() * 2 - 1;
        for (let j = 0; j < params.channels; j++) {
          const data = buffer.getChannelData(j);
          data[i] = rand;
        }
      }
    } else if (params.type === "pink") {
      /* pink noise: white noise shaped by a 7-state recursive filter
         (NOTE(review): coefficients look like Paul Kellet's well-known
         pink-noise approximation — confirm source) */
      const pink = [];
      for (let i = 0; i < params.channels; i++) {
        pink[i] = new Float32Array(lengthInSamples);
        const b = [0, 0, 0, 0, 0, 0, 0]; /* per-channel filter state */
        for (let j = 0; j < lengthInSamples; j++) {
          const white = Math.random() * 2 - 1;
          b[0] = 0.99886 * b[0] + white * 0.0555179;
          b[1] = 0.99332 * b[1] + white * 0.0750759;
          b[2] = 0.969 * b[2] + white * 0.153852;
          b[3] = 0.8665 * b[3] + white * 0.3104856;
          b[4] = 0.55 * b[4] + white * 0.5329522;
          b[5] = -0.7616 * b[5] - white * 0.016898;
          pink[i][j] = b[0] + b[1] + b[2] + b[3] + b[4] + b[5] + b[6] + white * 0.5362;
          b[6] = white * 0.115926;
        }
      }
      /* normalize all channels by the global extremum so the loudest
         sample sits just below full scale
         (NOTE(review): spreading a Float32Array of 5*sampleRate elements
         into Math.min/Math.max can exceed the engine's argument limit
         and throw a RangeError — consider a manual scan) */
      const minA = [];
      const maxA = [];
      for (let i = 0; i < pink.length; i++) {
        minA.push(Math.min(...pink[i]));
        maxA.push(Math.max(...pink[i]));
      }
      const min = Math.min(...minA);
      const max = Math.max(...maxA);
      /* 2147483647/2147483648 scales to just under 1.0 full scale */
      const coefficient = 2147483647 / 2147483648 / Math.max(Math.abs(min), max);
      for (let i = 0; i < params.channels; i++)
        for (let j = 0; j < lengthInSamples; j++)
          buffer.getChannelData(i)[j] = pink[i][j] * coefficient;
    }
    /* expose the rendered buffer as an already-started looping source */
    const bs = context.createBufferSource();
    bs.channelCount = params.channels;
    bs.buffer = buffer;
    bs.loop = true;
    bs.start(0);
    this.chain(bs);
  }
}
/* a mute node: a pass-through gain stage that can be smoothly
   faded to silence and back */
class AudioNodeMute extends AudioNodeComposite {
  constructor(context, params = {}) {
    super(context);
    params.muted ??= false;
    const initial = params.muted ? 0 : 1;
    this.gain.setValueAtTime(initial, this.context.currentTime);
  }
  /* fade to silence (true) or back to unity gain (false) within "ms" milliseconds */
  mute(_mute, ms = 10) {
    this.gain.linearRampToValueAtTime(_mute ? 0 : 1, this.context.currentTime + ms / 1e3);
  }
}
/* a gain node whose level is expressed in decibels (dBFS) */
class AudioNodeGain extends AudioNodeComposite {
  constructor(context, params = {}) {
    super(context);
    params.gain ??= 0; /* initial level in dBFS (0 dB = unity gain) */
    const linear = dBFSToGain(params.gain);
    this.gain.setValueAtTime(linear, this.context.currentTime);
  }
  /* ramp the level to "db" dBFS within "ms" milliseconds */
  adjustGainDecibel(db, ms = 10) {
    const linear = dBFSToGain(db);
    this.gain.linearRampToValueAtTime(linear, this.context.currentTime + ms / 1e3);
  }
}
/* a compressor node: a DynamicsCompressorNode preconfigured with
   gentle settings suitable for voice */
class AudioNodeCompressor extends AudioNodeComposite {
  constructor(context, params = {}) {
    super(context);
    /* fill in the default compression parameters */
    const defaults = { threshold: -16, attack: 3e-3, release: 0.4, knee: 3, ratio: 2 };
    for (const key of Object.keys(defaults))
      params[key] ??= defaults[key];
    const compressor = context.createDynamicsCompressor();
    compressor.threshold.setValueAtTime(params.threshold, context.currentTime);
    compressor.knee.setValueAtTime(params.knee, context.currentTime);
    compressor.ratio.setValueAtTime(params.ratio, context.currentTime);
    compressor.attack.setValueAtTime(params.attack, context.currentTime);
    compressor.release.setValueAtTime(params.release, context.currentTime);
    this.chain(compressor);
  }
}
/* a limiter node: a DynamicsCompressorNode preconfigured as a
   hard brick-wall limiter (high ratio, zero knee, fast attack) */
class AudioNodeLimiter extends AudioNodeComposite {
  constructor(context, params = {}) {
    super(context);
    /* fill in the default limiting parameters */
    const defaults = { threshold: -3, attack: 1e-3, release: 0.05, knee: 0, ratio: 20 };
    for (const key of Object.keys(defaults))
      params[key] ??= defaults[key];
    const limiter = context.createDynamicsCompressor();
    limiter.threshold.setValueAtTime(params.threshold, context.currentTime);
    limiter.knee.setValueAtTime(params.knee, context.currentTime);
    limiter.ratio.setValueAtTime(params.ratio, context.currentTime);
    limiter.attack.setValueAtTime(params.attack, context.currentTime);
    limiter.release.setValueAtTime(params.release, context.currentTime);
    this.chain(limiter);
  }
}
/* an equalizer node: a series chain of BiquadFilter bands */
class AudioNodeEqualizer extends AudioNodeComposite {
  /* global BiquadFilterType */
  constructor(context, params = {}) {
    super(context);
    params.bands ??= [];
    if (params.bands.length < 1)
      throw new Error("at least one band has to be specified");
    /* instantiate one BiquadFilterNode per band specification
       (defaults: peaking filter on octave frequencies starting at 64Hz) */
    const bands = params.bands.map((spec, i) => {
      const options = { type: "peaking", freq: 64 * Math.pow(2, i), q: 1, gain: 1, ...spec };
      const band = context.createBiquadFilter();
      band.type = options.type;
      band.frequency.setValueAtTime(options.freq, context.currentTime);
      band.Q.setValueAtTime(options.q, context.currentTime);
      band.gain.setValueAtTime(options.gain, context.currentTime);
      return band;
    });
    /* wire the bands in series and expose first/last as the chain */
    for (let i = 1; i < bands.length; i++)
      bands[i - 1].connect(bands[i]);
    if (bands.length === 1)
      this.chain(bands[0]);
    else
      this.chain(bands[0], bands[bands.length - 1]);
  }
}
/* a meter node: a pass-through AnalyserNode that periodically samples the
   time/frequency-domain data and derives peak/RMS statistics in dBFS */
class AudioNodeMeter extends AudioNodeComposite {
  constructor(context, params = {}) {
    super(context);
    /* AnalyserNode configuration plus measurement cadence defaults */
    params.fftSize ??= 512;
    params.minDecibels ??= -94;
    params.maxDecibels ??= 0;
    params.smoothingTimeConstant ??= 0.8;
    params.intervalTime ??= 3;    /* milliseconds between measurements */
    params.intervalCount ??= 100; /* RMS moving-average window size (0 disables averaging) */
    const analyser = context.createAnalyser();
    analyser.fftSize = params.fftSize;
    analyser.minDecibels = params.minDecibels;
    analyser.maxDecibels = params.maxDecibels;
    analyser.smoothingTimeConstant = params.smoothingTimeConstant;
    this.chain(analyser);
    /* derived statistics, clamped to [minDecibels, maxDecibels];
       NOTE(review): rmsS is initialized here but never updated in this block */
    const stat = { peak: -Infinity, rms: -Infinity, rmsM: -Infinity, rmsS: -Infinity };
    const rmsLen = params.intervalCount;
    let rmsInit = true; /* true until the ring buffer has been filled once */
    let rmsPos = 0;     /* current write position within the ring buffer */
    const rmsArr = [];  /* ring buffer of recent instantaneous RMS values */
    const dataT = new Float32Array(analyser.fftSize);          /* time-domain samples */
    const dataF = new Float32Array(analyser.frequencyBinCount); /* frequency-domain bins */
    const measure = () => {
      analyser.getFloatTimeDomainData(dataT);
      analyser.getFloatFrequencyData(dataF);
      /* single pass: accumulate sum of squares (for RMS) and max square (for peak) */
      let rms = 0;
      let peak = -Infinity;
      for (let i = 0; i < dataT.length; i++) {
        const square = dataT[i] * dataT[i];
        rms += square;
        if (peak < square)
          peak = square;
      }
      stat.rms = ensureWithin(
        gainTodBFS(Math.sqrt(rms / dataT.length)),
        params.minDecibels,
        params.maxDecibels
      );
      stat.peak = ensureWithin(
        gainTodBFS(Math.sqrt(peak)),
        params.minDecibels,
        params.maxDecibels
      );
      /* fold the instantaneous RMS into the weighted moving average */
      if (rmsLen > 0) {
        if (rmsPos === rmsLen - 1 && rmsInit)
          rmsInit = false;
        rmsPos = (rmsPos + 1) % rmsLen;
        rmsArr[rmsPos] = stat.rms;
        stat.rmsM = weightedAverage(rmsArr, rmsInit, rmsPos, rmsLen);
      }
    };
    /* NOTE(review): this interval is never cleared, so the meter keeps
       measuring (and the closure stays alive) for the node's whole lifetime */
    setInterval(measure, params.intervalTime);
    measure();
    /* accessors for the raw sample buffers and the statistics object */
    this.dataT = () => dataT;
    this.dataF = () => dataF;
    this.stat = () => stat;
  }
}
class AudioNodeGate extends AudioNodeComposite {
  /*  Noise gate AudioNode: attenuates the signal by "reduction" dB once the
      measured moving-average RMS level stays below "threshold" dB, and ramps
      the gain back up once the level rises above it again. Implemented as a
      polling five-state machine: open -> hold -> release -> closed -> attack.  */
  constructor(context, params = {}) {
    super(context);

    /*  provide parameter defaults  */
    params.threshold ??= -45;   /* gate opens above this level (dBFS) */
    params.hysteresis ??= -3;   /* gate closes only below threshold + hysteresis (dB) */
    params.reduction ??= -30;   /* gain reduction applied when closed (dB) */
    params.interval ??= 2;      /* level polling interval (ms) */
    params.attack ??= 4;        /* ramp time for opening (ms) */
    params.hold ??= 40;         /* time the gate stays open after the level drops (ms) */
    params.release ??= 200;     /* ramp time for closing (ms) */

    /*  measure the input level (fixed meter configuration)  */
    const meter = new AudioNodeMeter(context, {
      fftSize: 512,
      minDecibels: -94,
      maxDecibels: 0,
      smoothingTimeConstant: 0.8,
      intervalTime: 2,
      intervalCount: 25
    });

    /*  the gain node actually performing the attenuation  */
    const gain = context.createGain();
    meter.connect(gain);

    /*  state machine continuously driving the gain  */
    let state = "open";
    let timer = NaN;
    const gainOpen = 1;
    const gainClosed = dBFSToGain(params.reduction);
    const controlGain = () => {
      const level = meter.stat().rmsM;
      if (state === "closed") {
        if (level >= params.threshold) {
          /*  signal came back: start opening the gate  */
          state = "attack";
          gain.gain.cancelScheduledValues(context.currentTime);
          gain.gain.linearRampToValueAtTime(gainOpen, context.currentTime + params.attack / 1e3);
          if (!Number.isNaN(timer))
            clearTimeout(timer);
          timer = setTimeout(() => {
            state = "open";
          }, params.attack);
        }
      } else if (state === "attack") {
        if (level < params.threshold + params.hysteresis) {
          /*  signal dropped again while opening: close immediately  */
          state = "release";
          gain.gain.cancelScheduledValues(context.currentTime);
          gain.gain.linearRampToValueAtTime(gainClosed, context.currentTime + params.release / 1e3);
          if (!Number.isNaN(timer))
            clearTimeout(timer);
          timer = setTimeout(() => {
            state = "closed";
          }, params.release);
        }
      } else if (state === "open") {
        if (level < params.threshold + params.hysteresis) {
          /*  signal dropped: keep the gate open for the hold time, then close  */
          state = "hold";
          if (!Number.isNaN(timer))
            clearTimeout(timer);
          timer = setTimeout(() => {
            state = "release";
            gain.gain.cancelScheduledValues(context.currentTime);
            gain.gain.linearRampToValueAtTime(gainClosed, context.currentTime + params.release / 1e3);
            timer = setTimeout(() => {
              state = "closed";
            }, params.release);
          }, params.hold);
        }
      } else if (state === "hold") {
        if (level >= params.threshold) {
          /*  signal came back before the hold expired: simply stay open  */
          state = "open";
          if (!Number.isNaN(timer))
            clearTimeout(timer);
        }
      } else if (state === "release") {
        if (level >= params.threshold) {
          /*  signal came back while closing: re-open the gate.
              BUGFIX: ramp towards the open gain (gainOpen), not the
              closed gain -- this mirrors the "closed" branch above;
              the previous code ramped to gainClosed while entering
              the "attack"/"open" states, silencing the gate forever.  */
          state = "attack";
          gain.gain.cancelScheduledValues(context.currentTime);
          gain.gain.linearRampToValueAtTime(gainOpen, context.currentTime + params.attack / 1e3);
          if (!Number.isNaN(timer))
            clearTimeout(timer);
          timer = setTimeout(() => {
            state = "open";
          }, params.attack);
        }
      }
      setTimeout(controlGain, params.interval);
    };
    setTimeout(controlGain, params.interval);

    /*  expose the meter/gain pair as our sub-chain  */
    this.chain(meter, gain);
  }
}
class AudioNodeAmplitude extends AudioNodeMeter {
  /*  custom AudioNode: amplitude (peak/RMS level) visualizer  */
  constructor(context, params = {}) {
    super(context, {
      fftSize: params.fftSize ??= 512,
      minDecibels: params.minDecibels ??= -60,
      maxDecibels: params.maxDecibels ??= 0,
      smoothingTimeConstant: params.smoothingTimeConstant ??= 0.8,
      intervalTime: params.intervalTime ??= 1e3 / 60,
      intervalCount: params.intervalCount ??= Math.round(300 / (1e3 / 60))
      /* for 300ms RMS/m */
    });
    /*  internal state  */
    this._canvases = [];
    this._timer = null;
    this._deactive = false;
    /*  provide remaining parameter defaults  */
    params.decibelBars ??= [-60, -45, -21, -6];
    params.colorBars ??= ["#306090", "#00b000", "#e0d000", "#e03030"];
    params.colorBarsDeactive ??= ["#606060", "#808080", "#a0a0a0", "#c0c0c0"];
    params.colorRMS ??= "#ffffff";
    params.colorBackground ??= "#000000";
    params.horizontal ??= false;
    this._params = params;
  }
  /* draw the current levels into a single canvas */
  _draw(canvas) {
    /*  determine meter information  */
    const peakDb = this.stat().peak;
    const rmsDb = this.stat().rmsM;
    /*  wipe the background  */
    const ctx = canvas.getContext("2d");
    ctx.fillStyle = this._params.colorBackground;
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    /*  pick the active or deactivated color palette  */
    const palette = this._deactive ? this._params.colorBarsDeactive : this._params.colorBars;
    /*  map a decibel span onto canvas pixels  */
    const span = this._params.horizontal ? canvas.width : canvas.height;
    const toUnits = (value) => value / (this._params.maxDecibels - this._params.minDecibels) * span;
    /*  paint one colored segment of the level bar  */
    const paintSegment = (lo, hi, fill) => {
      const base = toUnits(Math.abs(hi - this._params.minDecibels));
      const size = toUnits(Math.abs(hi - lo));
      ctx.fillStyle = fill;
      if (this._params.horizontal)
        ctx.fillRect(base - size, 0, size, canvas.height);
      else
        ctx.fillRect(0, canvas.height - base, canvas.width, size);
    };
    /*  paint the segments below the peak level, then the remainder  */
    const segments = Math.min(this._params.decibelBars.length, palette.length);
    let lower = this._params.minDecibels;
    let color = palette[0];
    let idx = 0;
    while (idx < segments && peakDb >= this._params.decibelBars[idx]) {
      const upper = this._params.decibelBars[idx];
      paintSegment(lower, upper, color);
      color = palette[idx];
      lower = upper;
      idx++;
    }
    paintSegment(lower, peakDb, color);
    /*  paint the RMS marker line  */
    const rmsPos = toUnits(Math.abs(rmsDb - this._params.minDecibels));
    ctx.fillStyle = this._params.colorRMS;
    if (this._params.horizontal)
      ctx.fillRect(rmsPos - 1, 0, 1, canvas.height);
    else
      ctx.fillRect(0, canvas.height - rmsPos, canvas.width, 1);
  }
  /* register a canvas for continuous drawing */
  draw(canvas) {
    this._canvases.push(canvas);
    if (this._canvases.length === 1) {
      this._timer = new AnimationFrameTimer(() => {
        this._canvases.forEach((c) => this._draw(c));
      });
    }
  }
  /* unregister a canvas again */
  undraw(canvas) {
    this._canvases = this._canvases.filter((c) => c !== canvas);
    if (this._canvases.length === 0)
      this._timer.clear();
  }
  /* allow deactivation control (switches to the deactivated palette) */
  deactive(_deactive) {
    this._deactive = _deactive;
  }
}
class AudioNodeSpectrum extends AudioNodeMeter {
  /*  custom AudioNode: frequency spectrum visualizer  */
  constructor(context, params = {}) {
    super(context, {
      fftSize: params.fftSize ??= 8192,
      minDecibels: params.minDecibels ??= -144,
      maxDecibels: params.maxDecibels ??= 0,
      smoothingTimeConstant: params.smoothingTimeConstant ??= 0.8,
      intervalTime: params.intervalTime ??= 1e3 / 60,
      intervalCount: 0
    });
    /*  internal state  */
    this._canvases = [];
    this._timer = null;
    /*  provide remaining parameter defaults  */
    params.layers ??= [-120, -90, -60, -50, -40, -30, -20, -10];
    params.slices ??= [40, 80, 160, 320, 640, 1280, 2560, 5120, 10240, 20480];
    params.colorBackground ??= "#000000";
    params.colorBars ??= "#00cc00";
    params.colorLayers ??= "#009900";
    params.colorSlices ??= "#009900";
    params.logarithmic ??= true;
    this._params = params;
  }
  /* draw the current spectrum into a single canvas */
  _draw(canvas) {
    const bins = this.dataF();
    /*  wipe the background  */
    const ctx = canvas.getContext("2d");
    ctx.fillStyle = this._params.colorBackground;
    ctx.fillRect(0, 0, canvas.width, canvas.height);
    /*  map a decibel span onto canvas pixels  */
    const toUnits = (value) => value / (this._params.maxDecibels - this._params.minDecibels) * canvas.height;
    /*  draw the horizontal decibel layer lines  */
    ctx.fillStyle = this._params.colorLayers;
    this._params.layers.forEach((layer) => {
      const y = toUnits(Math.abs(layer - this._params.minDecibels));
      ctx.fillRect(0, canvas.height - y, canvas.width, 1);
    });
    /*  draw the vertical frequency slice lines
        (the width covers 10 octaves starting at 20 Hz)  */
    ctx.fillStyle = this._params.colorSlices;
    this._params.slices.forEach((slice) => {
      const x = Math.log2(slice / 20) * (canvas.width / 10);
      ctx.fillRect(x, 0, 1, canvas.height);
    });
    /*  draw the frequency bars themselves  */
    ctx.fillStyle = this._params.colorBars;
    if (this._params.logarithmic) {
      /*  one bar per pixel column: average all FFT bins whose
          frequency falls into the column's logarithmic range  */
      const binScale = bins.length / (20 * Math.pow(2, 10));
      for (let col = 0; col < canvas.width; col++) {
        const fLo = 20 * Math.pow(2, col * 10 / canvas.width);
        const fHi = 20 * Math.pow(2, (col + 1) * 10 / canvas.width);
        const kLo = Math.round(fLo * binScale);
        let kHi = Math.round(fHi * binScale) - 1;
        if (kHi < kLo)
          kHi = kLo;
        let db = 0;
        for (let k = kLo; k <= kHi; k++)
          db += bins[k];
        db /= kHi + 1 - kLo;
        const barHeight = toUnits(db - this._params.minDecibels);
        ctx.fillRect(col, canvas.height - barHeight, 1, barHeight);
      }
    } else {
      /*  one bar per FFT bin, spread linearly across the width  */
      const barWidth = canvas.width / bins.length;
      let x = 0;
      for (const db of bins) {
        const barHeight = toUnits(db - this._params.minDecibels);
        ctx.fillRect(x, canvas.height - barHeight, barWidth - 0.5, barHeight);
        x += barWidth;
      }
    }
  }
  /* register a canvas for continuous drawing */
  draw(canvas) {
    this._canvases.push(canvas);
    if (this._canvases.length === 1) {
      this._timer = new AnimationFrameTimer(() => {
        this._canvases.forEach((c) => this._draw(c));
      });
    }
  }
  /* unregister a canvas again */
  undraw(canvas) {
    this._canvases = this._canvases.filter((c) => c !== canvas);
    if (this._canvases.length === 0)
      this._timer.clear();
  }
}
class AudioNodeVoice extends AudioNodeComposite {
  /*  custom AudioNode: voice processing chain
      (mute -> EQ cut -> gate -> compressor -> EQ boost -> gain -> limiter)  */
  constructor(context, params = {}) {
    super(context);
    /*  gain compensation (dB) accumulated for the level-raising stages  */
    this._compensate = 0;
    /*  provide parameter defaults  */
    params.equalizer ??= true;
    params.noisegate ??= true;
    params.compressor ??= true;
    params.limiter ??= true;
    params.gain ??= 0;
    /*  assemble the processing stages in order  */
    const stages = [];
    this._mute = new AudioNodeMute(context);
    stages.push(this._mute);
    if (params.equalizer) {
      /*  first EQ stage: high-pass/notch/low-pass filtering  */
      stages.push(new AudioNodeEqualizer(context, {
        bands: [
          { type: "highpass", freq: 80, q: 0.25 },
          { type: "highpass", freq: 80, q: 0.5 },
          { type: "notch", freq: 50, q: 0.25 },
          { type: "notch", freq: 960, q: 4 },
          { type: "lowpass", freq: 20480, q: 0.5 },
          { type: "lowpass", freq: 20480, q: 0.25 }
        ]
      }));
    }
    if (params.noisegate)
      stages.push(new AudioNodeGate(context));
    if (params.compressor) {
      stages.push(new AudioNodeCompressor(context, {
        threshold: -16,
        attack: 3e-3,
        release: 0.4,
        knee: 3,
        ratio: 2
      }));
      this._compensate -= 2;
    }
    if (params.equalizer) {
      /*  second EQ stage: peaking/high-shelf boosting  */
      stages.push(new AudioNodeEqualizer(context, {
        bands: [
          { type: "peaking", freq: 240, q: 0.75, gain: 3 },
          { type: "highshelf", freq: 3840, q: 0.75, gain: 6 }
        ]
      }));
      this._compensate -= 1;
    }
    this._gain = new AudioNodeGain(context);
    stages.push(this._gain);
    if (params.limiter) {
      stages.push(new AudioNodeLimiter(context, {
        threshold: -3,
        attack: 1e-3,
        release: 0.05,
        knee: 0,
        ratio: 20
      }));
      this._compensate -= 1;
    }
    /*  wire the stages in sequence and expose them as our sub-chain  */
    stages.slice(0, -1).forEach((stage, i) => stage.connect(stages[i + 1]));
    this.chain(stages[0], stages[stages.length - 1]);
    /*  apply the initial (compensated) gain immediately  */
    this.adjustGainDecibel(params.gain, 0);
  }
  /* provide mute control */
  mute(mute) {
    this._mute.mute(mute);
  }
  /* provide gain adjustment (with internal compensation applied) */
  adjustGainDecibel(db, ms = 10) {
    this._gain.adjustGainDecibel(this._compensate + db, ms);
  }
}
const audioNodeSuite = { | ||
AudioNodeComposite, | ||
AudioNodeNoise, | ||
AudioNodeGain, | ||
AudioNodeCompressor, | ||
AudioNodeLimiter, | ||
AudioNodeEqualizer, | ||
AudioNodeMeter, | ||
AudioNodeGate, | ||
AudioNodeAmplitude, | ||
AudioNodeSpectrum, | ||
AudioNodeVoice | ||
}; | ||
return audioNodeSuite; | ||
}); |
{ | ||
"name": "audio-node-suite", | ||
"version": "1.0.0", | ||
"version": "1.1.0", | ||
"description": "Audio-Node-Suite -- Web Audio API AudioNode Suite", | ||
@@ -5,0 +5,0 @@ "keywords": [ "web", "audio", "api", "audionode", "suite" ], |
@@ -26,3 +26,3 @@ | ||
`AudioNode`. As an additional goodie, the class provides a useful | ||
`bypass()` method for temporarily bypassing the effect of the | ||
"by-pass" functionality for temporarily by-passing the effect of the | ||
underlying `AudioNode` instances. | ||
@@ -29,0 +29,0 @@ |
@@ -33,9 +33,2 @@ /* | ||
/* get the value at a certain frequency in a bucket of frequencies (as returned by "getFloatFrequencyData" */ | ||
export const getFrequencyValue = (ctx: AudioContext, freq: number, buckets: Float32Array) => { | ||
const nyquist = ctx.sampleRate / 2 | ||
const index = Math.round(freq / nyquist * buckets.length) | ||
return buckets[index] | ||
} | ||
/* ensure a value is within min/max boundaries */ | ||
@@ -42,0 +35,0 @@ export const ensureWithin = (val: number, min: number, max: number) => { |
@@ -25,15 +25,23 @@ /* | ||
/* internal signature of connect/disconnect methods */ | ||
type connect = (...args: any[]) => any | ||
type disconnect = (...args: any[]) => any | ||
/* Composite Web Audio API AudioNode */ | ||
export class AudioNodeComposite extends GainNode { | ||
private _bypass = false | ||
private _connect: any | ||
private _disconnect: any | ||
private _targets = [] as any[] | ||
declare public context: BaseAudioContext | ||
declare public input: AudioNode | ||
declare public output: AudioNode | ||
declare public bypass: (bypass: boolean) => void | ||
constructor (input: AudioNode, output: AudioNode = input) { | ||
super(input.context) | ||
/* configured input/output nodes of composed chain */ | ||
public input: AudioNode | null = null | ||
public output: AudioNode | null = null | ||
/* internal state */ | ||
private _bypass = false /* whether to bypass node */ | ||
private _targets = [] as any[] /* tracked connected targets */ | ||
/* just pass-through construction */ | ||
constructor (context: AudioContext) { | ||
super(context) | ||
} | ||
/* configure input/output chain */ | ||
chain (input: AudioNode, output: AudioNode = input) { | ||
/* require at least a wrapped input node */ | ||
@@ -43,107 +51,90 @@ if (typeof input !== "object" || !(input instanceof AudioNode)) | ||
/* determine AudioContext via input node */ | ||
const context = input.context | ||
/* configure chain */ | ||
this.input = input | ||
this.output = output | ||
/* use a no-op AudioNode node to represent us */ | ||
let node: AudioNodeComposite | ||
if (input.numberOfInputs > 0) { | ||
node = context.createGain() as unknown as AudioNodeComposite | ||
node.connect(input) | ||
if (this._bypass) { | ||
/* bypass mode: connect us to targets directly */ | ||
for (const _target of this._targets) | ||
(super.connect as connect)(..._target) | ||
} | ||
else { | ||
const bs = context.createBufferSource() | ||
bs.buffer = null | ||
node = bs as unknown as AudioNodeComposite | ||
/* regular mode: connect us to to targets via input/output nodes */ | ||
for (const _target of this._targets) { | ||
(super.disconnect as disconnect)(..._target); | ||
(this.output.connect as connect)(..._target) | ||
} | ||
if (this.input.numberOfInputs > 0) | ||
(super.connect as connect)(this.input) | ||
} | ||
} | ||
/* track the connected targets and bypass state */ | ||
node._targets = [] as AudioNode[] | ||
node._bypass = false | ||
/* provide an overloaded Web API "connect" method */ | ||
connect (...args: any[]): any { | ||
/* track target */ | ||
this._targets.push(args) | ||
/* provide an overloaded Web API "connect" method */ | ||
node._connect = node.connect | ||
const connect = (...args: any[]): any => { | ||
/* track target */ | ||
node._targets.push(args) | ||
/* connect us to target node */ | ||
let result: any | ||
if (this._bypass || this.output === null) | ||
result = (super.connect as connect)(...args) | ||
else | ||
result = (this.output.connect as connect)(...args) | ||
return result | ||
} | ||
/* connect us to target node */ | ||
let result: any | ||
if (node._bypass) { | ||
if (input.numberOfInputs > 0) | ||
result = node._connect(...args) | ||
else | ||
result = (input.connect as (...args: any[]) => any)(...args) | ||
} | ||
else | ||
result = (output.connect as (...args: any[]) => any)(...args) | ||
/* provide an overloaded Web API "disconnect" method */ | ||
disconnect (...args: any[]): any { | ||
/* disconnect us from target node */ | ||
let result: any | ||
if (this._bypass || this.output === null) | ||
result = (super.disconnect as disconnect)(...args) | ||
else | ||
result = (this.output.disconnect as disconnect)(...args) | ||
return result | ||
} | ||
node.connect = connect | ||
/* provide an overloaded Web API "disconnect" method */ | ||
node._disconnect = node.disconnect | ||
node.disconnect = (...args: any[]): any => { | ||
/* disconnect us from target node */ | ||
let result: any | ||
if (node._bypass) { | ||
if (input.numberOfInputs > 0) | ||
result = node._disconnect(...args) | ||
else | ||
result = (input.connect as (...args: any[]) => any)(...args) | ||
} | ||
else | ||
result = (output.disconnect as (...args: any[]) => any)(...args) | ||
/* untrack target */ | ||
node._targets = node._targets.filter((_target: any[]) => { | ||
if (_target.length !== args.length) | ||
/* untrack target */ | ||
this._targets = this._targets.filter((_target: any[]) => { | ||
if (_target.length !== args.length) | ||
return true | ||
for (let i = 0; i < args.length; i++) | ||
if (_target[i] !== args[i]) | ||
return true | ||
for (let i = 0; i < args.length; i++) | ||
if (_target[i] !== args[i]) | ||
return true | ||
return false | ||
}) | ||
return false | ||
}) | ||
return result | ||
} | ||
return result | ||
} | ||
/* provide a custom "bypass" method */ | ||
node.bypass = (bypass: boolean) => { | ||
/* short-circuit no operations */ | ||
if (node._bypass === bypass) | ||
return | ||
/* provide a custom "bypass" method */ | ||
bypass (bypass: boolean) { | ||
/* short-circuit no operations */ | ||
if (this._bypass === bypass) | ||
return | ||
/* take over new state and dispatch according to it */ | ||
node._bypass = bypass | ||
if (node._bypass) { | ||
/* bypass mode: connect us to targets directly */ | ||
if (input.numberOfInputs > 0) | ||
node._disconnect(input) | ||
for (const _target of node._targets) { | ||
(output.disconnect as (...args: any[]) => any)(..._target) | ||
node._connect(..._target) | ||
} | ||
/* take over new state and dispatch according to it */ | ||
this._bypass = bypass | ||
if (this._bypass) { | ||
/* bypass mode: connect us to targets directly */ | ||
if (this.input !== null && this.input.numberOfInputs > 0) | ||
(super.disconnect as disconnect)(this.input) | ||
for (const _target of this._targets) { | ||
if (this.output !== null) | ||
(this.output.disconnect as disconnect)(..._target); | ||
(super.connect as connect)(..._target) | ||
} | ||
else { | ||
/* regular mode: connect us to to targets via input/output nodes */ | ||
for (const _target of node._targets) { | ||
node._disconnect.apply(null, _target); | ||
(output.connect as (...args: any[]) => any)(..._target) | ||
} | ||
if (input.numberOfInputs > 0) | ||
node._connect(input) | ||
} | ||
else { | ||
/* regular mode: connect us to to targets via input/output nodes */ | ||
for (const _target of this._targets) { | ||
(super.disconnect as disconnect)(..._target) | ||
if (this.output !== null) | ||
(this.output.connect as connect)(..._target) | ||
} | ||
if (this.input !== null && this.input.numberOfInputs > 0) | ||
(super.connect as connect)(this.input) | ||
} | ||
/* pass-through input and output nodes */ | ||
node.input = input | ||
node.output = output | ||
/* return our "AudioNode" representation (instead of ourself) */ | ||
return node | ||
} | ||
/* factory for Composite Web Audio API AudioNode */ | ||
static factory (nodes: AudioNode[]) { | ||
/* provide convenient factory method */ | ||
static factory (context: AudioContext, nodes: AudioNode[]) { | ||
if (nodes.length < 1) | ||
@@ -153,5 +144,7 @@ throw new Error("at least one node has to be given") | ||
nodes[i].connect(nodes[i + 1]) | ||
return new AudioNodeComposite(nodes[0], nodes[nodes.length - 1]) | ||
const composite = new AudioNodeComposite(context) | ||
composite.chain(nodes[0], nodes[nodes.length - 1]) | ||
return composite | ||
} | ||
} | ||
@@ -30,9 +30,12 @@ /* | ||
/* custom AudioNode: silence */ | ||
export class AudioNodeSilence { | ||
export class AudioNodeSilence extends AudioNodeComposite { | ||
constructor (context: AudioContext, params: { channels?: number } = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
params.channels ??= 1 | ||
/* create underlying BufferSource node */ | ||
/* configure the underlying BufferSource node */ | ||
const bs = context.createBufferSource() | ||
bs.channelCount = params.channels | ||
bs.buffer = null | ||
@@ -42,4 +45,3 @@ bs.loop = true | ||
/* return convenient composite */ | ||
return (new AudioNodeComposite(bs) as unknown as AudioNodeSilence) | ||
this.chain(bs) | ||
} | ||
@@ -49,4 +51,6 @@ } | ||
/* custom AudioNode: noise */ | ||
export class AudioNodeNoise { | ||
export class AudioNodeNoise extends AudioNodeComposite { | ||
constructor (context: AudioContext, params: { type?: string, channels?: number } = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
@@ -110,8 +114,7 @@ params.type ??= "pink" | ||
const bs = context.createBufferSource() | ||
bs.channelCount = params.channels | ||
bs.buffer = buffer | ||
bs.loop = true | ||
bs.start(0) | ||
/* return convenient composite */ | ||
return (new AudioNodeComposite(bs) as unknown as AudioNodeNoise) | ||
this.chain(bs) | ||
} | ||
@@ -121,6 +124,6 @@ } | ||
/* custom AudioNode: mute */ | ||
export class AudioNodeMute { | ||
declare public mute: (mute: boolean, ms?: number) => void | ||
declare public muted: () => boolean | ||
export class AudioNodeMute extends AudioNodeComposite { | ||
constructor (context: AudioContext, params: { muted?: boolean } = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
@@ -130,20 +133,15 @@ params.muted ??= false | ||
/* create and configure underlying Gain node */ | ||
const gain = context.createGain() | ||
gain.gain.setValueAtTime(params.muted ? 0.0 : 1.0, context.currentTime) | ||
/* create and return convenient composite */ | ||
const node = (new AudioNodeComposite(gain) as unknown as AudioNodeMute) | ||
node.mute = (_mute: boolean, ms = 10) => { | ||
const value = _mute ? 0.0 : 1.0 | ||
console.log("FUCK", _mute, value) | ||
gain.gain.linearRampToValueAtTime(value, context.currentTime + ms / 1000) | ||
} | ||
return node | ||
this.gain.setValueAtTime(params.muted ? 0.0 : 1.0, this.context.currentTime) | ||
} | ||
mute (_mute: boolean, ms = 10) { | ||
const value = _mute ? 0.0 : 1.0 | ||
this.gain.linearRampToValueAtTime(value, this.context.currentTime + ms / 1000) | ||
} | ||
} | ||
/* custom AudioNode: gain */ | ||
export class AudioNodeGain { | ||
declare public adjustGainDecibel: (db: number, ms: number) => void | ||
export class AudioNodeGain extends AudioNodeComposite { | ||
constructor (context: AudioContext, params: { gain?: number } = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
@@ -153,19 +151,15 @@ params.gain ??= 0 | ||
/* create and configure underlying Gain node */ | ||
const gain = context.createGain() | ||
gain.gain.setValueAtTime(dBFSToGain(params.gain), context.currentTime) | ||
/* create and return convenient composite */ | ||
const node = (new AudioNodeComposite(gain) as unknown as AudioNodeGain) | ||
node.adjustGainDecibel = (db: number, ms = 10) => { | ||
((node as unknown as AudioNodeComposite).input as GainNode) | ||
.gain.linearRampToValueAtTime(dBFSToGain(db), context.currentTime + ms / 1000) | ||
} | ||
return node | ||
this.gain.setValueAtTime(dBFSToGain(params.gain), this.context.currentTime) | ||
} | ||
adjustGainDecibel (db: number, ms = 10) { | ||
this.gain.linearRampToValueAtTime(dBFSToGain(db), this.context.currentTime + ms / 1000) | ||
} | ||
} | ||
/* custom AudioNode: compressor */ | ||
export class AudioNodeCompressor { | ||
export class AudioNodeCompressor extends AudioNodeComposite { | ||
constructor (context: AudioContext, params: { threshold?: number, attack?: number, | ||
release?: number, knee?: number, ratio?: number } = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
@@ -186,4 +180,4 @@ params.threshold ??= -16.0 | ||
/* return convenient composite */ | ||
return (new AudioNodeComposite(compressor) as unknown as AudioNodeCompressor) | ||
/* configure compressor as sub-chain */ | ||
this.chain(compressor) | ||
} | ||
@@ -193,5 +187,7 @@ } | ||
/* custom AudioNode: limiter */ | ||
export class AudioNodeLimiter { | ||
export class AudioNodeLimiter extends AudioNodeComposite { | ||
constructor (context: AudioContext, params: { threshold?: number, attack?: number, | ||
release?: number, knee?: number, ratio?: number } = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
@@ -212,6 +208,6 @@ params.threshold ??= -3.0 | ||
/* return convenient composite */ | ||
return (new AudioNodeComposite(limiter) as unknown as AudioNodeLimiter) | ||
/* configure limiter as sub-chain */ | ||
this.chain(limiter) | ||
} | ||
} | ||
@@ -29,6 +29,8 @@ /* | ||
/* custom AudioNode: parametric equalizer */ | ||
export class AudioNodeEqualizer { | ||
export class AudioNodeEqualizer extends AudioNodeComposite { | ||
/* global BiquadFilterType */ | ||
constructor (context: AudioContext, params: { bands?: | ||
Array<{ type?: BiquadFilterType, freq?: number, q?: number, gain?: number }> } = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
@@ -66,7 +68,7 @@ params.bands ??= [] | ||
if (params.bands.length === 1) | ||
return new AudioNodeComposite(bands[0]) | ||
this.chain(bands[0]) | ||
else | ||
return new AudioNodeComposite(bands[0], bands[bands.length - 1]) | ||
this.chain(bands[0], bands[bands.length - 1]) | ||
} | ||
} | ||
@@ -30,3 +30,3 @@ /* | ||
/* custom AudioNode: meter */ | ||
export class AudioNodeMeter { | ||
export class AudioNodeMeter extends AudioNodeComposite { | ||
declare public dataT: () => Float32Array | ||
@@ -43,2 +43,4 @@ declare public dataF: () => Float32Array | ||
} = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
@@ -58,2 +60,3 @@ params.fftSize ??= 512 | ||
analyser.smoothingTimeConstant = params.smoothingTimeConstant | ||
this.chain(analyser) | ||
@@ -104,9 +107,7 @@ /* initialize internal state */ | ||
/* wrap node into a composite and allow caller to access internals */ | ||
const composite = (new AudioNodeComposite(analyser) as unknown as AudioNodeMeter) | ||
composite.dataT = () => dataT | ||
composite.dataF = () => dataF | ||
composite.stat = () => stat | ||
return composite | ||
this.dataT = () => dataT | ||
this.dataF = () => dataF | ||
this.stat = () => stat | ||
} | ||
} | ||
@@ -31,3 +31,3 @@ /* | ||
/* custom AudioNode: (noise) gate */ | ||
export class AudioNodeGate { | ||
export class AudioNodeGate extends AudioNodeComposite { | ||
constructor (context: AudioContext, params: { | ||
@@ -42,2 +42,4 @@ threshold?: number, /* open above threshold (dbFS) */ | ||
} = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
@@ -63,4 +65,4 @@ params.threshold ??= -45 | ||
/* leverage Gain node for changing the gain */ | ||
const gain = context.createGain() as GainNode | ||
(meter as unknown as AudioNode).connect(gain) | ||
const gain = context.createGain() | ||
meter.connect(gain) | ||
@@ -150,6 +152,6 @@ /* continuously control gain */ | ||
/* return compose node */ | ||
return (new AudioNodeComposite(meter as unknown as AudioNode, gain)) as unknown as AudioNodeGate | ||
/* configure chain */ | ||
this.chain(meter, gain) | ||
} | ||
} | ||
@@ -26,31 +26,38 @@ /* | ||
/* internal requirements */ | ||
import { AnimationFrameTimer } from "./audio-node-suite-1-util.js" | ||
import { AudioNodeMeter } from "./audio-node-suite-5-meter.js" | ||
import { AnimationFrameTimer } from "./audio-node-suite-1-util.js" | ||
import { AudioNodeMeter } from "./audio-node-suite-5-meter.js" | ||
/* parameter pre-definition */ | ||
type AudioNodeAmplitudeParams = { | ||
fftSize?: number, /* FFT size (default: 512) */ | ||
minDecibels?: number, /* FFT minimum decibels (default: -60) */ | ||
maxDecibels?: number, /* FFT maximum decibels (default: 0) */ | ||
smoothingTimeConstant?: number, /* FFT smoothing time constant (default: 0.8) */ | ||
intervalTime?: number, /* interval time in milliseconds to act (default: 1000 / 60) */ | ||
intervalCount?: number, /* interval length for average calculations (default: 300 / (1000 / 60)) */ | ||
decibelBars?: number[], /* list of decibel layers to draw (default: [ -60, -45, -21, -6 ]) */ | ||
colorBars?: string[], /* list of color layers to draw (default: [ "#306090", "#00b000", "#e0d000", "#e03030" ]) */ | ||
colorBarsDeactive?: string[], /* list of color layers to draw (default: [ "#606060", "#808080", "#a0a0a0", "#c0c0c0" ]) */ | ||
colorRMS?: string, /* color of the RMS decibel (default: "#ffffff") */ | ||
colorBackground?: string, /* color of the background (default: "#000000") */ | ||
horizontal?: boolean /* whether to draw horizontall instead of vertically (default: false) */ | ||
} | ||
/* custom AudioNode: amplitude visualizer */ | ||
export class AudioNodeAmplitude { | ||
declare public deactive: (deactive: boolean) => void | ||
declare public draw: (canvas: HTMLCanvasElement) => void | ||
declare public undraw: (canvas: HTMLCanvasElement) => void | ||
constructor (context: AudioContext, params: { | ||
fftSize?: number, /* FFT size (default: 512) */ | ||
minDecibels?: number, /* FFT minimum decibels (default: -60) */ | ||
maxDecibels?: number, /* FFT maximum decibels (default: 0) */ | ||
smoothingTimeConstant?: number, /* FFT smoothing time constant (default: 0.8) */ | ||
intervalTime?: number, /* interval time in milliseconds to act (default: 1000 / 60) */ | ||
intervalCount?: number /* interval length for average calculations (default: 300 / (1000 / 60)) */ | ||
decibelBars?: number[], /* list of decibel layers to draw (default: [ -60, -45, -21, -6 ]) */ | ||
colorBars?: string[], /* list of color layers to draw (default: [ "#306090", "#00b000", "#e0d000", "#e03030" ]) */ | ||
colorBarsDeactive?: string[], /* list of color layers to draw (default: [ "#606060", "#808080", "#a0a0a0", "#c0c0c0" ]) */ | ||
colorRMS?: string, /* color of the RMS decibel (default: "#ffffff") */ | ||
colorBackground?: string, /* color of the background (default: "#000000") */ | ||
horizontal?: boolean /* whether to draw horizontall instead of vertically (default: false) */ | ||
} = {}) { | ||
/* provide parameter defaults */ | ||
params.fftSize ??= 512 | ||
params.minDecibels ??= -60 | ||
params.maxDecibels ??= 0 | ||
params.smoothingTimeConstant ??= 0.80 | ||
params.intervalTime ??= 1000 / 60 | ||
params.intervalCount ??= Math.round(300 / (1000 / 60)) /* for 300ms RMS/m */ | ||
export class AudioNodeAmplitude extends AudioNodeMeter { | ||
private _canvases = [] as HTMLCanvasElement[] | ||
private _timer: AnimationFrameTimer | null = null | ||
private _deactive = false | ||
private _params: AudioNodeAmplitudeParams | ||
constructor (context: AudioContext, params: AudioNodeAmplitudeParams = {}) { | ||
super(context, { | ||
fftSize: (params.fftSize ??= 512), | ||
minDecibels: (params.minDecibels ??= -60), | ||
maxDecibels: (params.maxDecibels ??= 0), | ||
smoothingTimeConstant: (params.smoothingTimeConstant ??= 0.80), | ||
intervalTime: (params.intervalTime ??= 1000 / 60), | ||
intervalCount: (params.intervalCount ??= Math.round(300 / (1000 / 60))) /* for 300ms RMS/m */ | ||
}) | ||
/* provide parameter defaults (remaining ones) */ | ||
params.decibelBars ??= [ -60, -45, -21, -6 ] | ||
@@ -63,89 +70,76 @@ params.colorBars ??= [ "#306090", "#00b000", "#e0d000", "#e03030" ] | ||
/* create meter */ | ||
const meter = new AudioNodeMeter(context, { | ||
fftSize: params.fftSize, | ||
minDecibels: params.minDecibels, | ||
maxDecibels: params.maxDecibels, | ||
smoothingTimeConstant: params.smoothingTimeConstant, | ||
intervalTime: params.intervalTime, | ||
intervalCount: params.intervalCount | ||
}) | ||
this._params = params | ||
} | ||
/* internal state */ | ||
let canvases = [] as HTMLCanvasElement[] | ||
let timer: AnimationFrameTimer | ||
/* draw spectrum into canvas */ | ||
private _draw (canvas: HTMLCanvasElement) { | ||
/* determine meter information */ | ||
const peak = this.stat().peak | ||
const rms = this.stat().rmsM | ||
/* allow caller to adjust our mute state */ | ||
let deactive = false; | ||
(meter as unknown as AudioNodeAmplitude).deactive = (_deactive) => { deactive = _deactive } | ||
/* prepare canvas */ | ||
const canvasCtx = canvas.getContext("2d") | ||
canvasCtx!.fillStyle = this._params.colorBackground! | ||
canvasCtx!.fillRect(0, 0, canvas.width, canvas.height) | ||
/* draw spectrum into canvas */ | ||
const _draw = (canvas: HTMLCanvasElement) => { | ||
/* determine meter information */ | ||
const peak = meter.stat().peak | ||
const rms = meter.stat().rmsM | ||
/* prepare canvas */ | ||
const canvasCtx = canvas.getContext("2d") | ||
canvasCtx!.fillStyle = params.colorBackground! | ||
canvasCtx!.fillRect(0, 0, canvas.width, canvas.height) | ||
const colorBars = deactive ? params.colorBarsDeactive! : params.colorBars! | ||
const scaleToCanvasUnits = (value: number) => { | ||
if (params.horizontal) | ||
return (value / (params.maxDecibels! - params.minDecibels!)) * canvas.width | ||
else | ||
return (value / (params.maxDecibels! - params.minDecibels!)) * canvas.height | ||
} | ||
const drawSeg = (from: number, to: number, color: string) => { | ||
const b = scaleToCanvasUnits(Math.abs(to - params.minDecibels!)) | ||
const h = scaleToCanvasUnits(Math.abs(to - from)) | ||
canvasCtx!.fillStyle = color | ||
if (params.horizontal) | ||
canvasCtx!.fillRect(b - h, 0, h, canvas.height) | ||
else | ||
canvasCtx!.fillRect(0, canvas.height - b, canvas.width, h) | ||
} | ||
const len = Math.min(params.decibelBars!.length, colorBars.length) | ||
let from = params.minDecibels! | ||
let color = colorBars[0] | ||
for (let i = 0; i < len; i++) { | ||
if (peak < params.decibelBars![i]) | ||
break | ||
else { | ||
const to = params.decibelBars![i] | ||
drawSeg(from, to, color) | ||
color = colorBars[i] | ||
from = to | ||
} | ||
} | ||
drawSeg(from, peak, color) | ||
const h = scaleToCanvasUnits(Math.abs(rms - params.minDecibels!)) | ||
canvasCtx!.fillStyle = params.colorRMS! | ||
if (params.horizontal!) | ||
canvasCtx!.fillRect(h - 1, 0, 1, canvas.height) | ||
const colorBars = this._deactive ? this._params.colorBarsDeactive! : this._params.colorBars! | ||
const scaleToCanvasUnits = (value: number) => { | ||
if (this._params.horizontal) | ||
return (value / (this._params.maxDecibels! - this._params.minDecibels!)) * canvas.width | ||
else | ||
canvasCtx!.fillRect(0, canvas.height - h, canvas.width, 1) | ||
return (value / (this._params.maxDecibels! - this._params.minDecibels!)) * canvas.height | ||
} | ||
/* add/remove canvas for spectrum visualization */ | ||
(meter as unknown as AudioNodeAmplitude).draw = function (canvas: HTMLCanvasElement) { | ||
canvases.push(canvas) | ||
if (canvases.length === 1) { | ||
timer = new AnimationFrameTimer(() => { | ||
for (const canvas of canvases) | ||
_draw(canvas) | ||
}) | ||
const drawSeg = (from: number, to: number, color: string) => { | ||
const b = scaleToCanvasUnits(Math.abs(to - this._params.minDecibels!)) | ||
const h = scaleToCanvasUnits(Math.abs(to - from)) | ||
canvasCtx!.fillStyle = color | ||
if (this._params.horizontal) | ||
canvasCtx!.fillRect(b - h, 0, h, canvas.height) | ||
else | ||
canvasCtx!.fillRect(0, canvas.height - b, canvas.width, h) | ||
} | ||
const len = Math.min(this._params.decibelBars!.length, colorBars.length) | ||
let from = this._params.minDecibels! | ||
let color = colorBars[0] | ||
for (let i = 0; i < len; i++) { | ||
if (peak < this._params.decibelBars![i]) | ||
break | ||
else { | ||
const to = this._params.decibelBars![i] | ||
drawSeg(from, to, color) | ||
color = colorBars[i] | ||
from = to | ||
} | ||
}; | ||
(meter as unknown as AudioNodeAmplitude).undraw = function (canvas: HTMLCanvasElement) { | ||
canvases = canvases.filter((c) => c !== canvas) | ||
if (canvases.length === 0) | ||
timer.clear() | ||
} | ||
drawSeg(from, peak, color) | ||
return (meter as unknown as AudioNodeAmplitude) | ||
const h = scaleToCanvasUnits(Math.abs(rms - this._params.minDecibels!)) | ||
canvasCtx!.fillStyle = this._params.colorRMS! | ||
if (this._params.horizontal!) | ||
canvasCtx!.fillRect(h - 1, 0, 1, canvas.height) | ||
else | ||
canvasCtx!.fillRect(0, canvas.height - h, canvas.width, 1) | ||
} | ||
/* add/remove canvas for spectrum visualization */ | ||
draw (canvas: HTMLCanvasElement) { | ||
this._canvases.push(canvas) | ||
if (this._canvases.length === 1) { | ||
this._timer = new AnimationFrameTimer(() => { | ||
for (const canvas of this._canvases) | ||
this._draw(canvas) | ||
}) | ||
} | ||
} | ||
undraw (canvas: HTMLCanvasElement) { | ||
this._canvases = this._canvases.filter((c) => c !== canvas) | ||
if (this._canvases.length === 0) | ||
this._timer!.clear() | ||
} | ||
/* allow deactivation control */ | ||
deactive (_deactive: boolean) { | ||
this._deactive = _deactive | ||
} | ||
} | ||
@@ -29,26 +29,34 @@ /* | ||
/* parameter pre-definition */ | ||
type AudioNodeSpectrumParams = { | ||
fftSize?: number, /* FFT size (default: 8192) */ | ||
minDecibels?: number, /* FFT minimum decibels (default: -144) */ | ||
maxDecibels?: number, /* FFT maximum decibels (default: 0) */ | ||
smoothingTimeConstant?: number, /* FFT smoothing time constant (default: 0.8) */ | ||
intervalTime?: number, /* interval time in milliseconds to act (default: 1000 / 60) */ | ||
layers?: number[], /* list of decibel layers to draw (default: [ -120, -90, -60, -50, -40, -30, -20, -10 ]) */ | ||
slices?: number[], /* list of frequency slices to draw (default: [ 40, 80, 160, 320, 640, 1280, 2560, 5120, 10240, 20480 ]) */ | ||
colorBackground?: string, /* color of the background (default: "#000000") */ | ||
colorBars?: string, /* color of the spectrum bars (default: "#00cc00") */ | ||
colorLayers?: string, /* color of the decibel layer lines (default: "#009900") */ | ||
colorSlices?: string, /* color of the frequency slice lines (default: "#009900") */ | ||
logarithmic?: boolean /* whether to use logarithmic scale for frequencies (default: true) */ | ||
} | ||
/* custom AudioNode: spectrum visualizer */ | ||
export class AudioNodeSpectrum { | ||
declare public draw: (canvas: HTMLCanvasElement) => void | ||
declare public undraw: (canvas: HTMLCanvasElement) => void | ||
constructor (context: AudioContext, params: { | ||
fftSize?: number, /* FFT size (default: 8192) */ | ||
minDecibels?: number, /* FFT minimum decibels (default: -144) */ | ||
maxDecibels?: number, /* FFT maximum decibels (default: 0) */ | ||
smoothingTimeConstant?: number, /* FFT smoothing time constant (default: 0.8) */ | ||
intervalTime?: number, /* interval time in milliseconds to act (default: 1000 / 60) */ | ||
layers?: number[], /* list of decibel layers to draw (default: [ -120, -90, -60, -50, -40, -30, -20, -10 ]) */ | ||
slices?: number[], /* list of frequency slices to draw (default: [ 40, 80, 160, 320, 640, 1280, 2560, 5120, 10240, 20480 ]) */ | ||
colorBackground?: string, /* color of the background (default: "#000000") */ | ||
colorBars?: string, /* color of the spectrum bars (default: "#00cc00") */ | ||
colorLayers?: string, /* color of the decibel layer lines (default: "#009900") */ | ||
colorSlices?: string, /* color of the frequency slice lines (default: "#009900") */ | ||
logarithmic?: boolean /* whether to use logarithmic scale for frequencies (default: true) */ | ||
} = {}) { | ||
export class AudioNodeSpectrum extends AudioNodeMeter { | ||
private _canvases = [] as HTMLCanvasElement[] | ||
private _timer: AnimationFrameTimer | null = null | ||
private _params: AudioNodeSpectrumParams | ||
constructor (context: AudioContext, params: AudioNodeSpectrumParams = {}) { | ||
super(context, { | ||
fftSize: (params.fftSize ??= 8192), | ||
minDecibels: (params.minDecibels ??= -144), | ||
maxDecibels: (params.maxDecibels ??= 0), | ||
smoothingTimeConstant: (params.smoothingTimeConstant ??= 0.80), | ||
intervalTime: (params.intervalTime ??= 1000 / 60), | ||
intervalCount: 0 | ||
}) | ||
/* provide parameter defaults */ | ||
params.fftSize ??= 8192 | ||
params.minDecibels ??= -144 | ||
params.maxDecibels ??= 0 | ||
params.smoothingTimeConstant ??= 0.80 | ||
params.intervalTime ??= 1000 / 60 | ||
params.layers ??= [ -120, -90, -60, -50, -40, -30, -20, -10 ] | ||
@@ -62,109 +70,95 @@ params.slices ??= [ 40, 80, 160, 320, 640, 1280, 2560, 5120, 10240, 20480 ] | ||
/* create meter */ | ||
const meter = new AudioNodeMeter(context, { | ||
fftSize: params.fftSize!, | ||
minDecibels: params.minDecibels!, | ||
maxDecibels: params.maxDecibels!, | ||
smoothingTimeConstant: params.smoothingTimeConstant!, | ||
intervalTime: params.intervalTime!, | ||
intervalCount: 0 | ||
}) | ||
this._params = params | ||
} | ||
/* internal state */ | ||
let canvases = [] as HTMLCanvasElement[] | ||
let timer: AnimationFrameTimer | ||
/* draw spectrum into canvas */ | ||
private _draw (canvas: HTMLCanvasElement) { | ||
/* determine meter information */ | ||
const data = this.dataF() | ||
/* draw spectrum into canvas */ | ||
const _draw = (canvas: HTMLCanvasElement) => { | ||
/* determine meter information */ | ||
const data = meter.dataF() | ||
/* prepare canvas */ | ||
const canvasCtx = canvas.getContext("2d") | ||
canvasCtx!.fillStyle = this._params.colorBackground! | ||
canvasCtx!.fillRect(0, 0, canvas.width, canvas.height) | ||
/* prepare canvas */ | ||
const canvasCtx = canvas.getContext("2d") | ||
canvasCtx!.fillStyle = params.colorBackground! | ||
canvasCtx!.fillRect(0, 0, canvas.width, canvas.height) | ||
/* helper function for scaling decibel to canvas units */ | ||
const scaleToCanvasUnits = (value: number) => | ||
(value / (this._params.maxDecibels! - this._params.minDecibels!)) * canvas.height | ||
/* helper function for scaling decibel to canvas units */ | ||
const scaleToCanvasUnits = (value: number) => | ||
(value / (params.maxDecibels! - params.minDecibels!)) * canvas.height | ||
/* draw horizontal decibel layers */ | ||
canvasCtx!.fillStyle = this._params.colorLayers! | ||
for (const layer of this._params.layers!) { | ||
const barHeight = scaleToCanvasUnits(Math.abs(layer - this._params.minDecibels!)) | ||
canvasCtx!.fillRect(0, canvas.height - barHeight, canvas.width, 1) | ||
} | ||
/* draw horizontal decibel layers */ | ||
canvasCtx!.fillStyle = params.colorLayers! | ||
for (const layer of params.layers!) { | ||
const barHeight = scaleToCanvasUnits(Math.abs(layer - params.minDecibels!)) | ||
canvasCtx!.fillRect(0, canvas.height - barHeight, canvas.width, 1) | ||
} | ||
/* draw vertical frequency slices */ | ||
canvasCtx!.fillStyle = this._params.colorSlices! | ||
for (const slice of this._params.slices!) { | ||
/* project from logarithmic frequency to canvas x-position */ | ||
const x = Math.log2(slice / 20) * (canvas.width / 10) | ||
canvasCtx!.fillRect(x, 0, 1, canvas.height) | ||
} | ||
/* draw vertical frequency slices */ | ||
canvasCtx!.fillStyle = params.colorSlices! | ||
for (const slice of params.slices!) { | ||
/* project from logarithmic frequency to canvas x-position */ | ||
const x = Math.log2(slice / 20) * (canvas.width / 10) | ||
canvasCtx!.fillRect(x, 0, 1, canvas.height) | ||
} | ||
/* draw the decibel per frequency bars */ | ||
canvasCtx!.fillStyle = this._params.colorBars! | ||
if (this._params.logarithmic!) { | ||
/* iterate over all canvas x-positions */ | ||
for (let posX = 0; posX < canvas.width; posX++) { | ||
const barWidth = 1 | ||
/* draw the decibel per frequency bars */ | ||
canvasCtx!.fillStyle = params.colorBars! | ||
if (params.logarithmic!) { | ||
/* iterate over all canvas x-positions */ | ||
for (let posX = 0; posX < canvas.width; posX++) { | ||
const barWidth = 1 | ||
/* project from canvas x-position to logarithmic frequency */ | ||
const f1 = 20 * Math.pow(2, posX * 10 / canvas.width) | ||
const f2 = 20 * Math.pow(2, (posX + 1) * 10 / canvas.width) | ||
/* project from canvas x-position to logarithmic frequency */ | ||
const f1 = 20 * Math.pow(2, posX * 10 / canvas.width) | ||
const f2 = 20 * Math.pow(2, (posX + 1) * 10 / canvas.width) | ||
/* project from logarithmic frequency to linear FFT decibel value */ | ||
const k1 = Math.round(f1 * (data.length / (20 * Math.pow(2, 10)))) | ||
let k2 = Math.round(f2 * (data.length / (20 * Math.pow(2, 10)))) - 1 | ||
if (k2 < k1) | ||
k2 = k1 | ||
/* project from logarithmic frequency to linear FFT decibel value */ | ||
const k1 = Math.round(f1 * (data.length / (20 * Math.pow(2, 10)))) | ||
let k2 = Math.round(f2 * (data.length / (20 * Math.pow(2, 10)))) - 1 | ||
if (k2 < k1) | ||
k2 = k1 | ||
/* calculate the average decibel in case multiple FFT decibel values are in the range */ | ||
let db = 0 | ||
for (let k = k1; k <= k2; k++) | ||
db += data[k] | ||
db /= (k2 + 1) - k1 | ||
/* calculate the average decibel in case multiple FFT decibel values are in the range */ | ||
let db = 0 | ||
for (let k = k1; k <= k2; k++) | ||
db += data[k] | ||
db /= (k2 + 1) - k1 | ||
/* draw the bar */ | ||
const barHeight = scaleToCanvasUnits(db - params.minDecibels!) | ||
canvasCtx!.fillRect(posX, canvas.height - barHeight, barWidth, barHeight) | ||
} | ||
/* draw the bar */ | ||
const barHeight = scaleToCanvasUnits(db - this._params.minDecibels!) | ||
canvasCtx!.fillRect(posX, canvas.height - barHeight, barWidth, barHeight) | ||
} | ||
else { | ||
let posX = 0 | ||
const barWidth = (canvas.width / data.length) | ||
} | ||
else { | ||
let posX = 0 | ||
const barWidth = (canvas.width / data.length) | ||
/* iterate over all FFT decibel values */ | ||
for (let i = 0; i < data.length; i++) { | ||
const db = data[i] | ||
/* iterate over all FFT decibel values */ | ||
for (let i = 0; i < data.length; i++) { | ||
const db = data[i] | ||
/* draw the bar */ | ||
const barHeight = scaleToCanvasUnits(db - params.minDecibels!) | ||
canvasCtx!.fillRect(posX, canvas.height - barHeight, barWidth - 0.5, barHeight) | ||
/* draw the bar */ | ||
const barHeight = scaleToCanvasUnits(db - this._params.minDecibels!) | ||
canvasCtx!.fillRect(posX, canvas.height - barHeight, barWidth - 0.5, barHeight) | ||
posX += barWidth | ||
} | ||
posX += barWidth | ||
} | ||
} | ||
} | ||
/* add/remove canvas for spectrum visualization */ | ||
(meter as unknown as AudioNodeSpectrum).draw = function (canvas: HTMLCanvasElement) { | ||
canvases.push(canvas) | ||
if (canvases.length === 1) { | ||
timer = new AnimationFrameTimer(() => { | ||
for (const canvas of canvases) | ||
_draw(canvas) | ||
}) | ||
} | ||
}; | ||
(meter as unknown as AudioNodeSpectrum).undraw = function (canvas: HTMLCanvasElement) { | ||
canvases = canvases.filter((c) => c !== canvas) | ||
if (canvases.length === 0) | ||
timer.clear() | ||
/* add/remove canvas for spectrum visualization */ | ||
draw (canvas: HTMLCanvasElement) { | ||
this._canvases.push(canvas) | ||
if (this._canvases.length === 1) { | ||
this._timer = new AnimationFrameTimer(() => { | ||
for (const canvas of this._canvases) | ||
this._draw(canvas) | ||
}) | ||
} | ||
return (meter as unknown as AudioNodeSpectrum) | ||
} | ||
undraw (canvas: HTMLCanvasElement) { | ||
this._canvases = this._canvases.filter((c) => c !== canvas) | ||
if (this._canvases.length === 0) | ||
this._timer!.clear() | ||
} | ||
} | ||
@@ -43,5 +43,6 @@ /* | ||
/* custom AudioNode: voice filter */ | ||
export class AudioNodeVoice { | ||
declare public mute: (mute: boolean) => void | ||
declare public adjustGainDecibel: (db: number, ms?: number) => void | ||
export class AudioNodeVoice extends AudioNodeComposite { | ||
private _mute: AudioNodeMute | ||
private _gain: AudioNodeGain | ||
private _compensate = 0 | ||
constructor (context: AudioContext, params: { | ||
@@ -54,2 +55,4 @@ equalizer?: boolean, /* whether to enable equalizer */ | ||
} = {}) { | ||
super(context) | ||
/* provide parameter defaults */ | ||
@@ -63,8 +66,7 @@ params.equalizer ??= true | ||
/* initialize aggregation input */ | ||
const nodes = [] as any[] | ||
let compensate = 0 | ||
const nodes = [] as AudioNode[] | ||
/* 0. create: mute controller */ | ||
const mute = new AudioNodeMute(context) | ||
nodes.push(mute) | ||
this._mute = new AudioNodeMute(context) | ||
nodes.push(this._mute) | ||
@@ -84,3 +86,2 @@ /* 1. create: cutting equalizer */ | ||
nodes.push(cutEQ) | ||
/* compensate += 0 */ | ||
} | ||
@@ -92,3 +93,2 @@ | ||
nodes.push(gate) | ||
/* compensate += 0 */ | ||
} | ||
@@ -106,3 +106,3 @@ | ||
nodes.push(comp) | ||
compensate += -2.0 | ||
this._compensate += -2.0 | ||
} | ||
@@ -119,8 +119,8 @@ | ||
nodes.push(boostEQ) | ||
compensate += -1.0 | ||
this._compensate += -1.0 | ||
} | ||
/* 5. create: gain control */ | ||
const gain = new AudioNodeGain(context) | ||
nodes.push(gain) | ||
this._gain = new AudioNodeGain(context) | ||
nodes.push(this._gain) | ||
@@ -137,21 +137,24 @@ /* 6. create: limiter */ | ||
nodes.push(limiter) | ||
compensate += -1.0 | ||
this._compensate += -1.0 | ||
} | ||
/* create composite node */ | ||
const composite = AudioNodeComposite.factory(nodes as AudioNode[]) as unknown as AudioNodeVoice | ||
/* configure composite node chain */ | ||
for (let i = 0; i < nodes.length - 1; i++) | ||
nodes[i].connect(nodes[i + 1]) | ||
this.chain(nodes[0], nodes[nodes.length - 1]) | ||
/* provide mute control */ | ||
composite.mute = (_mute: boolean) => | ||
mute.mute(_mute) | ||
/* pre-set gain */ | ||
this.adjustGainDecibel(params.gain, 0) | ||
} | ||
/* provide gain adjustment */ | ||
composite.adjustGainDecibel = (db, ms = 10) => | ||
gain.adjustGainDecibel(compensate + db, ms) | ||
composite.adjustGainDecibel(compensate + params.gain, 0) | ||
/* provide mute control */ | ||
mute (mute: boolean) { | ||
this._mute.mute(mute) | ||
} | ||
/* create return a composite node */ | ||
return composite | ||
/* provide gain adjustment */ | ||
adjustGainDecibel (db: number, ms = 10) { | ||
this._gain.adjustGainDecibel(this._compensate + db, ms) | ||
} | ||
} | ||
@@ -27,16 +27,18 @@ /*! | ||
/* Composite `AudioNode` subclass by wrapping the node chain from an | ||
input node to an output node (if not given, it is the same as the | ||
input node). The `AudioContext` for the node is taken over from the | ||
input node. */ | ||
input node to an output node */ | ||
export class AudioNodeComposite extends AudioNode { | ||
public input: AudioNode /* the underlying input node */ | ||
public output: AudioNode /* the underlying output node */ | ||
public constructor( | ||
context: AudioContext /* context to associate */ | ||
) | ||
chain( | ||
input: AudioNode, /* input node to wrap */ | ||
output?: AudioNode /* output node to wrap */ | ||
) | ||
): void | ||
bypass( | ||
enable: boolean /* whether to bypass the effects of the node chain */ | ||
bypass: boolean /* whether to bypass the effects of the node chain */ | ||
): void | ||
get input(): AudioNode /* getter for underlying input node */ | ||
get output(): AudioNode /* getter for underlying output node */ | ||
static factory ( | ||
context: AudioContext, /* context to associate */ | ||
nodes: Array<AudioNode> /* (still unlinked) list of nodes to chain sequentially */ | ||
@@ -82,4 +84,2 @@ ): AudioNodeComposite | ||
): void | ||
muted( | ||
): boolean | ||
} | ||
@@ -231,5 +231,9 @@ | ||
) | ||
mute( | ||
mute: boolean, /* whether to mute or unmute */ | ||
ms?: number /* linear adjust time in milliseconds (default: 10) */ | ||
): void | ||
adjustGainDecibel( | ||
db: number, /* target decibel */ | ||
ms?: number /* linear adjust time in milliseconds */ | ||
db: number, /* target decibel */ | ||
ms?: number /* linear adjust time in milliseconds */ | ||
): void | ||
@@ -236,0 +240,0 @@ } |
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Major refactor
Supply chain riskPackage has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
166832
3512