
@ryohey/wavelet

Package Overview

  • Dependencies: 2
  • Maintainers: 1
  • Versions: 22

Comparing version 0.4.3 to 0.5.0

.rollup.cache/C/Users/warinside/Documents/GitHub/wavelet/lib/dist/index.test.d.ts


.rollup.cache/C/Users/warinside/Documents/GitHub/wavelet/lib/dist/index.d.ts
export * from "./renderer/conversion";
export * from "./renderer/message";
export * from "./renderer/renderAudio";
export * from "./soundfont/loader";
export * from "./SynthEvent";
export * from "./renderer/conversion";
export * from "./renderer/message";
export * from "./renderer/renderAudio";
export * from "./soundfont/loader";
export * from "./SynthEvent";
//# sourceMappingURL=index.js.map


.rollup.cache/C/Users/warinside/Documents/GitHub/wavelet/lib/dist/processor/SynthEventHandler.d.ts

@@ -6,2 +6,3 @@ import { ImmediateEvent, MIDIEventBody, SynthEvent } from "../SynthEvent";

private scheduledEvents;
private currentEvents;
private rpnEvents;

@@ -13,3 +14,2 @@ private bankSelectMSB;

processScheduledEvents(): void;
private removeProcessedEvents;
handleImmediateEvent(e: ImmediateEvent): void;

@@ -16,0 +16,0 @@ handleDelayableEvent(e: MIDIEventBody): void;

import { MIDIControlEvents } from "midifile-ts";
import { insertSorted } from "./insertSorted";
import { logger } from "./logger";

@@ -6,2 +7,3 @@ export class SynthEventHandler {

scheduledEvents = [];
currentEvents = [];
rpnEvents = {};

@@ -19,7 +21,6 @@ bankSelectMSB = {};

// handle in process
this.scheduledEvents.push({
insertSorted(this.scheduledEvents, {
...e,
receivedFrame: this.currentFrame,
isProcessed: false,
});
scheduledFrame: this.currentFrame + e.delayTime,
}, "scheduledFrame");
}

@@ -31,17 +32,21 @@ else {

processScheduledEvents() {
for (const e of this.scheduledEvents) {
if (!e.isProcessed &&
e.receivedFrame + e.delayTime <= this.currentFrame) {
this.handleDelayableEvent(e.midi);
e.isProcessed = true;
if (this.scheduledEvents.length === 0) {
return;
}
while (true) {
const e = this.scheduledEvents[0];
if (e === undefined || e.scheduledFrame > this.currentFrame) {
// scheduledEvents are sorted by scheduledFrame,
// so we can break early instead of iterating through all scheduledEvents,
break;
}
this.scheduledEvents.shift();
this.currentEvents.unshift(e);
}
this.removeProcessedEvents();
}
removeProcessedEvents() {
for (let i = this.scheduledEvents.length - 1; i >= 0; i--) {
const ev = this.scheduledEvents[i];
if (ev.isProcessed) {
this.scheduledEvents.splice(i, 1);
while (true) {
const e = this.currentEvents.pop();
if (e === undefined) {
break;
}
this.handleDelayableEvent(e.midi);
}

@@ -167,9 +172,6 @@ }

removeScheduledEvents(channel) {
for (const e of this.scheduledEvents) {
if (e.midi.channel === channel) {
e.isProcessed = true;
}
}
this.scheduledEvents = this.scheduledEvents.filter((e) => e.midi.channel !== channel);
this.currentEvents = this.currentEvents.filter((e) => e.midi.channel !== channel);
}
}
//# sourceMappingURL=SynthEventHandler.js.map

@@ -163,4 +163,3 @@ import { logger } from "./logger";

for (let key in state.oscillators) {
for (let i = state.oscillators[key].length - 1; i >= 0; i--) {
const oscillator = state.oscillators[key][i];
state.oscillators[key] = state.oscillators[key].filter((oscillator) => {
oscillator.speed = Math.pow(2, state.pitchBend / 12);

@@ -172,5 +171,6 @@ oscillator.volume = state.volume * state.expression;

if (!oscillator.isPlaying) {
state.oscillators[key].splice(i, 1);
return false;
}
}
return true;
});
}

@@ -177,0 +177,0 @@ }

@@ -61,5 +61,6 @@ import { AmplitudeEnvelope } from "./AmplitudeEnvelope";

const pitchModulation = pitchLFOValue * this.modulation * (this.modulationDepthRange / 1200);
const modulatedSpeed = speed * (1 + pitchModulation);
for (let i = 0; i < outputs[0].length; ++i) {
const index = Math.floor(this.sampleIndex);
const advancedIndex = this.sampleIndex + speed * (1 + pitchModulation);
const advancedIndex = this.sampleIndex + modulatedSpeed;
let loopIndex = null;

@@ -66,0 +67,0 @@ if (this.sample.loop !== null && advancedIndex >= this.sample.loop.end) {

@@ -9,2 +9,3 @@ import { LoadSampleEvent, SynthEvent } from "../SynthEvent";

sampleRate: number;
bufferSize?: number;
}

@@ -11,0 +12,0 @@ export interface CancelMessage {

import { AudioData, LoadSampleEvent, SynthEvent } from "..";
export interface CancellationToken {
cancelled: boolean;
export interface RenderAudioOptions {
sampleRate?: number;
onProgress?: (numFrames: number, totalFrames: number) => void;
cancel?: () => boolean;
bufferSize?: number;
waitForEventLoop?: () => Promise<void>;
}
export declare const renderAudio: (samples: LoadSampleEvent[], events: SynthEvent[], sampleRate: number, onProgress?: ((numFrames: number, totalFrames: number) => void) | undefined, cancel?: Readonly<CancellationToken> | undefined) => Promise<AudioData>;
export declare const renderAudio: (samples: LoadSampleEvent[], events: SynthEvent[], options?: RenderAudioOptions | undefined) => Promise<AudioData>;
import { SynthProcessorCore } from "../processor/SynthProcessorCore";
// returns in frame unit
const getSongLength = (events) => Math.max(...events.map((e) => (e.type === "midi" ? e.delayTime : 0)));
const Sleep = (time) => new Promise((resolve) => setTimeout(resolve, time));
export const renderAudio = async (samples, events, sampleRate, onProgress, cancel) => {
export const renderAudio = async (samples, events, options) => {
let currentFrame = 0;
const sampleRate = options?.sampleRate ?? 44100;
const bufSize = options?.bufferSize ?? 500;
const synth = new SynthProcessorCore(sampleRate, () => currentFrame);

@@ -11,3 +12,2 @@ samples.forEach((e) => synth.addEvent(e));

const songLengthSec = getSongLength(events);
const bufSize = 128;
const iterCount = Math.ceil(songLengthSec / bufSize);

@@ -17,4 +17,6 @@ const audioBufferSize = iterCount * bufSize;

const rightData = new Float32Array(audioBufferSize);
const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)];
for (let i = 0; i < iterCount; i++) {
const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)];
buffer[0].fill(0);
buffer[1].fill(0);
synth.process(buffer);

@@ -25,7 +27,7 @@ const offset = i * bufSize;

currentFrame += bufSize;
onProgress?.(offset, audioBufferSize);
// give a chance to terminate the loop
// give a chance to terminate the loop or update progress
if (i % 1000 === 0) {
await Sleep(0);
if (cancel?.cancelled) {
await options?.waitForEventLoop?.();
options?.onProgress?.(offset, audioBufferSize);
if (options?.cancel?.()) {
throw new Error("renderAudio cancelled");

@@ -32,0 +34,0 @@ }
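
The change above replaces the positional renderAudio(samples, events, sampleRate, onProgress, cancel) signature with a single options object. A minimal sketch of calling the 0.5.0 API from application code — the caller is expected to supply sample-load events from the SoundFont loader and MIDI events, and the option values shown are simply the defaults visible in the implementation above:

import { renderAudio, AudioData, LoadSampleEvent, SynthEvent } from "@ryohey/wavelet"

async function render(samples: LoadSampleEvent[], events: SynthEvent[]): Promise<AudioData> {
  return await renderAudio(samples, events, {
    sampleRate: 44100, // default when omitted
    bufferSize: 500,   // frames rendered per iteration; default when omitted
    onProgress: (numFrames, totalFrames) => console.log(`${numFrames}/${totalFrames} frames`),
    cancel: () => false, // return true to abort; renderAudio then throws "renderAudio cancelled"
    waitForEventLoop: () => new Promise<void>((resolve) => setTimeout(resolve, 0)),
  })
}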

@@ -0,25 +1,25 @@

import { FastSleep } from "./FastSleep";
import { renderAudio } from "./renderAudio";
let cancel = null;
let cancelled = false;
const fastSleep = new FastSleep();
onmessage = async (e) => {
switch (e.data.type) {
case "cancel": {
if (cancel !== null) {
cancel.cancelled = true;
}
cancelled = true;
break;
}
case "start": {
if (cancel !== null) {
throw new Error("rendering is already started.");
}
const { samples, events, sampleRate } = e.data;
cancel = {
cancelled: false,
};
const { samples, events, sampleRate, bufferSize } = e.data;
try {
const audioData = await renderAudio(samples, events, sampleRate, (numBytes, totalBytes) => postMessage({
type: "progress",
numBytes,
totalBytes,
}), cancel);
const audioData = await renderAudio(samples, events, {
sampleRate,
bufferSize,
cancel: () => cancelled,
waitForEventLoop: async () => await fastSleep.wait(),
onProgress: (numBytes, totalBytes) => postMessage({
type: "progress",
numBytes,
totalBytes,
}),
});
postMessage({ type: "complete", audioData }, [

@@ -26,0 +26,0 @@ audioData.leftData,
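
For context, the worker now tracks a plain cancelled flag and forwards cancel, waitForEventLoop and onProgress callbacks to renderAudio. A hedged sketch of driving this worker from the main thread — the worker URL, bundler setup and placeholder inputs are assumptions; only the message shapes come from the code above:

import { LoadSampleEvent, SynthEvent } from "@ryohey/wavelet"

// Assumed bundler-style worker instantiation; adjust the URL to your build setup.
const worker = new Worker(new URL("./renderAudio.worker.js", import.meta.url))

worker.onmessage = (e: MessageEvent) => {
  switch (e.data.type) {
    case "progress":
      console.log(`rendered ${e.data.numBytes} / ${e.data.totalBytes}`)
      break
    case "complete":
      console.log("finished rendering", e.data.audioData)
      break
  }
}

// Placeholder inputs; in practice these come from the SoundFont loader and a parsed MIDI file.
const samples: LoadSampleEvent[] = []
const events: SynthEvent[] = []

// "start" carries exactly the fields the worker destructures from e.data.
worker.postMessage({ type: "start", samples, events, sampleRate: 44100, bufferSize: 500 })

// Sending "cancel" later flips the worker's cancelled flag, which renderAudio polls via cancel().
worker.postMessage({ type: "cancel" })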

export * from "./renderer/conversion";
export * from "./renderer/message";
export * from "./renderer/renderAudio";
export * from "./soundfont/loader";
export * from "./SynthEvent";

@@ -16,2 +16,792 @@ 'use strict';

class Logger {
enabled = true;
log(...args) {
if (this.enabled) {
console.log(...args);
}
}
warn(...args) {
if (this.enabled) {
console.warn(...args);
}
}
error(...args) {
if (this.enabled) {
console.error(...args);
}
}
}
const logger = new Logger();
logger.enabled = false;
class SampleTable {
samples = {};
addSample(sample, bank, instrument, keyRange, velRange) {
for (let i = keyRange[0]; i <= keyRange[1]; i++) {
if (this.samples[bank] === undefined) {
this.samples[bank] = {};
}
if (this.samples[bank][instrument] === undefined) {
this.samples[bank][instrument] = {};
}
if (this.samples[bank][instrument][i] === undefined) {
this.samples[bank][instrument][i] = [];
}
this.samples[bank][instrument][i].push({ ...sample, velRange });
}
}
getSamples(bank, instrument, pitch, velocity) {
const samples = this.samples?.[bank]?.[instrument]?.[pitch];
return (samples?.filter((s) => velocity >= s.velRange[0] && velocity <= s.velRange[1]) ?? []);
}
}
var MIDIControlEvents = {
MSB_BANK: 0x00,
MSB_MODWHEEL: 0x01,
MSB_BREATH: 0x02,
MSB_FOOT: 0x04,
MSB_PORTAMENTO_TIME: 0x05,
MSB_DATA_ENTRY: 0x06,
MSB_MAIN_VOLUME: 0x07,
MSB_BALANCE: 0x08,
MSB_PAN: 0x0a,
MSB_EXPRESSION: 0x0b,
MSB_EFFECT1: 0x0c,
MSB_EFFECT2: 0x0d,
MSB_GENERAL_PURPOSE1: 0x10,
MSB_GENERAL_PURPOSE2: 0x11,
MSB_GENERAL_PURPOSE3: 0x12,
MSB_GENERAL_PURPOSE4: 0x13,
LSB_BANK: 0x20,
LSB_MODWHEEL: 0x21,
LSB_BREATH: 0x22,
LSB_FOOT: 0x24,
LSB_PORTAMENTO_TIME: 0x25,
LSB_DATA_ENTRY: 0x26,
LSB_MAIN_VOLUME: 0x27,
LSB_BALANCE: 0x28,
LSB_PAN: 0x2a,
LSB_EXPRESSION: 0x2b,
LSB_EFFECT1: 0x2c,
LSB_EFFECT2: 0x2d,
LSB_GENERAL_PURPOSE1: 0x30,
LSB_GENERAL_PURPOSE2: 0x31,
LSB_GENERAL_PURPOSE3: 0x32,
LSB_GENERAL_PURPOSE4: 0x33,
SUSTAIN: 0x40,
PORTAMENTO: 0x41,
SOSTENUTO: 0x42,
SUSTENUTO: 0x42,
SOFT_PEDAL: 0x43,
LEGATO_FOOTSWITCH: 0x44,
HOLD2: 0x45,
SC1_SOUND_VARIATION: 0x46,
SC2_TIMBRE: 0x47,
SC3_RELEASE_TIME: 0x48,
SC4_ATTACK_TIME: 0x49,
SC5_BRIGHTNESS: 0x4a,
SC6: 0x4b,
SC7: 0x4c,
SC8: 0x4d,
SC9: 0x4e,
SC10: 0x4f,
GENERAL_PURPOSE5: 0x50,
GENERAL_PURPOSE6: 0x51,
GENERAL_PURPOSE7: 0x52,
GENERAL_PURPOSE8: 0x53,
PORTAMENTO_CONTROL: 0x54,
E1_REVERB_DEPTH: 0x5b,
E2_TREMOLO_DEPTH: 0x5c,
E3_CHORUS_DEPTH: 0x5d,
E4_DETUNE_DEPTH: 0x5e,
E5_PHASER_DEPTH: 0x5f,
DATA_INCREMENT: 0x60,
DATA_DECREMENT: 0x61,
NONREG_PARM_NUM_LSB: 0x62,
NONREG_PARM_NUM_MSB: 0x63,
REGIST_PARM_NUM_LSB: 0x64,
REGIST_PARM_NUM_MSB: 0x65,
ALL_SOUNDS_OFF: 0x78,
RESET_CONTROLLERS: 0x79,
LOCAL_CONTROL_SWITCH: 0x7a,
ALL_NOTES_OFF: 0x7b,
OMNI_OFF: 0x7c,
OMNI_ON: 0x7d,
MONO1: 0x7e,
MONO2: 0x7f,
};
function toCharCodes(str) {
var bytes = [];
for (var i = 0; i < str.length; i++) {
bytes.push(str.charCodeAt(i));
}
return bytes;
}
/** @class */ ((function () {
function Buffer() {
this.data = [];
this.position = 0;
}
Object.defineProperty(Buffer.prototype, "length", {
get: function () {
return this.data.length;
},
enumerable: false,
configurable: true
});
Buffer.prototype.writeByte = function (v) {
this.data.push(v);
this.position++;
};
Buffer.prototype.writeStr = function (str) {
this.writeBytes(toCharCodes(str));
};
Buffer.prototype.writeInt32 = function (v) {
this.writeByte((v >> 24) & 0xff);
this.writeByte((v >> 16) & 0xff);
this.writeByte((v >> 8) & 0xff);
this.writeByte(v & 0xff);
};
Buffer.prototype.writeInt16 = function (v) {
this.writeByte((v >> 8) & 0xff);
this.writeByte(v & 0xff);
};
Buffer.prototype.writeBytes = function (arr) {
var _this = this;
arr.forEach(function (v) { return _this.writeByte(v); });
};
Buffer.prototype.writeChunk = function (id, func) {
this.writeStr(id);
var chunkBuf = new Buffer();
func(chunkBuf);
this.writeInt32(chunkBuf.length);
this.writeBytes(chunkBuf.data);
};
Buffer.prototype.toBytes = function () {
return new Uint8Array(this.data);
};
return Buffer;
})());
// https://gist.github.com/fmal/763d9c953c5a5f8b8f9099dbc58da55e
function insertSorted(arr, item, prop) {
let low = 0;
let high = arr.length;
let mid;
while (low < high) {
mid = (low + high) >>> 1; // like (num / 2) but faster
if (arr[mid][prop] < item[prop]) {
low = mid + 1;
}
else {
high = mid;
}
}
arr.splice(low, 0, item);
}
class SynthEventHandler {
processor;
scheduledEvents = [];
currentEvents = [];
rpnEvents = {};
bankSelectMSB = {};
constructor(processor) {
this.processor = processor;
}
get currentFrame() {
return this.processor.currentFrame;
}
addEvent(e) {
logger.log(e);
if ("delayTime" in e) {
// handle in process
insertSorted(this.scheduledEvents, {
...e,
scheduledFrame: this.currentFrame + e.delayTime,
}, "scheduledFrame");
}
else {
this.handleImmediateEvent(e);
}
}
processScheduledEvents() {
if (this.scheduledEvents.length === 0) {
return;
}
while (true) {
const e = this.scheduledEvents[0];
if (e === undefined || e.scheduledFrame > this.currentFrame) {
// scheduledEvents are sorted by scheduledFrame,
// so we can break early instead of iterating through all scheduledEvents,
break;
}
this.scheduledEvents.shift();
this.currentEvents.unshift(e);
}
while (true) {
const e = this.currentEvents.pop();
if (e === undefined) {
break;
}
this.handleDelayableEvent(e.midi);
}
}
handleImmediateEvent(e) {
switch (e.type) {
case "loadSample":
this.processor.loadSample(e.sample, e.bank, e.instrument, e.keyRange, e.velRange);
break;
}
}
handleDelayableEvent(e) {
logger.log("handle delayable event", e);
switch (e.type) {
case "channel": {
switch (e.subtype) {
case "noteOn":
this.processor.noteOn(e.channel, e.noteNumber, e.velocity);
break;
case "noteOff":
this.processor.noteOff(e.channel, e.noteNumber);
break;
case "pitchBend":
this.processor.pitchBend(e.channel, e.value);
break;
case "programChange":
this.processor.programChange(e.channel, e.value);
break;
case "controller": {
switch (e.controllerType) {
case MIDIControlEvents.NONREG_PARM_NUM_MSB:
case MIDIControlEvents.NONREG_PARM_NUM_LSB: // NRPN LSB
// Delete the RPN so that NRPN data events are not sent
delete this.rpnEvents[e.channel];
break;
case MIDIControlEvents.REGIST_PARM_NUM_MSB: {
if (e.value === 127) {
delete this.rpnEvents[e.channel];
}
else {
this.rpnEvents[e.channel] = {
...this.rpnEvents[e.channel],
rpnMSB: e,
};
}
break;
}
case MIDIControlEvents.REGIST_PARM_NUM_LSB: {
if (e.value === 127) {
delete this.rpnEvents[e.channel];
}
else {
this.rpnEvents[e.channel] = {
...this.rpnEvents[e.channel],
rpnLSB: e,
};
}
break;
}
case MIDIControlEvents.MSB_DATA_ENTRY: {
const rpn = {
...this.rpnEvents[e.channel],
dataMSB: e,
};
this.rpnEvents[e.channel] = rpn;
// In case of pitch bend sensitivity,
// send without waiting for Data LSB event
if (rpn.rpnLSB?.value === 0) {
this.processor.setPitchBendSensitivity(e.channel, rpn.dataMSB.value);
}
break;
}
case MIDIControlEvents.LSB_DATA_ENTRY: {
this.rpnEvents[e.channel] = {
...this.rpnEvents[e.channel],
dataLSB: e,
};
// TODO: Send other RPN events
break;
}
case MIDIControlEvents.MSB_MAIN_VOLUME:
this.processor.setMainVolume(e.channel, e.value);
break;
case MIDIControlEvents.MSB_EXPRESSION:
this.processor.expression(e.channel, e.value);
break;
case MIDIControlEvents.ALL_SOUNDS_OFF:
this.removeScheduledEvents(e.channel);
this.processor.allSoundsOff(e.channel);
break;
case MIDIControlEvents.SUSTAIN:
this.processor.hold(e.channel, e.value);
break;
case MIDIControlEvents.MSB_PAN:
this.processor.setPan(e.channel, e.value);
break;
case MIDIControlEvents.MSB_MODWHEEL:
this.processor.modulation(e.channel, e.value);
break;
case MIDIControlEvents.MSB_BANK:
this.bankSelectMSB[e.channel] = e.value;
break;
case MIDIControlEvents.LSB_BANK: {
const msb = this.bankSelectMSB[e.channel];
if (msb !== undefined) {
const bank = (msb << 7) + e.value;
this.processor.bankSelect(e.channel, bank);
}
break;
}
case MIDIControlEvents.RESET_CONTROLLERS:
this.processor.resetChannel(e.channel);
break;
}
break;
}
}
break;
}
}
}
removeScheduledEvents(channel) {
this.scheduledEvents = this.scheduledEvents.filter((e) => e.midi.channel !== channel);
this.currentEvents = this.currentEvents.filter((e) => e.midi.channel !== channel);
}
}
var EnvelopePhase;
(function (EnvelopePhase) {
EnvelopePhase[EnvelopePhase["attack"] = 0] = "attack";
EnvelopePhase[EnvelopePhase["decay"] = 1] = "decay";
EnvelopePhase[EnvelopePhase["sustain"] = 2] = "sustain";
EnvelopePhase[EnvelopePhase["release"] = 3] = "release";
EnvelopePhase[EnvelopePhase["forceStop"] = 4] = "forceStop";
EnvelopePhase[EnvelopePhase["stopped"] = 5] = "stopped";
})(EnvelopePhase || (EnvelopePhase = {}));
const forceStopReleaseTime = 0.1;
class AmplitudeEnvelope {
parameter;
phase = EnvelopePhase.attack;
lastAmplitude = 0;
sampleRate;
constructor(parameter, sampleRate) {
this.parameter = parameter;
this.sampleRate = sampleRate;
}
noteOn() {
this.phase = EnvelopePhase.attack;
}
noteOff() {
if (this.phase !== EnvelopePhase.forceStop) {
this.phase = EnvelopePhase.release;
}
}
// Rapidly decrease the volume. This method ignores release time parameter
forceStop() {
this.phase = EnvelopePhase.forceStop;
}
getAmplitude(bufferSize) {
const { attackTime, decayTime, sustainLevel, releaseTime } = this.parameter;
const { sampleRate } = this;
// Attack
switch (this.phase) {
case EnvelopePhase.attack: {
const amplificationPerFrame = (1 / (attackTime * sampleRate)) * bufferSize;
const value = this.lastAmplitude + amplificationPerFrame;
if (value >= 1) {
this.phase = EnvelopePhase.decay;
this.lastAmplitude = 1;
return 1;
}
this.lastAmplitude = value;
return value;
}
case EnvelopePhase.decay: {
const attenuationPerFrame = (1 / (decayTime * sampleRate)) * bufferSize;
const value = this.lastAmplitude - attenuationPerFrame;
if (value <= sustainLevel) {
if (sustainLevel <= 0) {
this.phase = EnvelopePhase.stopped;
this.lastAmplitude = 0;
return 0;
}
else {
this.phase = EnvelopePhase.sustain;
this.lastAmplitude = sustainLevel;
return sustainLevel;
}
}
this.lastAmplitude = value;
return value;
}
case EnvelopePhase.sustain: {
return sustainLevel;
}
case EnvelopePhase.release: {
const attenuationPerFrame = (1 / (releaseTime * sampleRate)) * bufferSize;
const value = this.lastAmplitude - attenuationPerFrame;
if (value <= 0) {
this.phase = EnvelopePhase.stopped;
this.lastAmplitude = 0;
return 0;
}
this.lastAmplitude = value;
return value;
}
case EnvelopePhase.forceStop: {
const attenuationPerFrame = (1 / (forceStopReleaseTime * sampleRate)) * bufferSize;
const value = this.lastAmplitude - attenuationPerFrame;
if (value <= 0) {
this.phase = EnvelopePhase.stopped;
this.lastAmplitude = 0;
return 0;
}
this.lastAmplitude = value;
return value;
}
case EnvelopePhase.stopped: {
return 0;
}
}
}
get isPlaying() {
return this.phase !== EnvelopePhase.stopped;
}
}
class LFO {
// Hz
frequency = 5;
phase = 0;
sampleRate;
constructor(sampleRate) {
this.sampleRate = sampleRate;
}
getValue(bufferSize) {
const phase = this.phase;
this.phase +=
((Math.PI * 2 * this.frequency) / this.sampleRate) * bufferSize;
return Math.sin(phase);
}
}
class WavetableOscillator {
sample;
sampleIndex = 0;
_isPlaying = false;
_isNoteOff = false;
baseSpeed = 1;
envelope;
pitchLFO;
sampleRate;
speed = 1;
// 0 to 1
velocity = 1;
// 0 to 1
volume = 1;
modulation = 0;
// cent
modulationDepthRange = 50;
// -1 to 1
pan = 0;
// This oscillator should be note off when hold pedal off
isHold = false;
constructor(sample, sampleRate) {
this.sample = sample;
this.sampleRate = sampleRate;
this.envelope = new AmplitudeEnvelope(sample.amplitudeEnvelope, sampleRate);
this.pitchLFO = new LFO(sampleRate);
}
noteOn(pitch, velocity) {
this.velocity = velocity;
this._isPlaying = true;
this.sampleIndex = this.sample.sampleStart;
this.baseSpeed = Math.pow(2, ((pitch - this.sample.pitch) / 12) * this.sample.scaleTuning);
this.pitchLFO.frequency = 5;
this.envelope.noteOn();
}
noteOff() {
this.envelope.noteOff();
this._isNoteOff = true;
}
forceStop() {
this.envelope.forceStop();
}
process(outputs) {
if (!this._isPlaying) {
return;
}
const speed = (this.baseSpeed * this.speed * this.sample.sampleRate) / this.sampleRate;
const volume = this.velocity * this.volume * this.sample.volume;
// zero to pi/2
const panTheta = ((Math.min(1, Math.max(-1, this.pan + this.sample.pan)) + 1) * Math.PI) /
4;
const leftPanVolume = Math.cos(panTheta);
const rightPanVolume = Math.sin(panTheta);
const gain = this.envelope.getAmplitude(outputs[0].length);
const leftGain = gain * volume * leftPanVolume;
const rightGain = gain * volume * rightPanVolume;
const pitchLFOValue = this.pitchLFO.getValue(outputs[0].length);
const pitchModulation = pitchLFOValue * this.modulation * (this.modulationDepthRange / 1200);
const modulatedSpeed = speed * (1 + pitchModulation);
for (let i = 0; i < outputs[0].length; ++i) {
const index = Math.floor(this.sampleIndex);
const advancedIndex = this.sampleIndex + modulatedSpeed;
let loopIndex = null;
if (this.sample.loop !== null && advancedIndex >= this.sample.loop.end) {
loopIndex =
this.sample.loop.start + (advancedIndex - Math.floor(advancedIndex));
}
const nextIndex = loopIndex !== null
? Math.floor(loopIndex)
: Math.min(index + 1, this.sample.sampleEnd - 1);
// linear interpolation
const current = this.sample.buffer[index];
const next = this.sample.buffer[nextIndex];
const level = current + (next - current) * (this.sampleIndex - index);
outputs[0][i] += level * leftGain;
outputs[1][i] += level * rightGain;
this.sampleIndex = loopIndex ?? advancedIndex;
if (this.sampleIndex >= this.sample.sampleEnd) {
this._isPlaying = false;
break;
}
}
}
get isPlaying() {
return this._isPlaying && this.envelope.isPlaying;
}
get isNoteOff() {
return this._isNoteOff;
}
get exclusiveClass() {
return this.sample.exclusiveClass;
}
}
const initialChannelState = () => ({
volume: 1,
bank: 0,
instrument: 0,
pitchBend: 0,
pitchBendSensitivity: 2,
oscillators: {},
expression: 1,
pan: 0,
modulation: 0,
hold: false,
});
const RHYTHM_CHANNEL = 9;
const RHYTHM_BANK = 128;
class SynthProcessorCore {
sampleTable = new SampleTable();
channels = {};
eventHandler;
sampleRate;
getCurrentFrame;
constructor(sampleRate, getCurrentFrame) {
this.eventHandler = new SynthEventHandler(this);
this.sampleRate = sampleRate;
this.getCurrentFrame = getCurrentFrame;
}
get currentFrame() {
return this.getCurrentFrame();
}
getSamples(channel, pitch, velocity) {
const state = this.getChannelState(channel);
// Play drums for CH.10
const bank = channel === RHYTHM_CHANNEL ? RHYTHM_BANK : state.bank;
return this.sampleTable.getSamples(bank, state.instrument, pitch, velocity);
}
loadSample(sample, bank, instrument, keyRange, velRange) {
const _sample = {
...sample,
buffer: new Float32Array(sample.buffer),
};
this.sampleTable.addSample(_sample, bank, instrument, keyRange, velRange);
}
addEvent(e) {
this.eventHandler.addEvent(e);
}
noteOn(channel, pitch, velocity) {
const state = this.getChannelState(channel);
const samples = this.getSamples(channel, pitch, velocity);
if (samples.length === 0) {
logger.warn(`There is no sample for noteNumber ${pitch} in instrument ${state.instrument} in bank ${state.bank}`);
return;
}
for (const sample of samples) {
const oscillator = new WavetableOscillator(sample, this.sampleRate);
const volume = velocity / 0x80;
oscillator.noteOn(pitch, volume);
if (state.oscillators[pitch] === undefined) {
state.oscillators[pitch] = [];
}
if (sample.exclusiveClass !== undefined) {
for (const key in state.oscillators) {
for (const osc of state.oscillators[key]) {
if (osc.exclusiveClass === sample.exclusiveClass) {
osc.forceStop();
}
}
}
}
state.oscillators[pitch].push(oscillator);
}
}
noteOff(channel, pitch) {
const state = this.getChannelState(channel);
if (state.oscillators[pitch] === undefined) {
return;
}
for (const osc of state.oscillators[pitch]) {
if (!osc.isNoteOff) {
if (state.hold) {
osc.isHold = true;
}
else {
osc.noteOff();
}
}
}
}
pitchBend(channel, value) {
const state = this.getChannelState(channel);
state.pitchBend = (value / 0x2000 - 1) * state.pitchBendSensitivity;
}
programChange(channel, value) {
const state = this.getChannelState(channel);
state.instrument = value;
}
setPitchBendSensitivity(channel, value) {
const state = this.getChannelState(channel);
state.pitchBendSensitivity = value;
}
setMainVolume(channel, value) {
const state = this.getChannelState(channel);
state.volume = value / 0x80;
}
expression(channel, value) {
const state = this.getChannelState(channel);
state.expression = value / 0x80;
}
allSoundsOff(channel) {
const state = this.getChannelState(channel);
for (const key in state.oscillators) {
for (const osc of state.oscillators[key]) {
osc.forceStop();
}
}
}
hold(channel, value) {
const hold = value >= 64;
const state = this.getChannelState(channel);
state.hold = hold;
if (hold) {
return;
}
for (const key in state.oscillators) {
for (const osc of state.oscillators[key]) {
if (osc.isHold) {
osc.noteOff();
}
}
}
}
setPan(channel, value) {
const state = this.getChannelState(channel);
state.pan = (value / 127 - 0.5) * 2;
}
bankSelect(channel, value) {
const state = this.getChannelState(channel);
state.bank = value;
}
modulation(channel, value) {
const state = this.getChannelState(channel);
state.modulation = value / 0x80;
}
resetChannel(channel) {
delete this.channels[channel];
}
getChannelState(channel) {
const state = this.channels[channel];
if (state !== undefined) {
return state;
}
const newState = initialChannelState();
this.channels[channel] = newState;
return newState;
}
process(outputs) {
this.eventHandler.processScheduledEvents();
for (const channel in this.channels) {
const state = this.channels[channel];
for (let key in state.oscillators) {
state.oscillators[key] = state.oscillators[key].filter((oscillator) => {
oscillator.speed = Math.pow(2, state.pitchBend / 12);
oscillator.volume = state.volume * state.expression;
oscillator.pan = state.pan;
oscillator.modulation = state.modulation;
oscillator.process([outputs[0], outputs[1]]);
if (!oscillator.isPlaying) {
return false;
}
return true;
});
}
}
// master volume
const masterVolume = 0.3;
for (let i = 0; i < outputs[0].length; ++i) {
outputs[0][i] *= masterVolume;
outputs[1][i] *= masterVolume;
}
}
}
// returns in frame unit
const getSongLength = (events) => Math.max(...events.map((e) => (e.type === "midi" ? e.delayTime : 0)));
const renderAudio = async (samples, events, options) => {
let currentFrame = 0;
const sampleRate = options?.sampleRate ?? 44100;
const bufSize = options?.bufferSize ?? 500;
const synth = new SynthProcessorCore(sampleRate, () => currentFrame);
samples.forEach((e) => synth.addEvent(e));
events.forEach((e) => synth.addEvent(e));
const songLengthSec = getSongLength(events);
const iterCount = Math.ceil(songLengthSec / bufSize);
const audioBufferSize = iterCount * bufSize;
const leftData = new Float32Array(audioBufferSize);
const rightData = new Float32Array(audioBufferSize);
const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)];
for (let i = 0; i < iterCount; i++) {
buffer[0].fill(0);
buffer[1].fill(0);
synth.process(buffer);
const offset = i * bufSize;
leftData.set(buffer[0], offset);
rightData.set(buffer[0], offset);
currentFrame += bufSize;
// give a chance to terminate the loop or update progress
if (i % 1000 === 0) {
await options?.waitForEventLoop?.();
options?.onProgress?.(offset, audioBufferSize);
if (options?.cancel?.()) {
throw new Error("renderAudio cancelled");
}
}
}
return {
length: audioBufferSize,
leftData: leftData.buffer,
rightData: rightData.buffer,
sampleRate,
};
};
var bin = {};

@@ -840,2 +1630,3 @@

exports.getSamplesFromSoundFont = getSamplesFromSoundFont;
exports.renderAudio = renderAudio;
//# sourceMappingURL=index.js.map

@@ -177,5 +177,23 @@ (function () {

// https://gist.github.com/fmal/763d9c953c5a5f8b8f9099dbc58da55e
function insertSorted(arr, item, prop) {
let low = 0;
let high = arr.length;
let mid;
while (low < high) {
mid = (low + high) >>> 1; // like (num / 2) but faster
if (arr[mid][prop] < item[prop]) {
low = mid + 1;
}
else {
high = mid;
}
}
arr.splice(low, 0, item);
}
class SynthEventHandler {
processor;
scheduledEvents = [];
currentEvents = [];
rpnEvents = {};

@@ -193,7 +211,6 @@ bankSelectMSB = {};

// handle in process
this.scheduledEvents.push({
insertSorted(this.scheduledEvents, {
...e,
receivedFrame: this.currentFrame,
isProcessed: false,
});
scheduledFrame: this.currentFrame + e.delayTime,
}, "scheduledFrame");
}

@@ -205,17 +222,21 @@ else {

processScheduledEvents() {
for (const e of this.scheduledEvents) {
if (!e.isProcessed &&
e.receivedFrame + e.delayTime <= this.currentFrame) {
this.handleDelayableEvent(e.midi);
e.isProcessed = true;
if (this.scheduledEvents.length === 0) {
return;
}
while (true) {
const e = this.scheduledEvents[0];
if (e === undefined || e.scheduledFrame > this.currentFrame) {
// scheduledEvents are sorted by scheduledFrame,
// so we can break early instead of iterating through all scheduledEvents,
break;
}
this.scheduledEvents.shift();
this.currentEvents.unshift(e);
}
this.removeProcessedEvents();
}
removeProcessedEvents() {
for (let i = this.scheduledEvents.length - 1; i >= 0; i--) {
const ev = this.scheduledEvents[i];
if (ev.isProcessed) {
this.scheduledEvents.splice(i, 1);
while (true) {
const e = this.currentEvents.pop();
if (e === undefined) {
break;
}
this.handleDelayableEvent(e.midi);
}

@@ -341,7 +362,4 @@ }

removeScheduledEvents(channel) {
for (const e of this.scheduledEvents) {
if (e.midi.channel === channel) {
e.isProcessed = true;
}
}
this.scheduledEvents = this.scheduledEvents.filter((e) => e.midi.channel !== channel);
this.currentEvents = this.currentEvents.filter((e) => e.midi.channel !== channel);
}

@@ -524,5 +542,6 @@ }

const pitchModulation = pitchLFOValue * this.modulation * (this.modulationDepthRange / 1200);
const modulatedSpeed = speed * (1 + pitchModulation);
for (let i = 0; i < outputs[0].length; ++i) {
const index = Math.floor(this.sampleIndex);
const advancedIndex = this.sampleIndex + speed * (1 + pitchModulation);
const advancedIndex = this.sampleIndex + modulatedSpeed;
let loopIndex = null;

@@ -718,4 +737,3 @@ if (this.sample.loop !== null && advancedIndex >= this.sample.loop.end) {

for (let key in state.oscillators) {
for (let i = state.oscillators[key].length - 1; i >= 0; i--) {
const oscillator = state.oscillators[key][i];
state.oscillators[key] = state.oscillators[key].filter((oscillator) => {
oscillator.speed = Math.pow(2, state.pitchBend / 12);

@@ -727,5 +745,6 @@ oscillator.volume = state.volume * state.expression;

if (!oscillator.isPlaying) {
state.oscillators[key].splice(i, 1);
return false;
}
}
return true;
});
}

@@ -732,0 +751,0 @@ }

@@ -6,2 +6,3 @@ import { ImmediateEvent, MIDIEventBody, SynthEvent } from "../SynthEvent";

private scheduledEvents;
private currentEvents;
private rpnEvents;

@@ -13,3 +14,2 @@ private bankSelectMSB;

processScheduledEvents(): void;
private removeProcessedEvents;
handleImmediateEvent(e: ImmediateEvent): void;

@@ -16,0 +16,0 @@ handleDelayableEvent(e: MIDIEventBody): void;

@@ -9,2 +9,3 @@ import { LoadSampleEvent, SynthEvent } from "../SynthEvent";

sampleRate: number;
bufferSize?: number;
}

@@ -11,0 +12,0 @@ export interface CancelMessage {

import { AudioData, LoadSampleEvent, SynthEvent } from "..";
export interface CancellationToken {
cancelled: boolean;
export interface RenderAudioOptions {
sampleRate?: number;
onProgress?: (numFrames: number, totalFrames: number) => void;
cancel?: () => boolean;
bufferSize?: number;
waitForEventLoop?: () => Promise<void>;
}
export declare const renderAudio: (samples: LoadSampleEvent[], events: SynthEvent[], sampleRate: number, onProgress?: ((numFrames: number, totalFrames: number) => void) | undefined, cancel?: Readonly<CancellationToken> | undefined) => Promise<AudioData>;
export declare const renderAudio: (samples: LoadSampleEvent[], events: SynthEvent[], options?: RenderAudioOptions | undefined) => Promise<AudioData>;
(function () {
'use strict';
// https://stackoverflow.com/a/61339321/1567777
class FastSleep {
channel = new MessageChannel();
promiseResolver;
constructor() {
this.channel.port2.onmessage = () => {
this.promiseResolver?.();
};
}
async wait() {
const promise = new Promise((resolve) => {
this.promiseResolver = resolve;
});
this.channel.port1.postMessage(null);
await promise;
}
}
class Logger {

@@ -177,5 +195,23 @@ enabled = true;

// https://gist.github.com/fmal/763d9c953c5a5f8b8f9099dbc58da55e
function insertSorted(arr, item, prop) {
let low = 0;
let high = arr.length;
let mid;
while (low < high) {
mid = (low + high) >>> 1; // like (num / 2) but faster
if (arr[mid][prop] < item[prop]) {
low = mid + 1;
}
else {
high = mid;
}
}
arr.splice(low, 0, item);
}
class SynthEventHandler {
processor;
scheduledEvents = [];
currentEvents = [];
rpnEvents = {};

@@ -193,7 +229,6 @@ bankSelectMSB = {};

// handle in process
this.scheduledEvents.push({
insertSorted(this.scheduledEvents, {
...e,
receivedFrame: this.currentFrame,
isProcessed: false,
});
scheduledFrame: this.currentFrame + e.delayTime,
}, "scheduledFrame");
}

@@ -205,17 +240,21 @@ else {

processScheduledEvents() {
for (const e of this.scheduledEvents) {
if (!e.isProcessed &&
e.receivedFrame + e.delayTime <= this.currentFrame) {
this.handleDelayableEvent(e.midi);
e.isProcessed = true;
if (this.scheduledEvents.length === 0) {
return;
}
while (true) {
const e = this.scheduledEvents[0];
if (e === undefined || e.scheduledFrame > this.currentFrame) {
// scheduledEvents are sorted by scheduledFrame,
// so we can break early instead of iterating through all scheduledEvents,
break;
}
this.scheduledEvents.shift();
this.currentEvents.unshift(e);
}
this.removeProcessedEvents();
}
removeProcessedEvents() {
for (let i = this.scheduledEvents.length - 1; i >= 0; i--) {
const ev = this.scheduledEvents[i];
if (ev.isProcessed) {
this.scheduledEvents.splice(i, 1);
while (true) {
const e = this.currentEvents.pop();
if (e === undefined) {
break;
}
this.handleDelayableEvent(e.midi);
}

@@ -341,7 +380,4 @@ }

removeScheduledEvents(channel) {
for (const e of this.scheduledEvents) {
if (e.midi.channel === channel) {
e.isProcessed = true;
}
}
this.scheduledEvents = this.scheduledEvents.filter((e) => e.midi.channel !== channel);
this.currentEvents = this.currentEvents.filter((e) => e.midi.channel !== channel);
}

@@ -524,5 +560,6 @@ }

const pitchModulation = pitchLFOValue * this.modulation * (this.modulationDepthRange / 1200);
const modulatedSpeed = speed * (1 + pitchModulation);
for (let i = 0; i < outputs[0].length; ++i) {
const index = Math.floor(this.sampleIndex);
const advancedIndex = this.sampleIndex + speed * (1 + pitchModulation);
const advancedIndex = this.sampleIndex + modulatedSpeed;
let loopIndex = null;

@@ -718,4 +755,3 @@ if (this.sample.loop !== null && advancedIndex >= this.sample.loop.end) {

for (let key in state.oscillators) {
for (let i = state.oscillators[key].length - 1; i >= 0; i--) {
const oscillator = state.oscillators[key][i];
state.oscillators[key] = state.oscillators[key].filter((oscillator) => {
oscillator.speed = Math.pow(2, state.pitchBend / 12);

@@ -727,5 +763,6 @@ oscillator.volume = state.volume * state.expression;

if (!oscillator.isPlaying) {
state.oscillators[key].splice(i, 1);
return false;
}
}
return true;
});
}

@@ -744,5 +781,6 @@ }

const getSongLength = (events) => Math.max(...events.map((e) => (e.type === "midi" ? e.delayTime : 0)));
const Sleep = (time) => new Promise((resolve) => setTimeout(resolve, time));
const renderAudio = async (samples, events, sampleRate, onProgress, cancel) => {
const renderAudio = async (samples, events, options) => {
let currentFrame = 0;
const sampleRate = options?.sampleRate ?? 44100;
const bufSize = options?.bufferSize ?? 500;
const synth = new SynthProcessorCore(sampleRate, () => currentFrame);

@@ -752,3 +790,2 @@ samples.forEach((e) => synth.addEvent(e));

const songLengthSec = getSongLength(events);
const bufSize = 128;
const iterCount = Math.ceil(songLengthSec / bufSize);

@@ -758,4 +795,6 @@ const audioBufferSize = iterCount * bufSize;

const rightData = new Float32Array(audioBufferSize);
const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)];
for (let i = 0; i < iterCount; i++) {
const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)];
buffer[0].fill(0);
buffer[1].fill(0);
synth.process(buffer);

@@ -766,7 +805,7 @@ const offset = i * bufSize;

currentFrame += bufSize;
onProgress?.(offset, audioBufferSize);
// give a chance to terminate the loop
// give a chance to terminate the loop or update progress
if (i % 1000 === 0) {
await Sleep(0);
if (cancel?.cancelled) {
await options?.waitForEventLoop?.();
options?.onProgress?.(offset, audioBufferSize);
if (options?.cancel?.()) {
throw new Error("renderAudio cancelled");

@@ -784,25 +823,24 @@ }

let cancel = null;
let cancelled = false;
const fastSleep = new FastSleep();
onmessage = async (e) => {
switch (e.data.type) {
case "cancel": {
if (cancel !== null) {
cancel.cancelled = true;
}
cancelled = true;
break;
}
case "start": {
if (cancel !== null) {
throw new Error("rendering is already started.");
}
const { samples, events, sampleRate } = e.data;
cancel = {
cancelled: false,
};
const { samples, events, sampleRate, bufferSize } = e.data;
try {
const audioData = await renderAudio(samples, events, sampleRate, (numBytes, totalBytes) => postMessage({
type: "progress",
numBytes,
totalBytes,
}), cancel);
const audioData = await renderAudio(samples, events, {
sampleRate,
bufferSize,
cancel: () => cancelled,
waitForEventLoop: async () => await fastSleep.wait(),
onProgress: (numBytes, totalBytes) => postMessage({
type: "progress",
numBytes,
totalBytes,
}),
});
postMessage({ type: "complete", audioData }, [

@@ -809,0 +847,0 @@ audioData.leftData,

{
"name": "@ryohey/wavelet",
"version": "0.4.3",
"version": "0.5.0",
"description": "A wavetable synthesizer that never stops the UI thread created by AudioWorklet.",

@@ -9,3 +9,4 @@ "main": "dist/index.js",

"start": "rollup --config --watch",
"build": "rollup --config"
"build": "rollup --config",
"test": "jest"
},

@@ -23,3 +24,6 @@ "author": "ryohey",

"@types/audioworklet": "^0.0.22",
"@types/jest": "^27.0.3",
"jest": "^27.4.3",
"rollup": "^2.60.2",
"ts-jest": "^27.1.0",
"tslib": "^2.3.1",

@@ -26,0 +30,0 @@ "typescript": "^4.5.2"
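
Besides the version bump to 0.5.0, the package gains a "test": "jest" script plus jest, ts-jest and @types/jest dev dependencies. The Jest configuration itself is not part of this diff; a minimal ts-jest setup consistent with these dependencies would look roughly like the following (hypothetical file, not taken from the repository):

// jest.config.ts — hypothetical minimal configuration matching the new devDependencies.
import type { Config } from "@jest/types"

const config: Config.InitialOptions = {
  preset: "ts-jest",       // compile TypeScript tests via ts-jest
  testEnvironment: "node", // plain Node environment; adjust if DOM APIs are needed
}

export default config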

export * from "./renderer/conversion"
export * from "./renderer/message"
export * from "./renderer/renderAudio"
export * from "./soundfont/loader"
export * from "./SynthEvent"

@@ -9,6 +9,7 @@ import { ControllerEvent, MIDIControlEvents } from "midifile-ts"

import { DistributiveOmit } from "../types"
import { insertSorted } from "./insertSorted"
import { logger } from "./logger"
import { SynthProcessorCore } from "./SynthProcessorCore"
type DelayedEvent = MIDIEvent & { receivedFrame: number; isProcessed: boolean }
type DelayedEvent = MIDIEvent & { scheduledFrame: number }
type RPNControllerEvent = DistributiveOmit<ControllerEvent, "deltaTime">

@@ -26,2 +27,3 @@

private scheduledEvents: DelayedEvent[] = []
private currentEvents: DelayedEvent[] = []
private rpnEvents: { [channel: number]: RPN | undefined } = {}

@@ -43,7 +45,10 @@ private bankSelectMSB: { [channel: number]: number | undefined } = {}

// handle in process
this.scheduledEvents.push({
...e,
receivedFrame: this.currentFrame,
isProcessed: false,
})
insertSorted(
this.scheduledEvents,
{
...e,
scheduledFrame: this.currentFrame + e.delayTime,
},
"scheduledFrame"
)
} else {

@@ -55,21 +60,23 @@ this.handleImmediateEvent(e)

processScheduledEvents() {
for (const e of this.scheduledEvents) {
if (
!e.isProcessed &&
e.receivedFrame + e.delayTime <= this.currentFrame
) {
this.handleDelayableEvent(e.midi)
e.isProcessed = true
if (this.scheduledEvents.length === 0) {
return
}
while (true) {
const e = this.scheduledEvents[0]
if (e === undefined || e.scheduledFrame > this.currentFrame) {
// scheduledEvents are sorted by scheduledFrame,
// so we can break early instead of iterating through all scheduledEvents,
break
}
this.scheduledEvents.shift()
this.currentEvents.unshift(e)
}
this.removeProcessedEvents()
}
private removeProcessedEvents() {
for (let i = this.scheduledEvents.length - 1; i >= 0; i--) {
const ev = this.scheduledEvents[i]
if (ev.isProcessed) {
this.scheduledEvents.splice(i, 1)
while (true) {
const e = this.currentEvents.pop()
if (e === undefined) {
break
}
this.handleDelayableEvent(e.midi)
}

@@ -207,8 +214,9 @@ }

private removeScheduledEvents(channel: number) {
for (const e of this.scheduledEvents) {
if (e.midi.channel === channel) {
e.isProcessed = true
}
}
this.scheduledEvents = this.scheduledEvents.filter(
(e) => e.midi.channel !== channel
)
this.currentEvents = this.currentEvents.filter(
(e) => e.midi.channel !== channel
)
}
}
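
The TypeScript source makes the new scheduling model explicit: DelayedEvent now stores a precomputed scheduledFrame instead of receivedFrame/isProcessed, and insertSorted keeps scheduledEvents ordered so processing can stop at the first event that is still in the future. A small, typed illustration of that helper (the event shape is a simplified stand-in, not the library's real MIDIEvent type):

// Binary-search insertion, typed for illustration; mirrors the library's insertSorted helper.
function insertSorted<K extends string, T extends Record<K, number>>(arr: T[], item: T, prop: K): void {
  let low = 0
  let high = arr.length
  while (low < high) {
    const mid = (low + high) >>> 1 // midpoint via unsigned shift (integer division by 2)
    if (arr[mid][prop] < item[prop]) {
      low = mid + 1
    } else {
      high = mid
    }
  }
  arr.splice(low, 0, item)
}

const scheduled: { scheduledFrame: number }[] = []
insertSorted(scheduled, { scheduledFrame: 300 }, "scheduledFrame")
insertSorted(scheduled, { scheduledFrame: 100 }, "scheduledFrame")
insertSorted(scheduled, { scheduledFrame: 200 }, "scheduledFrame")

// The array stays sorted by scheduledFrame, so the handler can break at the
// first event whose frame is later than currentFrame instead of scanning everything.
console.log(scheduled.map((e) => e.scheduledFrame)) // [100, 200, 300]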

@@ -227,5 +227,3 @@ import { SampleData, SynthEvent } from "../SynthEvent"

for (let key in state.oscillators) {
for (let i = state.oscillators[key].length - 1; i >= 0; i--) {
const oscillator = state.oscillators[key][i]
state.oscillators[key] = state.oscillators[key].filter((oscillator) => {
oscillator.speed = Math.pow(2, state.pitchBend / 12)

@@ -238,5 +236,6 @@ oscillator.volume = state.volume * state.expression

if (!oscillator.isPlaying) {
state.oscillators[key].splice(i, 1)
return false
}
}
return true
})
}

@@ -243,0 +242,0 @@ }

@@ -82,6 +82,7 @@ import { SampleData } from "../SynthEvent"

pitchLFOValue * this.modulation * (this.modulationDepthRange / 1200)
const modulatedSpeed = speed * (1 + pitchModulation)
for (let i = 0; i < outputs[0].length; ++i) {
const index = Math.floor(this.sampleIndex)
const advancedIndex = this.sampleIndex + speed * (1 + pitchModulation)
const advancedIndex = this.sampleIndex + modulatedSpeed
let loopIndex: number | null = null

@@ -88,0 +89,0 @@

@@ -11,2 +11,3 @@ import { LoadSampleEvent, SynthEvent } from "../SynthEvent"

sampleRate: number
bufferSize?: number
}

@@ -13,0 +14,0 @@

@@ -8,7 +8,8 @@ import { AudioData, LoadSampleEvent, SynthEvent } from ".."

const Sleep = (time: number) =>
new Promise((resolve) => setTimeout(resolve, time))
export interface CancellationToken {
cancelled: boolean
export interface RenderAudioOptions {
sampleRate?: number
onProgress?: (numFrames: number, totalFrames: number) => void
cancel?: () => boolean
bufferSize?: number
waitForEventLoop?: () => Promise<void>
}

@@ -19,7 +20,8 @@

events: SynthEvent[],
sampleRate: number,
onProgress?: (numFrames: number, totalFrames: number) => void,
cancel?: Readonly<CancellationToken>
options?: RenderAudioOptions
): Promise<AudioData> => {
let currentFrame = 0
const sampleRate = options?.sampleRate ?? 44100
const bufSize = options?.bufferSize ?? 500
const synth = new SynthProcessorCore(sampleRate, () => currentFrame)

@@ -31,3 +33,2 @@

const songLengthSec = getSongLength(events)
const bufSize = 128
const iterCount = Math.ceil(songLengthSec / bufSize)

@@ -39,4 +40,7 @@ const audioBufferSize = iterCount * bufSize

const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)]
for (let i = 0; i < iterCount; i++) {
const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)]
buffer[0].fill(0)
buffer[1].fill(0)
synth.process(buffer)

@@ -47,9 +51,10 @@ const offset = i * bufSize

currentFrame += bufSize
onProgress?.(offset, audioBufferSize)
// give a chance to terminate the loop
// give a chance to terminate the loop or update progress
if (i % 1000 === 0) {
await Sleep(0)
await options?.waitForEventLoop?.()
if (cancel?.cancelled) {
options?.onProgress?.(offset, audioBufferSize)
if (options?.cancel?.()) {
throw new Error("renderAudio cancelled")

@@ -56,0 +61,0 @@ }

import { InMessage } from ".."
import { FastSleep } from "./FastSleep"
import { CompleteMessage, ProgressMessage } from "./message"
import { CancellationToken, renderAudio } from "./renderAudio"
import { renderAudio } from "./renderAudio"

@@ -12,29 +13,22 @@ declare global {

let cancel: CancellationToken | null = null
let cancelled: boolean = false
const fastSleep = new FastSleep()
onmessage = async (e: MessageEvent<InMessage>) => {
switch (e.data.type) {
case "cancel": {
if (cancel !== null) {
cancel.cancelled = true
}
cancelled = true
break
}
case "start": {
if (cancel !== null) {
throw new Error("rendering is already started.")
}
const { samples, events, sampleRate, bufferSize } = e.data
const { samples, events, sampleRate } = e.data
cancel = {
cancelled: false,
}
try {
const audioData = await renderAudio(
samples,
events,
const audioData = await renderAudio(samples, events, {
sampleRate,
(numBytes, totalBytes) =>
bufferSize,
cancel: () => cancelled,
waitForEventLoop: async () => await fastSleep.wait(),
onProgress: (numBytes, totalBytes) =>
postMessage({

@@ -45,4 +39,3 @@ type: "progress",

}),
cancel
)
})
postMessage({ type: "complete", audioData }, [

@@ -49,0 +42,0 @@ audioData.leftData,

