@ryohey/wavelet
Comparing version 0.5.3 to 0.6.0
@@ -0,0 +0,0 @@ export * from "./renderer/conversion";
'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
const audioDataToAudioBuffer = (audioData) => {
@@ -779,2 +777,12 @@ const audioBuffer = new AudioBuffer({
const getSongLength = (events) => Math.max(...events.map((e) => (e.type === "midi" ? e.delayTime : 0)));
+// Maximum time to wait for the note release sound to become silent
+const silentTimeoutSec = 5;
+const isArrayZero = (arr) => {
+    for (let i = 0; i < arr.length; i++) {
+        if (arr[i] !== 0) {
+            return false;
+        }
+    }
+    return true;
+};
const renderAudio = async (samples, events, options) => {
@@ -787,9 +795,11 @@ let currentFrame = 0;
events.forEach((e) => synth.addEvent(e));
-const songLengthSec = getSongLength(events);
-const iterCount = Math.ceil(songLengthSec / bufSize);
-const audioBufferSize = iterCount * bufSize;
+const songLengthFrame = getSongLength(events);
+const iterCount = Math.ceil(songLengthFrame / bufSize);
+const additionalIterCount = Math.ceil((silentTimeoutSec * sampleRate) / bufSize);
+const allIterCount = iterCount + additionalIterCount;
+const audioBufferSize = allIterCount * bufSize;
const leftData = new Float32Array(audioBufferSize);
const rightData = new Float32Array(audioBufferSize);
const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)];
-for (let i = 0; i < iterCount; i++) {
+for (let i = 0; i < allIterCount; i++) {
    buffer[0].fill(0);
@@ -800,4 +810,9 @@ buffer[1].fill(0);
leftData.set(buffer[0], offset);
-rightData.set(buffer[0], offset);
+rightData.set(buffer[1], offset);
currentFrame += bufSize;
+// Wait for silence after playback is complete.
+if (i > iterCount && isArrayZero(buffer[0]) && isArrayZero(buffer[1])) {
+    console.log(`early break ${i} in ${iterCount + additionalIterCount}`);
+    break;
+}
// give a chance to terminate the loop or update progress
@@ -812,6 +827,9 @@ if (i % 1000 === 0) {
}
+// slice() to delete silent parts
+const trimmedLeft = leftData.slice(0, currentFrame);
+const trimmedRight = rightData.slice(0, currentFrame);
return {
-    length: audioBufferSize,
-    leftData: leftData.buffer,
-    rightData: rightData.buffer,
+    length: trimmedLeft.length,
+    leftData: trimmedLeft.buffer,
+    rightData: trimmedRight.buffer,
    sampleRate,
@@ -818,0 +836,0 @@ };
@@ -0,0 +0,0 @@ (function () {
@@ -0,0 +0,0 @@ export interface AmplitudeEnvelopeParameter {
export declare function insertSorted<T>(arr: T[], item: T, prop: keyof T): void;
@@ -0,0 +0,0 @@ export declare class LFO {
@@ -0,0 +0,0 @@ export declare class Logger {
export {};
import { SampleData } from "../SynthEvent";
-declare type Sample = SampleData<Float32Array>;
-export declare type SampleTableItem = Sample & {
+type Sample = SampleData<Float32Array>;
+export type SampleTableItem = Sample & {
    velRange: [number, number];
@@ -5,0 +5,0 @@ };
@@ -0,0 +0,0 @@ import { ImmediateEvent, MIDIEventBody, SynthEvent } from "../SynthEvent";
@@ -0,0 +0,0 @@ export declare class SynthProcessor extends AudioWorkletProcessor {
@@ -0,0 +0,0 @@ import { SampleData, SynthEvent } from "../SynthEvent";
@@ -0,0 +0,0 @@ import { SampleData } from "../SynthEvent";
import { AudioData } from "./message";
export declare const audioDataToAudioBuffer: (audioData: AudioData) => AudioBuffer;
@@ -0,0 +0,0 @@ export declare class FastSleep {
import { LoadSampleEvent, SynthEvent } from "../SynthEvent";
-export declare type InMessage = StartMessage | CancelMessage;
-export declare type OutMessage = ProgressMessage | CompleteMessage;
+export type InMessage = StartMessage | CancelMessage;
+export type OutMessage = ProgressMessage | CompleteMessage;
export interface StartMessage {
@@ -25,5 +25,5 @@ type: "start";
}
-export declare type CompleteMessage = {
+export type CompleteMessage = {
    type: "complete";
    audioData: AudioData;
};
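With `declare` dropped these are plain type aliases; the message protocol itself is unchanged. For orientation, a minimal sketch of how a caller might consume `OutMessage` coming back from the renderer worker. The worker entry point and the import path are assumptions; only the `complete` branch relies on fields shown in this diff (the shape of `ProgressMessage` is not shown here).

```ts
import { OutMessage } from "@ryohey/wavelet" // import path is an assumption

// Hypothetical worker that runs the offline renderer in a background thread.
const worker = new Worker(new URL("./rendererWorker.js", import.meta.url))

worker.onmessage = (e: MessageEvent<OutMessage>) => {
  const message = e.data
  switch (message.type) {
    case "complete":
      // CompleteMessage carries the rendered AudioData
      // (length/leftData/rightData/sampleRate, as returned by renderAudio).
      console.log("render finished:", message.audioData.length, "frames")
      break
    default:
      // ProgressMessage handling omitted; its fields are not shown in this diff.
      break
  }
}
```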
@@ -9,2 +9,2 @@ import { AudioData, LoadSampleEvent, SynthEvent } from "..";
}
-export declare const renderAudio: (samples: LoadSampleEvent[], events: SynthEvent[], options?: RenderAudioOptions | undefined) => Promise<AudioData>;
+export declare const renderAudio: (samples: LoadSampleEvent[], events: SynthEvent[], options?: RenderAudioOptions) => Promise<AudioData>;
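The signature change only affects how the optional parameter is printed in the declaration file; callers behave the same and can still omit `options`. A hedged usage sketch, assuming `renderAudio`, `audioDataToAudioBuffer`, and the event types are all re-exported from the package root, with `samples` and `events` prepared elsewhere (e.g. from a parsed SoundFont and a parsed MIDI file):

```ts
import {
  audioDataToAudioBuffer,
  renderAudio,
  LoadSampleEvent,
  SynthEvent,
} from "@ryohey/wavelet"

// Assumed to be prepared elsewhere, e.g. via sampleToSynthEvent for a
// parsed SoundFont and a converter for a parsed MIDI file.
declare const samples: LoadSampleEvent[]
declare const events: SynthEvent[]

// sampleRate is the only RenderAudioOptions field shown in this diff.
const audioData = await renderAudio(samples, events, { sampleRate: 44100 })

// Convert the raw channel data into a Web Audio AudioBuffer and play it.
const context = new AudioContext()
const source = context.createBufferSource()
source.buffer = audioDataToAudioBuffer(audioData)
source.connect(context.destination)
source.start()
```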
@@ -0,0 +0,0 @@ import { CompleteMessage, ProgressMessage } from "./message";
@@ -785,2 +785,12 @@ (function () {
const getSongLength = (events) => Math.max(...events.map((e) => (e.type === "midi" ? e.delayTime : 0)));
+// Maximum time to wait for the note release sound to become silent
+const silentTimeoutSec = 5;
+const isArrayZero = (arr) => {
+    for (let i = 0; i < arr.length; i++) {
+        if (arr[i] !== 0) {
+            return false;
+        }
+    }
+    return true;
+};
const renderAudio = async (samples, events, options) => {
@@ -793,9 +803,11 @@ let currentFrame = 0;
events.forEach((e) => synth.addEvent(e));
-const songLengthSec = getSongLength(events);
-const iterCount = Math.ceil(songLengthSec / bufSize);
-const audioBufferSize = iterCount * bufSize;
+const songLengthFrame = getSongLength(events);
+const iterCount = Math.ceil(songLengthFrame / bufSize);
+const additionalIterCount = Math.ceil((silentTimeoutSec * sampleRate) / bufSize);
+const allIterCount = iterCount + additionalIterCount;
+const audioBufferSize = allIterCount * bufSize;
const leftData = new Float32Array(audioBufferSize);
const rightData = new Float32Array(audioBufferSize);
const buffer = [new Float32Array(bufSize), new Float32Array(bufSize)];
-for (let i = 0; i < iterCount; i++) {
+for (let i = 0; i < allIterCount; i++) {
    buffer[0].fill(0);
@@ -806,4 +818,9 @@ buffer[1].fill(0);
leftData.set(buffer[0], offset);
-rightData.set(buffer[0], offset);
+rightData.set(buffer[1], offset);
currentFrame += bufSize;
+// Wait for silence after playback is complete.
+if (i > iterCount && isArrayZero(buffer[0]) && isArrayZero(buffer[1])) {
+    console.log(`early break ${i} in ${iterCount + additionalIterCount}`);
+    break;
+}
// give a chance to terminate the loop or update progress
@@ -818,6 +835,9 @@ if (i % 1000 === 0) {
}
+// slice() to delete silent parts
+const trimmedLeft = leftData.slice(0, currentFrame);
+const trimmedRight = rightData.slice(0, currentFrame);
return {
-    length: audioBufferSize,
-    leftData: leftData.buffer,
-    rightData: rightData.buffer,
+    length: trimmedLeft.length,
+    leftData: trimmedLeft.buffer,
+    rightData: trimmedRight.buffer,
    sampleRate,
@@ -824,0 +844,0 @@ };
import { SampleData } from "../SynthEvent";
-export declare type SoundFontSample = SampleData<ArrayBuffer> & {
+export type SoundFontSample = SampleData<ArrayBuffer> & {
    bank: number;
@@ -4,0 +4,0 @@ instrument: number;
import { LoadSampleEvent, SoundFontSample } from "..";
export declare const sampleToSynthEvent: (sample: SoundFontSample) => LoadSampleEvent;
@@ -30,4 +30,4 @@ import { AnyChannelEvent } from "midifile-ts";
}
-export declare type MIDIEventBody = DistributiveOmit<AnyChannelEvent, "deltaTime">;
-export declare type MIDIEvent = {
+export type MIDIEventBody = DistributiveOmit<AnyChannelEvent, "deltaTime">;
+export type MIDIEvent = {
    type: "midi";
@@ -37,4 +37,4 @@ midi: MIDIEventBody;
};
-export declare type ImmediateEvent = LoadSampleEvent;
-export declare type SynthEvent = ImmediateEvent | MIDIEvent;
+export type ImmediateEvent = LoadSampleEvent;
+export type SynthEvent = ImmediateEvent | MIDIEvent;
export declare const DrumInstrumentNumber = 128;
@@ -1,1 +0,1 @@
-export declare type DistributiveOmit<T, K extends keyof any> = T extends any ? Omit<T, K> : never;
+export type DistributiveOmit<T, K extends keyof any> = T extends any ? Omit<T, K> : never;
{
  "name": "@ryohey/wavelet",
-  "version": "0.5.3",
+  "version": "0.6.0",
  "description": "A wavetable synthesizer that never stops the UI thread created by AudioWorklet.",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
+  "type": "module",
  "scripts": {
@@ -16,16 +17,16 @@ "start": "rollup --config --watch",
    "@ryohey/sf2parser": "^1.2.0",
-    "midifile-ts": "^1.4.0"
+    "midifile-ts": "^1.5.1"
  },
  "devDependencies": {
-    "@rollup/plugin-commonjs": "^21.0.1",
-    "@rollup/plugin-node-resolve": "^13.0.6",
-    "@rollup/plugin-typescript": "^8.3.0",
-    "@types/audioworklet": "^0.0.22",
-    "@types/jest": "^27.0.3",
-    "jest": "^27.4.3",
-    "rollup": "^2.60.2",
-    "ts-jest": "^27.1.0",
-    "tslib": "^2.3.1",
-    "typescript": "^4.5.2"
+    "@rollup/plugin-commonjs": "^24.0.0",
+    "@rollup/plugin-node-resolve": "^15.0.1",
+    "@rollup/plugin-typescript": "^10.0.1",
+    "@types/audioworklet": "^0.0.36",
+    "@types/jest": "^29.2.4",
+    "jest": "^29.3.1",
+    "rollup": "^3.7.5",
+    "ts-jest": "^29.0.3",
+    "tslib": "^2.4.1",
+    "typescript": "^4.9.4"
  }
}
@@ -0,0 +0,0 @@ import commonjs from "@rollup/plugin-commonjs"
declare module "string-to-arraybuffer" {
  export default function stringToArrayBuffer(arg: string): ArrayBuffer
}
@@ -0,0 +0,0 @@ class AudioWorkletProcessor {
describe("wavelet", () => {
  it("has a test", () => {})
})
@@ -0,0 +0,0 @@ export * from "./renderer/conversion"
@@ -0,0 +0,0 @@ export interface AmplitudeEnvelopeParameter {
@@ -0,0 +0,0 @@ // https://gist.github.com/fmal/763d9c953c5a5f8b8f9099dbc58da55e
@@ -0,0 +0,0 @@ export class LFO {
@@ -0,0 +0,0 @@ export class Logger {
import { SynthProcessor } from "./SynthProcessor"
registerProcessor("synth-processor", SynthProcessor)
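processor.ts registers the worklet processor under the name "synth-processor". A minimal sketch of wiring it up from the main thread; the URL of the bundled worklet script is an assumption, only the processor name comes from this file.

```ts
const context = new AudioContext()

// The path to the bundled worklet script is an assumption — use wherever
// your build places the compiled processor bundle.
await context.audioWorklet.addModule("/js/processor.js")

const synth = new AudioWorkletNode(context, "synth-processor", {
  numberOfInputs: 0,
  outputChannelCount: [2], // stereo output, matching the left/right rendering
})
synth.connect(context.destination)

// Sample-load and MIDI events would then be posted to the processor via
// synth.port.postMessage(...); the exact message handling lives in
// SynthProcessor and is not shown in this diff.
```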
@@ -0,0 +0,0 @@ import { SampleData } from "../SynthEvent"
@@ -0,0 +0,0 @@ import { ControllerEvent, MIDIControlEvents } from "midifile-ts"
@@ -0,0 +0,0 @@ import { SynthEvent } from ".."
@@ -0,0 +0,0 @@ import { SampleData, SynthEvent } from "../SynthEvent"
@@ -0,0 +0,0 @@ import { SampleData } from "../SynthEvent"
@@ -0,0 +0,0 @@ import { AudioData } from "./message"
@@ -0,0 +0,0 @@ // https://stackoverflow.com/a/61339321/1567777
@@ -0,0 +0,0 @@ import { LoadSampleEvent, SynthEvent } from "../SynthEvent"
@@ -8,2 +8,5 @@ import { AudioData, LoadSampleEvent, SynthEvent } from ".."
+// Maximum time to wait for the note release sound to become silent
+const silentTimeoutSec = 5
export interface RenderAudioOptions {
@@ -17,2 +20,11 @@ sampleRate?: number
+const isArrayZero = <T>(arr: ArrayLike<T>) => {
+  for (let i = 0; i < arr.length; i++) {
+    if (arr[i] !== 0) {
+      return false
+    }
+  }
+  return true
+}
export const renderAudio = async (
@@ -32,5 +44,9 @@ samples: LoadSampleEvent[],
-const songLengthSec = getSongLength(events)
-const iterCount = Math.ceil(songLengthSec / bufSize)
-const audioBufferSize = iterCount * bufSize
+const songLengthFrame = getSongLength(events)
+const iterCount = Math.ceil(songLengthFrame / bufSize)
+const additionalIterCount = Math.ceil(
+  (silentTimeoutSec * sampleRate) / bufSize
+)
+const allIterCount = iterCount + additionalIterCount
+const audioBufferSize = allIterCount * bufSize
@@ -42,3 +58,3 @@ const leftData = new Float32Array(audioBufferSize)
-for (let i = 0; i < iterCount; i++) {
+for (let i = 0; i < allIterCount; i++) {
  buffer[0].fill(0)
@@ -49,5 +65,11 @@ buffer[1].fill(0)
leftData.set(buffer[0], offset)
-rightData.set(buffer[0], offset)
+rightData.set(buffer[1], offset)
currentFrame += bufSize
+// Wait for silence after playback is complete.
+if (i > iterCount && isArrayZero(buffer[0]) && isArrayZero(buffer[1])) {
+  console.log(`early break ${i} in ${iterCount + additionalIterCount}`)
+  break
+}
// give a chance to terminate the loop or update progress
@@ -65,8 +87,12 @@ if (i % 1000 === 0) {
+// slice() to delete silent parts
+const trimmedLeft = leftData.slice(0, currentFrame)
+const trimmedRight = rightData.slice(0, currentFrame)
return {
-  length: audioBufferSize,
-  leftData: leftData.buffer,
-  rightData: rightData.buffer,
+  length: trimmedLeft.length,
+  leftData: trimmedLeft.buffer,
+  rightData: trimmedRight.buffer,
  sampleRate,
}
}
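Taken together, the renderAudio change works like this: the song length is now measured in frames (the maximum delayTime of all MIDI events), the loop renders up to silentTimeoutSec of extra audio past that point, breaks out early once both channels come back fully silent, and finally trims the output buffers to the frames actually written. A standalone sketch of that technique, with a hypothetical renderChunk callback standing in for the synth; the names and the default chunk size are illustrative, not the library's API.

```ts
// Hypothetical chunk renderer: fills `left`/`right` with the next
// bufSize frames of audio. In the library this role is played by the
// synth processor; here it is just a stand-in.
type RenderChunk = (left: Float32Array, right: Float32Array) => void

const isSilent = (arr: Float32Array) => arr.every((v) => v === 0)

const renderWithSilentTail = (
  renderChunk: RenderChunk,
  songLengthFrame: number,
  sampleRate: number,
  bufSize = 128, // illustrative chunk size, not the library default
  silentTimeoutSec = 5
) => {
  const iterCount = Math.ceil(songLengthFrame / bufSize)
  const additionalIterCount = Math.ceil((silentTimeoutSec * sampleRate) / bufSize)
  const allIterCount = iterCount + additionalIterCount

  const left = new Float32Array(allIterCount * bufSize)
  const right = new Float32Array(allIterCount * bufSize)
  const chunk = [new Float32Array(bufSize), new Float32Array(bufSize)]

  let currentFrame = 0
  for (let i = 0; i < allIterCount; i++) {
    chunk[0].fill(0)
    chunk[1].fill(0)
    renderChunk(chunk[0], chunk[1])
    left.set(chunk[0], currentFrame)
    right.set(chunk[1], currentFrame)
    currentFrame += bufSize

    // Past the end of the song, stop as soon as a fully silent chunk is
    // produced — the note release tails have decayed to zero.
    if (i > iterCount && isSilent(chunk[0]) && isSilent(chunk[1])) {
      break
    }
  }

  // Trim the unused tail so the result ends at the last written frame.
  return {
    length: currentFrame,
    leftData: left.slice(0, currentFrame).buffer,
    rightData: right.slice(0, currentFrame).buffer,
    sampleRate,
  }
}
```

The `i > iterCount` guard keeps silent passages inside the song itself from triggering the early exit; only the tail after the last scheduled event can end rendering early.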
@@ -0,0 +0,0 @@ import { InMessage } from ".."
@@ -0,0 +0,0 @@ import {
@@ -0,0 +0,0 @@ import { LoadSampleEvent, SoundFontSample } from ".."
@@ -42,2 +42,4 @@ import { AnyChannelEvent } from "midifile-ts"
midi: MIDIEventBody
+// Time to delay the playback of an event. Number of frames
+// delayInSeconds = delayTime / sampleRate
delayTime: number
@@ -44,0 +46,0 @@ }
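The new comment pins down the unit: delayTime is a frame count, so scheduling an event at a wall-clock offset means multiplying seconds by the sample rate. A small illustrative helper; the helper name is made up for this example, and it is an assumption that MIDIEvent/MIDIEventBody are re-exported from the package root.

```ts
import { MIDIEvent, MIDIEventBody } from "@ryohey/wavelet" // import path assumed

// Hypothetical helper: schedule a MIDI event `delayInSeconds` ahead,
// converting seconds to frames as documented above
// (delayInSeconds = delayTime / sampleRate).
const midiEventAt = (
  midi: MIDIEventBody,
  delayInSeconds: number,
  sampleRate: number
): MIDIEvent => ({
  type: "midi",
  midi,
  delayTime: Math.round(delayInSeconds * sampleRate),
})

// e.g. an event 2.5 s into the song at 44100 Hz gets delayTime = 110250 frames.
```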
export type DistributiveOmit<T, K extends keyof any> = T extends any
  ? Omit<T, K>
  : never
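For reference, the reason DistributiveOmit exists: plain Omit does not distribute over unions, so applying it to AnyChannelEvent would collapse the union to its common keys, while the conditional type distributes Omit over each member and preserves the discriminated union. A small self-contained illustration with made-up member types:

```ts
type DistributiveOmit<T, K extends keyof any> = T extends any ? Omit<T, K> : never

type NoteOn = { subtype: "noteOn"; deltaTime: number; noteNumber: number }
type Pitch = { subtype: "pitchBend"; deltaTime: number; value: number }

// Plain Omit collapses the union to its shared keys:
//   Omit<NoteOn | Pitch, "deltaTime"> is { subtype: "noteOn" | "pitchBend" }
// DistributiveOmit keeps each member intact, minus deltaTime:
type Event = DistributiveOmit<NoteOn | Pitch, "deltaTime">
//   = { subtype: "noteOn"; noteNumber: number } | { subtype: "pitchBend"; value: number }

const e: Event = { subtype: "noteOn", noteNumber: 60 } // still a discriminated union
```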
@@ -0,0 +0,0 @@ {
Updated midifile-ts@^1.5.1