@euterpe.js/player
Comparing version 1.0.22 to 1.0.23
 {
 "name": "@euterpe.js/player",
-"version": "1.0.22",
+"version": "1.0.23",
 "type": "module",
@@ -33,5 +33,5 @@ "description": "A simple, safe AudioContext web music player",
 "peerDependencies": {
-"tslib": "2.5.2"
+"tslib": "2.6.0"
 },
 "types": "./src/index.d.ts"
 }
@@ -58,3 +58,3 @@ # Euterpe.js Player
 // Subscriptions to AudioContext changes, eg. time..
-music_player.subscribe_to_formatted_duration_time((time) => {
+music_player.on_duration_formatted((time) => {
 //time == "4:53, "15:59", "1756:15:59"...
@@ -66,3 +66,3 @@ document.querySelector("#duration-text").innerHTML = time
 //Keep the current time uptodate but formatted.
-music_player.subscribe_to_formatted_current_time_tick((time) => {
+music_player.on_time_tick_formatted((time) => {
 //time == "2:52", "10:59:59"...
@@ -72,3 +72,3 @@ document.querySelector("#current-text").innerHTML = time
 //Keep <input type="range"..> slider uptodate
-music_player.subscribe_to_time_tick((time) => {
+music_player.on_time_tick((time) => {
 //time == "0","1.2", "1223.21668181"...
@@ -75,0 +75,0 @@ document.querySelector("#input-seek-range").value = "" + time
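In practice the renamed hooks are drop-in replacements for the old subscribe_* functions. A minimal sketch of wiring them to the element ids used in the README snippet above; music_player is assumed to be the MusicPlayer returned by MusicPlayerBuilder.build(), and the element ids are taken from the README, not invented here.

import type { MusicPlayer } from "@euterpe.js/player"

declare const music_player: MusicPlayer // assumed: built via MusicPlayerBuilder.build()

const duration_el = document.querySelector("#duration-text") as HTMLElement
const current_el = document.querySelector("#current-text") as HTMLElement
const seek_el = document.querySelector("#input-seek-range") as HTMLInputElement

// Formatted duration, e.g. "4:53" (replaces subscribe_to_formatted_duration_time)
music_player.on_duration_formatted((time) => {
    duration_el.innerHTML = time
})
// Formatted current time, e.g. "2:52" (replaces subscribe_to_formatted_current_time_tick)
music_player.on_time_tick_formatted((time) => {
    current_el.innerHTML = time
})
// Raw current time in seconds (replaces subscribe_to_time_tick)
music_player.on_time_tick((time) => {
    seek_el.value = "" + time
})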
@@ -11,67 +11,120 @@ export declare enum SubscribeEvents { | ||
} | ||
export declare const MusicPlayer: (audio_context_i: AudioContext, audio_element_i: HTMLAudioElement, track_i: MediaElementAudioSourceNode, gain_i: GainNode, volume_i: number, current_song_path_i?: string) => { | ||
export declare class MusicPlayer { | ||
#private; | ||
private audio_context; | ||
private audio_element; | ||
track: MediaElementAudioSourceNode; | ||
get_is_playing: () => boolean; | ||
get_current_path: () => string | undefined; | ||
get_current_duration: () => number; | ||
private gain; | ||
volume: number; | ||
private current_song_path?; | ||
current_song_duration: number; | ||
is_playing: boolean; | ||
time: number; | ||
mute: () => void; | ||
unmute: () => void; | ||
mute_toggle: () => void; | ||
change_volume: (volume_i: number) => void; | ||
try_seek_async: (new_time: number) => Promise<unknown>; | ||
seek_async: (new_time: number) => Promise<unknown>; | ||
seek: (new_time: number) => void; | ||
play: () => void; | ||
pause: () => void; | ||
play_toggle: () => void; | ||
play_toggle_async: () => Promise<unknown>; | ||
try_play_toggle_async: () => Promise<unknown>; | ||
try_new_song_async: (path: string) => Promise<unknown>; | ||
new_song: (path: string) => void; | ||
get_formatted_duration: () => string; | ||
get_formatted_current_time: () => string; | ||
subscribe_to_formatted_current_time_tick: (callback: (data: any) => void) => void; | ||
subscribe_to_formatted_duration_time: (callback: (data: any) => void) => void; | ||
subscribe_to_time_tick: (callback: (data: any) => void) => void; | ||
try_play_async: () => Promise<unknown>; | ||
play_async: () => Promise<unknown>; | ||
}; | ||
export declare function MusicPlayerBuilder(audio_element: HTMLAudioElement): { | ||
start: () => void; | ||
add_analyser: () => AnalyserNode; | ||
add_stereo_panner_node: () => StereoPannerNode; | ||
add_wave_shaper_node: () => WaveShaperNode; | ||
connect_gain: () => void; | ||
connect_custom_node: (node: AudioNode) => void; | ||
build: () => { | ||
track: MediaElementAudioSourceNode; | ||
get_is_playing: () => boolean; | ||
get_current_path: () => string | undefined; | ||
get_current_duration: () => number; | ||
volume: number; | ||
time: number; | ||
mute: () => void; | ||
unmute: () => void; | ||
mute_toggle: () => void; | ||
change_volume: (volume_i: number) => void; | ||
try_seek_async: (new_time: number) => Promise<unknown>; | ||
seek_async: (new_time: number) => Promise<unknown>; | ||
seek: (new_time: number) => void; | ||
play: () => void; | ||
pause: () => void; | ||
play_toggle: () => void; | ||
play_toggle_async: () => Promise<unknown>; | ||
try_play_toggle_async: () => Promise<unknown>; | ||
try_new_song_async: (path: string) => Promise<unknown>; | ||
new_song: (path: string) => void; | ||
get_formatted_duration: () => string; | ||
get_formatted_current_time: () => string; | ||
subscribe_to_formatted_current_time_tick: (callback: (data: any) => void) => void; | ||
subscribe_to_formatted_duration_time: (callback: (data: any) => void) => void; | ||
subscribe_to_time_tick: (callback: (data: any) => void) => void; | ||
try_play_async: () => Promise<unknown>; | ||
play_async: () => Promise<unknown>; | ||
}; | ||
}; | ||
constructor(audio_context: AudioContext, audio_element: HTMLAudioElement, track: MediaElementAudioSourceNode, gain: GainNode, volume: number, current_song_path?: string | undefined); | ||
mute_toggle(): void; | ||
mute(): void; | ||
unmute(): void; | ||
change_volume(volume_i: number): void; | ||
/** | ||
* Safer seek_async. Normal seek will try to start the player even if the track hasn't started yet, or was previously suspended/closed | ||
*/ | ||
try_seek_async(new_time: number): Promise<unknown>; | ||
seek(new_time: number): void; | ||
/** | ||
* Safer play_toggle_async. Normal play_toggle will try to start the player even if the track hasn't started yet, or was previously suspended/closed | ||
*/ | ||
try_play_toggle_async(): Promise<unknown>; | ||
/** | ||
* Can try to play even if the audio context was suspended or closed. Best to use try_play_toggle_async() | ||
*/ | ||
play_toggle_async(): Promise<unknown>; | ||
/** | ||
* Unsafe, throws error if failed. Use play_toggle_async or try_play_toggle_async unless you don't care about the result. | ||
*/ | ||
play_toggle(): void; | ||
/** | ||
* Safer play_async. Normal play will try to start the player even if the track hasn't started yet, or was previously suspended/closed | ||
*/ | ||
try_play_async(): Promise<unknown>; | ||
/** | ||
* Will try to play even if the audio context was suspended or closed. Best to use try_play_async() | ||
*/ | ||
play_async(): Promise<unknown>; | ||
/** | ||
* Unsafe, throws error if failed. Use play_async or try_play_async unless you don't care about the result. | ||
*/ | ||
play(): void; | ||
/** | ||
* Safe technically. Even if audioContext is suspended or closed it will pretend that it paused. | ||
*/ | ||
pause(): void; | ||
/** | ||
* Will only load metadata of the upcoming song. Need to call try_play_async() afterwards to start the playback | ||
*/ | ||
try_new_song_async(path: string): Promise<unknown>; | ||
/** | ||
* Won't tell if you if the song actually got loaded or if it failed. For a safer version use try_new_song_async() unless you don't care about the result | ||
*/ | ||
new_song(path: string): void; | ||
/** | ||
* Will parse the duration of the song to make it easy to display in UI | ||
* If somethings undefined it returns "0:00" | ||
*/ | ||
get_formatted_duration(): string; | ||
/** | ||
* Will parse the current time of the song to make it easy to display in UI | ||
* If somethings undefined it returns "0:00" | ||
*/ | ||
get_formatted_current_time(): string; | ||
/** | ||
* Will give current time every animation frame | ||
*/ | ||
on_time_tick(callback: (data: any) => void): void; | ||
/** | ||
* Will give formatted current time via get_formatted_current_time() every animation frame | ||
*/ | ||
on_time_tick_formatted(callback: (data: any) => void): void; | ||
/** | ||
* Will give formatted duration time via get_formatted_duration() every animation frame | ||
*/ | ||
on_duration_formatted(callback: (data: any) => void): void; | ||
} | ||
export declare class MusicPlayerBuilder { | ||
#private; | ||
private audio_element; | ||
/** | ||
* Creates a context and #gain( Gets connected at the end ) | ||
* will throw if audio_element is undefined (stupid vue setup amirite?) | ||
* will throw if user has not interacted with the page yet (Can't initiate AudioContext) | ||
*/ | ||
constructor(audio_element: HTMLAudioElement); | ||
/** | ||
* For external use, not kept inside player after connection. | ||
* @returns {AnalyserNode} | ||
*/ | ||
add_analyser(): AnalyserNode; | ||
/** | ||
* For external use, not kept inside player after connection. | ||
* @returns {StereoPannerNode} | ||
*/ | ||
add_stereo_panner_node(): StereoPannerNode; | ||
/** | ||
* For external use, not kept inside player after connection. | ||
* @returns {StereoPannerNode} | ||
*/ | ||
add_wave_shaper_node(): WaveShaperNode; | ||
/** | ||
* For additional trickery, you can connect your own node. | ||
*/ | ||
connect_custom_node(node: AudioNode): void; | ||
/** | ||
* Only use if you need to connect the #gain before another node, | ||
* eg. if you want the analyser nodes output to be affected by user #gain | ||
*/ | ||
connect_gain(): void; | ||
/** | ||
* Finishes the build | ||
* @returns {Euterpe} | ||
*/ | ||
build(): MusicPlayer; | ||
} |
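Taken together, these declarations replace the 1.0.22 factory functions with classes: setup now happens in the MusicPlayerBuilder constructor (the old start() method is gone) and build() returns a MusicPlayer instance. A minimal sketch of the 1.0.23 flow based only on the declarations above; the #audio element id and the "/music/track.mp3" path are hypothetical.

import { MusicPlayerBuilder } from "@euterpe.js/player"

async function start_playback() {
    // Per the JSDoc above, the constructor creates the AudioContext, media source
    // and gain node, so it should run after a user gesture and with a real element.
    const audio_el = document.querySelector("#audio") as HTMLAudioElement
    const builder = new MusicPlayerBuilder(audio_el)
    builder.add_analyser() // optional; the node is returned for external use
    const music_player = builder.build()

    music_player.change_volume(0.8)
    // try_new_song_async only loads metadata; playback starts with try_play_async
    await music_player.try_new_song_async("/music/track.mp3")
    await music_player.try_play_async()
}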
src/index.js
@@ -0,1 +1,3 @@ | ||
var _MusicPlayer_instances, _MusicPlayer_volume_cache, _MusicPlayer_pub_sub, _MusicPlayer_emit_time, _MusicPlayer_emit_duration_fmt, _MusicPlayer_emit_time_fmt, _MusicPlayerBuilder_audio_context, _MusicPlayerBuilder_gain, _MusicPlayerBuilder_track, _MusicPlayerBuilder_volume, _MusicPlayerBuilder_prev_node, _MusicPlayerBuilder_is_gain_connected; | ||
import { __classPrivateFieldGet, __classPrivateFieldSet } from "tslib"; | ||
export var SubscribeEvents; | ||
@@ -7,19 +9,21 @@ (function (SubscribeEvents) { | ||
})(SubscribeEvents || (SubscribeEvents = {})); | ||
const PubSub = () => { | ||
//el = event listener | ||
const el_current_time_tick = []; | ||
const el_formatted_duration_tick = []; | ||
const el_formatted_current_time_tick = []; | ||
function subscribe(event_name, func) { | ||
class PubSub { | ||
constructor() { | ||
//el = event listener | ||
this.el_current_time_tick = []; | ||
this.el_formatted_duration_tick = []; | ||
this.el_formatted_current_time_tick = []; | ||
} | ||
subscribe(event_name, func) { | ||
switch (event_name) { | ||
case SubscribeEvents.CurrentTimeTick: { | ||
el_current_time_tick.push(func); | ||
this.el_current_time_tick.push(func); | ||
break; | ||
} | ||
case SubscribeEvents.FormattedDurationTick: { | ||
el_formatted_duration_tick.push(func); | ||
this.el_formatted_duration_tick.push(func); | ||
break; | ||
} | ||
case SubscribeEvents.FormattedCurrentTimeTick: { | ||
el_formatted_current_time_tick.push(func); | ||
this.el_formatted_current_time_tick.push(func); | ||
break; | ||
@@ -29,7 +33,7 @@ } | ||
} | ||
function unsubscribe(event_name, func) { | ||
unsubscribe(event_name, func) { | ||
switch (event_name) { | ||
case SubscribeEvents.CurrentTimeTick: { | ||
if (el_current_time_tick.includes(func)) { | ||
el_current_time_tick.splice(el_current_time_tick.indexOf(func), 1); | ||
if (this.el_current_time_tick.includes(func)) { | ||
this.el_current_time_tick.splice(this.el_current_time_tick.indexOf(func), 1); | ||
} | ||
@@ -39,4 +43,4 @@ break; | ||
case SubscribeEvents.FormattedDurationTick: { | ||
if (el_formatted_duration_tick.includes(func)) { | ||
el_formatted_duration_tick.splice(el_formatted_duration_tick.indexOf(func), 1); | ||
if (this.el_formatted_duration_tick.includes(func)) { | ||
this.el_formatted_duration_tick.splice(this.el_formatted_duration_tick.indexOf(func), 1); | ||
} | ||
@@ -46,4 +50,4 @@ break; | ||
case SubscribeEvents.FormattedCurrentTimeTick: { | ||
if (el_formatted_duration_tick.includes(func)) { | ||
el_formatted_duration_tick.splice(el_formatted_duration_tick.indexOf(func), 1); | ||
if (this.el_formatted_duration_tick.includes(func)) { | ||
this.el_formatted_duration_tick.splice(this.el_formatted_duration_tick.indexOf(func), 1); | ||
} | ||
@@ -54,6 +58,6 @@ break; | ||
} | ||
function emit(event_name, data) { | ||
emit(event_name, data) { | ||
switch (event_name) { | ||
case SubscribeEvents.CurrentTimeTick: { | ||
el_current_time_tick.forEach((func) => { | ||
this.el_current_time_tick.forEach((func) => { | ||
func(data); | ||
@@ -64,3 +68,3 @@ }); | ||
case SubscribeEvents.FormattedDurationTick: { | ||
el_formatted_duration_tick.forEach((func) => { | ||
this.el_formatted_duration_tick.forEach((func) => { | ||
func(data); | ||
@@ -71,3 +75,3 @@ }); | ||
case SubscribeEvents.FormattedCurrentTimeTick: { | ||
el_formatted_current_time_tick.forEach((func) => { | ||
this.el_formatted_current_time_tick.forEach((func) => { | ||
func(data); | ||
@@ -79,33 +83,29 @@ }); | ||
} | ||
return { | ||
el_current_time_tick, | ||
el_formatted_duration_tick, | ||
el_formatted_current_time_tick, | ||
subscribe, | ||
unsubscribe, | ||
emit | ||
}; | ||
}; | ||
export const MusicPlayer = (audio_context_i, audio_element_i, track_i, gain_i, volume_i, current_song_path_i) => { | ||
const audio_element = audio_element_i; | ||
const audio_context = audio_context_i; | ||
const track = track_i; | ||
const gain = gain_i; | ||
let current_song_path = current_song_path_i; | ||
let current_song_duration; | ||
let volume_cache = volume_i; | ||
let volume = volume_i; | ||
let is_playing = false; | ||
let time = 0; | ||
const pub_sub = PubSub(); | ||
function mute_toggle() { | ||
if (gain.gain.value == 0) { | ||
unmute(); | ||
} | ||
export class MusicPlayer { | ||
constructor(audio_context, audio_element, track, gain, volume, current_song_path) { | ||
_MusicPlayer_instances.add(this); | ||
this.audio_context = audio_context; | ||
this.audio_element = audio_element; | ||
this.track = track; | ||
this.gain = gain; | ||
this.volume = volume; | ||
this.current_song_path = current_song_path; | ||
this.current_song_duration = 0; | ||
_MusicPlayer_volume_cache.set(this, void 0); | ||
this.is_playing = false; | ||
this.time = 0; | ||
_MusicPlayer_pub_sub.set(this, new PubSub); | ||
__classPrivateFieldSet(this, _MusicPlayer_volume_cache, volume, "f"); | ||
} | ||
mute_toggle() { | ||
if (this.gain.gain.value == 0) { | ||
this.unmute(); | ||
} | ||
else { | ||
mute(); | ||
this.mute(); | ||
} | ||
} | ||
function mute() { | ||
volume_cache = gain.gain.value; | ||
mute() { | ||
__classPrivateFieldSet(this, _MusicPlayer_volume_cache, this.gain.gain.value, "f"); | ||
/* Gentler mute, doesn't pop | ||
@@ -116,9 +116,9 @@ gain.gain.linearRampToValueAtTime( | ||
);*/ | ||
volume = gain.gain.value = 0; | ||
this.volume = this.gain.gain.value = 0; | ||
} | ||
function unmute() { | ||
volume = gain.gain.value = volume_cache; | ||
unmute() { | ||
this.volume = this.gain.gain.value = __classPrivateFieldGet(this, _MusicPlayer_volume_cache, "f"); | ||
} | ||
function change_volume(volume_i) { | ||
volume = gain.gain.value = volume_i; | ||
change_volume(volume_i) { | ||
this.volume = this.gain.gain.value = volume_i; | ||
} | ||
@@ -128,9 +128,9 @@ /** | ||
*/ | ||
function try_seek_async(new_time) { | ||
try_seek_async(new_time) { | ||
return new Promise((resolve, reject) => { | ||
if (track.context.state == "closed" || track.context.state == "suspended") { | ||
is_playing = false; | ||
if (this.track.context.state == "closed" || this.track.context.state == "suspended") { | ||
this.is_playing = false; | ||
reject("Can't seek - track not playing"); | ||
} | ||
audio_element.currentTime = new_time; | ||
this.audio_element.currentTime = new_time; | ||
resolve(null); | ||
@@ -143,36 +143,37 @@ /*audio_element.play().then((s) => resolve(s), (r) => { | ||
} | ||
/** | ||
* Can try to seek even if the audio context was suspended or closed. Best to use try_seek_async() | ||
*/ | ||
function seek_async(new_time) { | ||
return new Promise((resolve, reject) => { | ||
audio_element.currentTime = new_time; | ||
resolve(null); | ||
/* audio_element.play().then((s) => resolve(s), (r) => { | ||
is_playing = false | ||
reject(r) | ||
})*/ | ||
}); | ||
// THIS MIGHT BE UNNECESSARY? CUZ SEEKING DOESN'T REQUIRE PLAY | ||
// /** | ||
// * Can try to seek even if the audio context was suspended or closed. Best to use try_seek_async() | ||
// */ | ||
// seek_async(new_time: number) { | ||
// return new Promise((resolve, reject) => { | ||
// this.audio_element.currentTime = new_time | ||
// resolve(null) | ||
// /* audio_element.play().then((s) => resolve(s), (r) => { | ||
// is_playing = false | ||
// reject(r) | ||
// })*/ | ||
// }) | ||
// // } | ||
// /** | ||
// * Unsafe, throws error if failed. Use try_seek_async or seek_async unless you don't care about the result. | ||
// */ | ||
seek(new_time) { | ||
this.audio_element.currentTime = new_time; | ||
//this.audio_element.play().catch((e) => { throw e }) | ||
} | ||
/** | ||
* Unsafe, throws error if failed. Use try_seek_async or seek_async unless you don't care about the result. | ||
*/ | ||
function seek(new_time) { | ||
audio_element.currentTime = new_time; | ||
audio_element.play().catch((e) => { throw e; }); | ||
} | ||
/** | ||
* Safer play_toggle_async. Normal play_toggle will try to start the player even if the track hasn't started yet, or was previously suspended/closed | ||
*/ | ||
function try_play_toggle_async() { | ||
try_play_toggle_async() { | ||
return new Promise((resolve, reject) => { | ||
if (audio_context.state === "suspended" || audio_context.state === "closed") { | ||
if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") { | ||
reject("Context closed or suspended"); | ||
} | ||
if (audio_element.paused) { | ||
audio_element.play().then((s) => { | ||
is_playing = true; | ||
if (this.audio_element.paused) { | ||
this.audio_element.play().then((s) => { | ||
this.is_playing = true; | ||
resolve(s); | ||
}, (r) => { | ||
is_playing = false; | ||
this.is_playing = false; | ||
reject(r); | ||
@@ -182,4 +183,4 @@ }); | ||
else { | ||
audio_element.pause(); | ||
is_playing = false; | ||
this.audio_element.pause(); | ||
this.is_playing = false; | ||
resolve(null); | ||
@@ -192,13 +193,13 @@ } | ||
*/ | ||
function play_toggle_async() { | ||
play_toggle_async() { | ||
return new Promise((resolve, reject) => { | ||
if (audio_context.state === "suspended" || audio_context.state === "closed") { | ||
audio_context.resume(); | ||
if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") { | ||
this.audio_context.resume(); | ||
} | ||
if (audio_element.paused) { | ||
audio_element.play().then((s) => { | ||
is_playing = true; | ||
if (this.audio_element.paused) { | ||
this.audio_element.play().then((s) => { | ||
this.is_playing = true; | ||
resolve(s); | ||
}, (r) => { | ||
is_playing = false; | ||
this.is_playing = false; | ||
reject(r); | ||
@@ -208,4 +209,4 @@ }); | ||
else { | ||
audio_element.pause(); | ||
is_playing = false; | ||
this.audio_element.pause(); | ||
this.is_playing = false; | ||
resolve(null); | ||
@@ -218,7 +219,7 @@ } | ||
*/ | ||
function play_toggle() { | ||
if (audio_element.paused) { | ||
is_playing = true; | ||
audio_element.play().catch((r) => { | ||
is_playing = false; | ||
play_toggle() { | ||
if (this.audio_element.paused) { | ||
this.is_playing = true; | ||
this.audio_element.play().catch((r) => { | ||
this.is_playing = false; | ||
throw r; | ||
@@ -228,4 +229,4 @@ }); | ||
else { | ||
is_playing = false; | ||
audio_element.pause(); | ||
this.is_playing = false; | ||
this.audio_element.pause(); | ||
} | ||
@@ -236,14 +237,14 @@ } | ||
*/ | ||
function try_play_async() { | ||
try_play_async() { | ||
return new Promise((resolve, reject) => { | ||
if (is_playing) | ||
if (this.is_playing) | ||
reject(Error("Already playing")); | ||
if (audio_context.state === "suspended" || audio_context.state === "closed") { | ||
if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") { | ||
reject("Context closed or suspended"); | ||
} | ||
audio_element.play().then((s) => { | ||
is_playing = true; | ||
this.audio_element.play().then((s) => { | ||
this.is_playing = true; | ||
resolve(s); | ||
}, (r) => { | ||
is_playing = false; | ||
this.is_playing = false; | ||
reject(r); | ||
@@ -256,14 +257,14 @@ }); | ||
*/ | ||
function play_async() { | ||
play_async() { | ||
return new Promise((resolve, reject) => { | ||
if (is_playing) | ||
if (this.is_playing) | ||
resolve(null); | ||
if (audio_context.state === "suspended" || audio_context.state === "closed") { | ||
audio_context.resume(); | ||
if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") { | ||
this.audio_context.resume(); | ||
} | ||
audio_element.play().then((s) => { | ||
is_playing = true; | ||
this.audio_element.play().then((s) => { | ||
this.is_playing = true; | ||
resolve(s); | ||
}, (r) => { | ||
is_playing = false; | ||
this.is_playing = false; | ||
reject(r); | ||
@@ -276,7 +277,7 @@ }); | ||
*/ | ||
function play() { | ||
if (is_playing) | ||
play() { | ||
if (this.is_playing) | ||
return; | ||
audio_element.play().catch((r) => { | ||
is_playing = false; | ||
this.audio_element.play().catch((r) => { | ||
this.is_playing = false; | ||
throw r; | ||
@@ -288,5 +289,5 @@ }); | ||
*/ | ||
function pause() { | ||
audio_element.pause(); | ||
is_playing = false; | ||
pause() { | ||
this.audio_element.pause(); | ||
this.is_playing = false; | ||
} | ||
@@ -296,25 +297,20 @@ /** | ||
*/ | ||
function try_new_song_async(path) { | ||
try_new_song_async(path) { | ||
return new Promise((resolve, reject) => { | ||
audio_element.src = current_song_path = path; | ||
this.audio_element.src = this.current_song_path = path; | ||
//Found out today about this. Such a nice new way to mass remove event listeners! | ||
const controller = new AbortController(); | ||
audio_element.addEventListener("canplaythrough", function canplay_listener(s) { | ||
//current_song_duration = audio_element.duration | ||
this.audio_element.addEventListener("canplaythrough", function canplay_listener(s) { | ||
controller.abort(); | ||
resolve(s); | ||
}, { signal: controller.signal }); | ||
audio_element.addEventListener("error", function error_listener(e) { | ||
this.audio_element.addEventListener("error", function error_listener(e) { | ||
controller.abort(); | ||
reject(e); | ||
}, { signal: controller.signal }); | ||
audio_element.addEventListener("abort", function abort_listener(e) { | ||
this.audio_element.addEventListener("stalled", function stalled_listener(e) { | ||
controller.abort(); | ||
reject(e); | ||
}, { signal: controller.signal }); | ||
audio_element.addEventListener("stalled", function stalled_listener(e) { | ||
controller.abort(); | ||
reject(e); | ||
}, { signal: controller.signal }); | ||
is_playing = false; | ||
this.is_playing = false; | ||
}); | ||
@@ -325,5 +321,5 @@ } | ||
*/ | ||
function new_song(path) { | ||
audio_element.src = current_song_path = path; | ||
//current_song_duration = audio_element.duration | ||
new_song(path) { | ||
this.audio_element.src = this.current_song_path = path; | ||
this.current_song_duration = this.audio_element.duration; | ||
} | ||
@@ -334,5 +330,5 @@ /** | ||
*/ | ||
function get_formatted_duration() { | ||
const dur = audio_element.duration; | ||
current_song_duration = audio_element.duration; | ||
get_formatted_duration() { | ||
const dur = this.audio_element.duration; | ||
this.current_song_duration = this.audio_element.duration; | ||
if (dur == 0 || !dur) | ||
@@ -356,4 +352,4 @@ return "0:00"; | ||
*/ | ||
function get_formatted_current_time() { | ||
const curr = audio_element.currentTime; | ||
get_formatted_current_time() { | ||
const curr = this.audio_element.currentTime; | ||
if (curr == 0 || !curr) | ||
@@ -376,88 +372,66 @@ return "0:00"; | ||
*/ | ||
function subscribe_to_time_tick(callback) { | ||
pub_sub.subscribe(SubscribeEvents.CurrentTimeTick, callback); | ||
emit_current_time(); | ||
on_time_tick(callback) { | ||
__classPrivateFieldGet(this, _MusicPlayer_pub_sub, "f").subscribe(SubscribeEvents.CurrentTimeTick, callback); | ||
__classPrivateFieldGet(this, _MusicPlayer_instances, "m", _MusicPlayer_emit_time).call(this); | ||
} | ||
function emit_current_time() { | ||
const request_id = requestAnimationFrame(emit_current_time.bind(MusicPlayer)); | ||
if (audio_element.ended) | ||
is_playing = false; | ||
if (audio_element.paused) | ||
is_playing == false; | ||
// if use reactively changes volume directly | ||
gain.gain.value = volume; | ||
time = audio_element.currentTime; | ||
if (pub_sub.el_current_time_tick.length == 0) | ||
cancelAnimationFrame(request_id); | ||
pub_sub.emit(SubscribeEvents.CurrentTimeTick, time); | ||
} | ||
/** | ||
* Will give formatted current time via get_formatted_current_time() every animation frame | ||
*/ | ||
function subscribe_to_formatted_current_time_tick(callback) { | ||
pub_sub.subscribe(SubscribeEvents.FormattedCurrentTimeTick, callback); | ||
emit_formatted_current_time(); | ||
on_time_tick_formatted(callback) { | ||
__classPrivateFieldGet(this, _MusicPlayer_pub_sub, "f").subscribe(SubscribeEvents.FormattedCurrentTimeTick, callback); | ||
__classPrivateFieldGet(this, _MusicPlayer_instances, "m", _MusicPlayer_emit_time_fmt).call(this); | ||
} | ||
function emit_formatted_current_time() { | ||
const request_id = requestAnimationFrame(emit_formatted_current_time.bind(MusicPlayer)); | ||
const time = get_formatted_current_time(); | ||
//if (pub_sub.el_formatted_current_time_tick.length == 0) cancelAnimationFrame(request_id) | ||
pub_sub.emit(SubscribeEvents.FormattedCurrentTimeTick, time); | ||
} | ||
/** | ||
* Will give formatted duration time via get_formatted_duration() every animation frame | ||
*/ | ||
function subscribe_to_formatted_duration_time(callback) { | ||
pub_sub.subscribe(SubscribeEvents.FormattedDurationTick, callback); | ||
emit_formatted_duration_time(); | ||
on_duration_formatted(callback) { | ||
__classPrivateFieldGet(this, _MusicPlayer_pub_sub, "f").subscribe(SubscribeEvents.FormattedDurationTick, callback); | ||
__classPrivateFieldGet(this, _MusicPlayer_instances, "m", _MusicPlayer_emit_duration_fmt).call(this); | ||
} | ||
function emit_formatted_duration_time() { | ||
const request_id = requestAnimationFrame(emit_formatted_duration_time.bind(MusicPlayer)); | ||
const time = get_formatted_duration(); | ||
//if (pub_sub.el_formatted_duration_tick.length == 0) cancelAnimationFrame(request_id) | ||
pub_sub.emit(SubscribeEvents.FormattedDurationTick, time); | ||
} | ||
return { | ||
track, | ||
get_is_playing: () => is_playing, | ||
get_current_path: () => current_song_path, | ||
get_current_duration: () => current_song_duration, | ||
volume, | ||
time, | ||
mute, | ||
unmute, | ||
mute_toggle, | ||
change_volume, | ||
try_seek_async, | ||
seek_async, | ||
seek, | ||
play, | ||
pause, | ||
play_toggle, | ||
play_toggle_async, | ||
try_play_toggle_async, | ||
try_new_song_async, | ||
new_song, | ||
get_formatted_duration, | ||
get_formatted_current_time, | ||
subscribe_to_formatted_current_time_tick, | ||
subscribe_to_formatted_duration_time, | ||
subscribe_to_time_tick, | ||
try_play_async, | ||
play_async, | ||
}; | ||
} | ||
_MusicPlayer_volume_cache = new WeakMap(), _MusicPlayer_pub_sub = new WeakMap(), _MusicPlayer_instances = new WeakSet(), _MusicPlayer_emit_time = function _MusicPlayer_emit_time() { | ||
const request_id = requestAnimationFrame(__classPrivateFieldGet(this, _MusicPlayer_instances, "m", _MusicPlayer_emit_time).bind(this)); | ||
if (this.audio_element.ended) | ||
this.is_playing = false; | ||
if (this.audio_element.paused) | ||
this.is_playing == false; | ||
// if use reactively changes volume directly | ||
this.gain.gain.value = this.volume; | ||
this.time = this.audio_element.currentTime; | ||
if (__classPrivateFieldGet(this, _MusicPlayer_pub_sub, "f").el_current_time_tick.length == 0) | ||
cancelAnimationFrame(request_id); | ||
__classPrivateFieldGet(this, _MusicPlayer_pub_sub, "f").emit(SubscribeEvents.CurrentTimeTick, this.time); | ||
}, _MusicPlayer_emit_duration_fmt = function _MusicPlayer_emit_duration_fmt() { | ||
const request_id = requestAnimationFrame(__classPrivateFieldGet(this, _MusicPlayer_instances, "m", _MusicPlayer_emit_duration_fmt).bind(this)); | ||
const time = this.get_formatted_duration(); | ||
if (__classPrivateFieldGet(this, _MusicPlayer_pub_sub, "f").el_formatted_duration_tick.length == 0) | ||
cancelAnimationFrame(request_id); | ||
__classPrivateFieldGet(this, _MusicPlayer_pub_sub, "f").emit(SubscribeEvents.FormattedDurationTick, time); | ||
}, _MusicPlayer_emit_time_fmt = function _MusicPlayer_emit_time_fmt() { | ||
const request_id = requestAnimationFrame(__classPrivateFieldGet(this, _MusicPlayer_instances, "m", _MusicPlayer_emit_time_fmt).bind(this)); | ||
const time = this.get_formatted_current_time(); | ||
if (__classPrivateFieldGet(this, _MusicPlayer_pub_sub, "f").el_formatted_current_time_tick.length == 0) | ||
cancelAnimationFrame(request_id); | ||
__classPrivateFieldGet(this, _MusicPlayer_pub_sub, "f").emit(SubscribeEvents.FormattedCurrentTimeTick, time); | ||
}; | ||
export function MusicPlayerBuilder(audio_element) { | ||
let audio_context; | ||
let gain; | ||
let track; | ||
const volume = 1; | ||
let prev_node; | ||
let is_gain_connected = false; | ||
export class MusicPlayerBuilder { | ||
/** | ||
* Creates a context and gain( Gets connected at the end ) | ||
* Creates a context and #gain( Gets connected at the end ) | ||
* will throw if audio_element is undefined (stupid vue setup amirite?) | ||
* will throw if user has not interacted with the page yet (Can't initiate AudioContext) | ||
*/ | ||
function start() { | ||
constructor(audio_element) { | ||
this.audio_element = audio_element; | ||
_MusicPlayerBuilder_audio_context.set(this, void 0); | ||
_MusicPlayerBuilder_gain.set(this, void 0); | ||
_MusicPlayerBuilder_track.set(this, void 0); | ||
_MusicPlayerBuilder_volume.set(this, 1); | ||
_MusicPlayerBuilder_prev_node.set(this, void 0); | ||
_MusicPlayerBuilder_is_gain_connected.set(this, false | ||
/** | ||
* Creates a context and #gain( Gets connected at the end ) | ||
* will throw if audio_element is undefined (stupid vue setup amirite?) | ||
* will throw if user has not interacted with the page yet (Can't initiate AudioContext) | ||
*/ | ||
); | ||
if (audio_element === undefined) | ||
@@ -467,5 +441,5 @@ throw Error("audio_element was undefined"); | ||
const AudioContext = window.AudioContext || window.webkitAudioContext; | ||
audio_context = new AudioContext(); | ||
track = audio_context.createMediaElementSource(audio_element); | ||
gain = audio_context.createGain(); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_audio_context, new AudioContext(), "f"); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_track, __classPrivateFieldGet(this, _MusicPlayerBuilder_audio_context, "f").createMediaElementSource(audio_element), "f"); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_gain, __classPrivateFieldGet(this, _MusicPlayerBuilder_audio_context, "f").createGain(), "f"); | ||
} | ||
@@ -476,6 +450,6 @@ /** | ||
*/ | ||
function add_analyser() { | ||
const analyser = audio_context.createAnalyser(); | ||
!prev_node ? track.connect(analyser) : prev_node.connect(analyser); | ||
prev_node = analyser; | ||
add_analyser() { | ||
const analyser = __classPrivateFieldGet(this, _MusicPlayerBuilder_audio_context, "f").createAnalyser(); | ||
!__classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f") ? __classPrivateFieldGet(this, _MusicPlayerBuilder_track, "f").connect(analyser) : __classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f").connect(analyser); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_prev_node, analyser, "f"); | ||
return analyser; | ||
@@ -487,6 +461,6 @@ } | ||
*/ | ||
function add_stereo_panner_node() { | ||
const panner = audio_context.createStereoPanner(); | ||
!prev_node ? track.connect(panner) : prev_node.connect(panner); | ||
prev_node = panner; | ||
add_stereo_panner_node() { | ||
const panner = __classPrivateFieldGet(this, _MusicPlayerBuilder_audio_context, "f").createStereoPanner(); | ||
!__classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f") ? __classPrivateFieldGet(this, _MusicPlayerBuilder_track, "f").connect(panner) : __classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f").connect(panner); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_prev_node, panner, "f"); | ||
return panner; | ||
@@ -498,6 +472,6 @@ } | ||
*/ | ||
function add_wave_shaper_node() { | ||
const shaper = audio_context.createWaveShaper(); | ||
!prev_node ? track.connect(shaper) : prev_node.connect(shaper); | ||
prev_node = shaper; | ||
add_wave_shaper_node() { | ||
const shaper = __classPrivateFieldGet(this, _MusicPlayerBuilder_audio_context, "f").createWaveShaper(); | ||
!__classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f") ? __classPrivateFieldGet(this, _MusicPlayerBuilder_track, "f").connect(shaper) : __classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f").connect(shaper); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_prev_node, shaper, "f"); | ||
return shaper; | ||
@@ -508,38 +482,30 @@ } | ||
*/ | ||
function connect_custom_node(node) { | ||
!prev_node ? track.connect(node) : prev_node.connect(node); | ||
prev_node = node; | ||
connect_custom_node(node) { | ||
!__classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f") ? __classPrivateFieldGet(this, _MusicPlayerBuilder_track, "f").connect(node) : __classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f").connect(node); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_prev_node, node, "f"); | ||
} | ||
/** | ||
* Only use if you need to connect the gain before another node, | ||
* eg. if you want the analyser nodes output to be affected by user gain | ||
* Only use if you need to connect the #gain before another node, | ||
* eg. if you want the analyser nodes output to be affected by user #gain | ||
*/ | ||
function connect_gain() { | ||
!prev_node ? track.connect(gain) : prev_node.connect(gain); | ||
prev_node = gain; | ||
is_gain_connected = true; | ||
connect_gain() { | ||
!__classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f") ? __classPrivateFieldGet(this, _MusicPlayerBuilder_track, "f").connect(__classPrivateFieldGet(this, _MusicPlayerBuilder_gain, "f")) : __classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f").connect(__classPrivateFieldGet(this, _MusicPlayerBuilder_gain, "f")); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_prev_node, __classPrivateFieldGet(this, _MusicPlayerBuilder_gain, "f"), "f"); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_is_gain_connected, true, "f"); | ||
} | ||
/** | ||
* Finishes the build | ||
* @returns {MusicPlayer: () => void} | ||
* @returns {Euterpe} | ||
*/ | ||
function build() { | ||
if (!is_gain_connected) { | ||
!prev_node ? track.connect(gain) : prev_node.connect(gain); | ||
prev_node = gain; | ||
build() { | ||
if (!__classPrivateFieldGet(this, _MusicPlayerBuilder_is_gain_connected, "f")) { | ||
!__classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f") ? __classPrivateFieldGet(this, _MusicPlayerBuilder_track, "f").connect(__classPrivateFieldGet(this, _MusicPlayerBuilder_gain, "f")) : __classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f").connect(__classPrivateFieldGet(this, _MusicPlayerBuilder_gain, "f")); | ||
__classPrivateFieldSet(this, _MusicPlayerBuilder_prev_node, __classPrivateFieldGet(this, _MusicPlayerBuilder_gain, "f"), "f"); | ||
} | ||
prev_node.connect(audio_context.destination); | ||
audio_element.preload = "metadata"; | ||
return MusicPlayer(audio_context, audio_element, track, gain, volume); | ||
__classPrivateFieldGet(this, _MusicPlayerBuilder_prev_node, "f").connect(__classPrivateFieldGet(this, _MusicPlayerBuilder_audio_context, "f").destination); | ||
this.audio_element.preload = "metadata"; | ||
return new MusicPlayer(__classPrivateFieldGet(this, _MusicPlayerBuilder_audio_context, "f"), this.audio_element, __classPrivateFieldGet(this, _MusicPlayerBuilder_track, "f"), __classPrivateFieldGet(this, _MusicPlayerBuilder_gain, "f"), __classPrivateFieldGet(this, _MusicPlayerBuilder_volume, "f")); | ||
} | ||
return { | ||
start, | ||
add_analyser, | ||
add_stereo_panner_node, | ||
add_wave_shaper_node, | ||
connect_gain, | ||
connect_custom_node, | ||
build | ||
}; | ||
} | ||
_MusicPlayerBuilder_audio_context = new WeakMap(), _MusicPlayerBuilder_gain = new WeakMap(), _MusicPlayerBuilder_track = new WeakMap(), _MusicPlayerBuilder_volume = new WeakMap(), _MusicPlayerBuilder_prev_node = new WeakMap(), _MusicPlayerBuilder_is_gain_connected = new WeakMap(); | ||
//# sourceMappingURL=index.js.map |
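As the compiled output above shows, the try_* variants reject (for example with "Context closed or suspended") rather than silently resuming a suspended AudioContext. A small sketch of a play/pause button handler that relies on that behaviour; the #play-button id and the music_player variable are assumptions, the API calls come from the code above.

import type { MusicPlayer } from "@euterpe.js/player"

declare const music_player: MusicPlayer // assumed: built via MusicPlayerBuilder.build()

const play_button = document.querySelector("#play-button") as HTMLButtonElement

play_button.addEventListener("click", () => {
    // try_play_toggle_async rejects when the AudioContext is suspended/closed
    // or when the underlying play() call fails, so failures surface here.
    music_player.try_play_toggle_async()
        .then(() => {
            play_button.textContent = music_player.is_playing ? "Pause" : "Play"
        })
        .catch((err) => {
            console.warn("Could not toggle playback:", err)
        })
})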
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package