react-voice-visualizer
Comparing versions 1.3.8...1.4.0
import { Controls, useVoiceVisualizerParams } from "../types/types.ts";
declare function useVoiceVisualizer({ onStartRecording, onStopRecording, onPausedRecording, onResumedRecording, onClearCanvas, onEndAudioPlayback, onStartAudioPlayback, onPausedAudioPlayback, onResumedAudioPlayback, }?: useVoiceVisualizerParams): Controls;
declare function useVoiceVisualizer({ onStartRecording, onStopRecording, onPausedRecording, onResumedRecording, onClearCanvas, onEndAudioPlayback, onStartAudioPlayback, onPausedAudioPlayback, onResumedAudioPlayback, onErrorPlayingAudio, }?: useVoiceVisualizerParams): Controls;
export default useVoiceVisualizer;
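For orientation, a minimal consumer-side sketch of the parameter this release adds to the hook's signature (the `Recorder` component name is hypothetical; the callback itself comes from the updated typings above):

```typescript jsx
import { useVoiceVisualizer, VoiceVisualizer } from "react-voice-visualizer";

const Recorder = () => {
  // onErrorPlayingAudio is new in this release: it fires when audio.play()
  // rejects, e.g. when the browser's autoplay policy blocks playback.
  const controls = useVoiceVisualizer({
    onErrorPlayingAudio: (error) => console.error("Playback failed:", error),
  });

  return <VoiceVisualizer controls={controls} ref={controls.audioRef} />;
};
```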
(function(){"use strict";(e=>{try{if(typeof window>"u")return;var i=document.createElement("style");i.appendChild(document.createTextNode(e)),document.head.appendChild(i)}catch(o){console.error("vite-plugin-css-injected-by-js",o)}})(".voice-visualizer__buttons-container{display:flex;justify-content:center;align-items:center;column-gap:20px;row-gap:15px;flex-wrap:wrap;margin-bottom:40px}.voice-visualizer__btn-center{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#fff;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s}.voice-visualizer__btn-center:hover{background-color:#eaeaea}.voice-visualizer__btn-center>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause{background-color:#ff3030}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause:hover{background-color:#ff4f4f}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause>img{height:50%;max-height:16px}.voice-visualizer__btn-center:hover{border:4px solid #9f9f9f}.voice-visualizer__btn-left{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#ff3030;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s,opacity .3s}.voice-visualizer__btn-left:hover{background-color:#ff4f4f}.voice-visualizer__btn-left:disabled{opacity:.6;background-color:#ff3030}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone{background-color:#fff}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-left>img{width:auto;height:50%;max-height:16px}.voice-visualizer__btn-left:hover{border:4px solid #9f9f9f}.voice-visualizer__btn{box-sizing:border-box;min-width:100px;min-height:60px;padding:5px 20px;border-radius:40px;font-size:15px;background-color:#f0f0f0;transition:background-color .3s,opacity .3s}.voice-visualizer__btn:disabled{opacity:.8;background-color:#f0f0f0}.voice-visualizer__btn:hover{background-color:#bebebe}.voice-visualizer__canvas-container{position:relative;width:fit-content;margin:0 auto;overflow:hidden}.voice-visualizer__canvas-container canvas{display:block}.voice-visualizer__canvas-microphone-btn{position:absolute;top:50%;left:50%;width:auto;max-width:12%;min-width:24px;height:50%;max-height:100px;background-color:transparent;border:none;outline:none;transform:translate(-50%,-50%)}.voice-visualizer__canvas-microphone-icon{width:100%;height:100%;will-change:transform;transition:transform .3s}.voice-visualizer__canvas-microphone-btn:hover .voice-visualizer__canvas-microphone-icon{transform:scale(1.03)}.voice-visualizer__canvas-audio-wave-icon{position:absolute;top:50%;left:50%;width:auto;max-width:40%;height:40%;max-height:100px;transform:translate(-118%,-50%) scale(-1)}.voice-visualizer__canvas-audio-wave-icon2{transform:translate(18%,-50%)}.voice-visualizer__canvas-audio-processing{position:absolute;top:50%;left:50%;margin:0;transform:translate(-50%,-50%)}.voice-visualizer__progress-indicator-hovered{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#85858599}.voice-visualizer__progress-indicator-hovered-time{position:absolute;top:3%;left:1px;width:fit-content;margin:0;padding:0 7px;opacity:.8;font-size:12px;border-radius:0 4px 4px 
0;background-color:#575757;text-align:left}.voice-visualizer__progress-indicator-hovered-time.voice-visualizer__progress-indicator-hovered-time-left{left:unset;right:1px;border-radius:4px 0 0 4px}.voice-visualizer__progress-indicator{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#efefef}.voice-visualizer__progress-indicator-time{position:absolute;top:3%;left:1px;width:fit-content;box-sizing:border-box;min-width:37px;margin:0;padding:0 7px;font-size:12px;border-radius:0 4px 4px 0;text-align:left;color:#000;font-weight:500;background-color:#efefef}.voice-visualizer__progress-indicator-time.voice-visualizer__progress-indicator-time-left{left:unset;right:1px;border-radius:4px 0 0 4px}.voice-visualizer__audio-info-container{box-sizing:border-box;height:55px;display:flex;align-items:center;justify-content:center;gap:30px}.voice-visualizer__audio-info-time{margin:15px 0;min-width:38px;text-align:left}.voice-visualizer__visually-hidden{position:absolute;width:1px;height:1px;margin:-1px;padding:0;border:4px solid #c5c5c5;white-space:nowrap;clip-path:inset(100%);clip:rect(0 0 0 0);overflow:hidden}")})(); | ||
import { jsx as a, jsxs as de, Fragment as Ue } from "react/jsx-runtime"; | ||
import { useState as l, useRef as y, useCallback as tt, useLayoutEffect as We, forwardRef as rt, useEffect as Z } from "react"; | ||
const He = ({ | ||
import { jsx as h, jsxs as de, Fragment as Ge } from "react/jsx-runtime"; | ||
import { useState as l, useRef as S, useCallback as rt, useLayoutEffect as Qe, forwardRef as nt, useEffect as Z } from "react"; | ||
const Re = ({ | ||
canvas: e, | ||
backgroundColor: t | ||
}) => { | ||
const n = e.height, r = e.width, c = Math.round(r / 2), s = e.getContext("2d"); | ||
return s ? (s.clearRect(0, 0, r, n), t !== "transparent" && (s.fillStyle = t, s.fillRect(0, 0, r, n)), { context: s, height: n, width: r, halfWidth: c }) : null; | ||
const n = e.height, r = e.width, o = Math.round(r / 2), u = e.getContext("2d"); | ||
return u ? (u.clearRect(0, 0, r, n), t !== "transparent" && (u.fillStyle = t, u.fillRect(0, 0, r, n)), { context: u, height: n, width: r, halfWidth: o }) : null; | ||
}, De = ({ | ||
@@ -15,8 +15,8 @@ context: e, | ||
x: r, | ||
y: c, | ||
w: s, | ||
h: g | ||
y: o, | ||
w: u, | ||
h: p | ||
}) => { | ||
e.fillStyle = t, e.beginPath(), e.roundRect ? (e.roundRect(r, c, s, g, n), e.fill()) : e.fillRect(r, c, s, g); | ||
}, nt = ({ | ||
e.fillStyle = t, e.beginPath(), e.roundRect ? (e.roundRect(r, o, u, p, n), e.fill()) : e.fillRect(r, o, u, p); | ||
}, it = ({ | ||
barsData: e, | ||
@@ -26,22 +26,22 @@ canvas: t, | ||
gap: r, | ||
backgroundColor: c, | ||
mainBarColor: s, | ||
secondaryBarColor: g, | ||
currentAudioTime: v = 0, | ||
rounded: M, | ||
duration: u | ||
backgroundColor: o, | ||
mainBarColor: u, | ||
secondaryBarColor: p, | ||
currentAudioTime: m = 0, | ||
rounded: I, | ||
duration: z | ||
}) => { | ||
const I = He({ canvas: t, backgroundColor: c }); | ||
if (!I) | ||
const g = Re({ canvas: t, backgroundColor: o }); | ||
if (!g) | ||
return; | ||
const { context: f, height: w } = I, L = v / u; | ||
e.forEach((o, p) => { | ||
const H = p / e.length, m = L > H; | ||
const { context: d, height: N } = g, L = m / z; | ||
e.forEach((s, M) => { | ||
const F = M / e.length, v = L > F; | ||
De({ | ||
context: f, | ||
color: m ? g : s, | ||
rounded: M, | ||
x: p * (n + r * n), | ||
y: w / 2 - o.max, | ||
h: o.max * 2, | ||
context: d, | ||
color: v ? p : u, | ||
rounded: I, | ||
x: M * (n + r * n), | ||
y: N / 2 - s.max, | ||
h: s.max * 2, | ||
w: n | ||
@@ -51,3 +51,3 @@ }); | ||
}; | ||
function it({ | ||
function ct({ | ||
context: e, | ||
@@ -57,4 +57,4 @@ color: t, | ||
width: r, | ||
height: c, | ||
barWidth: s | ||
height: o, | ||
barWidth: u | ||
}) { | ||
@@ -65,9 +65,9 @@ De({ | ||
rounded: n, | ||
x: r / 2 + s / 2, | ||
y: c / 2 - 1, | ||
x: r / 2 + u / 2, | ||
y: o / 2 - 1, | ||
h: 2, | ||
w: r - (r / 2 + s / 2) | ||
w: r - (r / 2 + u / 2) | ||
}); | ||
} | ||
const ct = ({ | ||
const ot = ({ | ||
audioData: e, | ||
@@ -77,66 +77,66 @@ unit: t, | ||
index2: r, | ||
canvas: c, | ||
isRecordingInProgress: s, | ||
isPausedRecording: g, | ||
picks: v, | ||
backgroundColor: M, | ||
barWidth: u, | ||
mainBarColor: I, | ||
secondaryBarColor: f, | ||
rounded: w, | ||
canvas: o, | ||
isRecordingInProgress: u, | ||
isPausedRecording: p, | ||
picks: m, | ||
backgroundColor: I, | ||
barWidth: z, | ||
mainBarColor: g, | ||
secondaryBarColor: d, | ||
rounded: N, | ||
animateCurrentPick: L, | ||
fullscreen: o | ||
fullscreen: s | ||
}) => { | ||
const p = He({ canvas: c, backgroundColor: M }); | ||
if (!p) | ||
const M = Re({ canvas: o, backgroundColor: I }); | ||
if (!M) | ||
return; | ||
const { context: H, height: m, width: x, halfWidth: j } = p; | ||
if (e != null && e.length && s) { | ||
const F = Math.max(...e); | ||
if (!g) { | ||
if (r.current >= u) { | ||
const { context: F, height: v, width: U, halfWidth: C } = M; | ||
if (e != null && e.length && u) { | ||
const x = Math.max(...e); | ||
if (!p) { | ||
if (r.current >= z) { | ||
r.current = 0; | ||
const D = (m - F / 258 * m) / m * 100, U = (-m + F / 258 * m * 2) / m * 100, V = n.current === u ? { | ||
startY: D, | ||
barHeight: U | ||
const A = (v - x / 258 * v) / v * 100, q = (-v + x / 258 * v * 2) / v * 100, Y = n.current === z ? { | ||
startY: A, | ||
barHeight: q | ||
} : null; | ||
n.current >= t ? n.current = u : n.current += u, v.length > (o ? x : j) / u && v.pop(), v.unshift(V); | ||
n.current >= t ? n.current = z : n.current += z, m.length > (s ? U : C) / z && m.pop(), m.unshift(Y); | ||
} | ||
r.current += 1; | ||
} | ||
!o && Q(), L && De({ | ||
context: H, | ||
rounded: w, | ||
color: I, | ||
x: o ? x : j, | ||
y: m - F / 258 * m, | ||
h: -m + F / 258 * m * 2, | ||
w: u | ||
!s && _(), L && De({ | ||
context: F, | ||
rounded: N, | ||
color: g, | ||
x: s ? U : C, | ||
y: v - x / 258 * v, | ||
h: -v + x / 258 * v * 2, | ||
w: z | ||
}); | ||
let B = (o ? x : j) - r.current; | ||
v.forEach((D) => { | ||
D && De({ | ||
context: H, | ||
color: I, | ||
rounded: w, | ||
x: B, | ||
y: D.startY * m / 100 > m / 2 - 1 ? m / 2 - 1 : D.startY * m / 100, | ||
h: D.barHeight * m / 100 > 2 ? D.barHeight * m / 100 : 2, | ||
w: u | ||
}), B -= u; | ||
let V = (s ? U : C) - r.current; | ||
m.forEach((A) => { | ||
A && De({ | ||
context: F, | ||
color: g, | ||
rounded: N, | ||
x: V, | ||
y: A.startY * v / 100 > v / 2 - 1 ? v / 2 - 1 : A.startY * v / 100, | ||
h: A.barHeight * v / 100 > 2 ? A.barHeight * v / 100 : 2, | ||
w: z | ||
}), V -= z; | ||
}); | ||
} else | ||
v.length = 0; | ||
function Q() { | ||
it({ | ||
context: H, | ||
color: f, | ||
rounded: w, | ||
width: x, | ||
height: m, | ||
barWidth: u | ||
m.length = 0; | ||
function _() { | ||
ct({ | ||
context: F, | ||
color: d, | ||
rounded: N, | ||
width: U, | ||
height: v, | ||
barWidth: z | ||
}); | ||
} | ||
}, ke = (e) => { | ||
const t = Math.floor(e / 3600), n = Math.floor(e % 3600 / 60), r = e % 60, c = Math.floor( | ||
}, Ve = (e) => { | ||
const t = Math.floor(e / 3600), n = Math.floor(e % 3600 / 60), r = e % 60, o = Math.floor( | ||
(r - Math.floor(r)) * 1e3 | ||
@@ -148,16 +148,16 @@ ); | ||
)}:${String(Math.floor(r)).padStart(2, "0")}:${String( | ||
c | ||
o | ||
).charAt(0)}` : n > 0 ? `${String(n).padStart(2, "0")}:${String( | ||
Math.floor(r) | ||
).padStart(2, "0")}:${String(c).charAt(0)}` : `${String(Math.floor(r)).padStart(2, "0")}:${String( | ||
c | ||
).padStart(2, "0")}:${String(o).charAt(0)}` : `${String(Math.floor(r)).padStart(2, "0")}:${String( | ||
o | ||
).charAt(0)}`; | ||
}, ot = (e) => { | ||
const t = Math.floor(e / 1e3), n = Math.floor(t / 3600), r = Math.floor(t % 3600 / 60), c = t % 60; | ||
}, st = (e) => { | ||
const t = Math.floor(e / 1e3), n = Math.floor(t / 3600), r = Math.floor(t % 3600 / 60), o = t % 60; | ||
return n > 0 ? `${String(n).padStart(2, "0")}:${String(r).padStart( | ||
2, | ||
"0" | ||
)}:${String(c).padStart(2, "0")}` : `${String(r).padStart(2, "0")}:${String(c).padStart(2, "0")}`; | ||
)}:${String(o).padStart(2, "0")}` : `${String(r).padStart(2, "0")}:${String(o).padStart(2, "0")}`; | ||
}; | ||
function Ye(e) { | ||
function Pe(e) { | ||
if (typeof e == "string") { | ||
@@ -170,3 +170,3 @@ const t = Number(e); | ||
} | ||
const st = ({ | ||
const at = ({ | ||
bufferData: e, | ||
@@ -176,24 +176,24 @@ height: t, | ||
barWidth: r, | ||
gap: c | ||
gap: o | ||
}) => { | ||
const s = n / (r + c * r), g = Math.floor(e.length / s), v = t / 2; | ||
let M = [], u = 0; | ||
for (let I = 0; I < s; I++) { | ||
const f = []; | ||
let w = 0; | ||
for (let o = 0; o < g && I * g + o < e.length; o++) { | ||
const p = e[I * g + o]; | ||
p > 0 && (f.push(p), w++); | ||
const u = n / (r + o * r), p = Math.floor(e.length / u), m = t / 2; | ||
let I = [], z = 0; | ||
for (let g = 0; g < u; g++) { | ||
const d = []; | ||
let N = 0; | ||
for (let s = 0; s < p && g * p + s < e.length; s++) { | ||
const M = e[g * p + s]; | ||
M > 0 && (d.push(M), N++); | ||
} | ||
const L = f.reduce((o, p) => o + p, 0) / w; | ||
L > u && (u = L), M.push({ max: L }); | ||
const L = d.reduce((s, M) => s + M, 0) / N; | ||
L > z && (z = L), I.push({ max: L }); | ||
} | ||
if (v * 0.95 > u * v) { | ||
const I = v * 0.95 / u; | ||
M = M.map((f) => ({ | ||
max: f.max > 0.01 ? f.max * I : 1 | ||
if (m * 0.95 > z * m) { | ||
const g = m * 0.95 / z; | ||
I = I.map((d) => ({ | ||
max: d.max > 0.01 ? d.max * g : 1 | ||
})); | ||
} | ||
return M; | ||
}, at = (e) => { | ||
return I; | ||
}, ut = (e) => { | ||
if (!e) | ||
@@ -203,4 +203,4 @@ return ""; | ||
return t && t.length >= 2 ? `.${t[1]}` : ""; | ||
}, ut = (e) => { | ||
const t = Math.floor(e / 3600), n = Math.floor(e % 3600 / 60), r = e % 60, c = Math.floor( | ||
}, ht = (e) => { | ||
const t = Math.floor(e / 3600), n = Math.floor(e % 3600 / 60), r = e % 60, o = Math.floor( | ||
(r - Math.floor(r)) * 1e3 | ||
@@ -214,5 +214,5 @@ ); | ||
).padStart(2, "0")}m` : `${String(Math.floor(r)).padStart(2, "0")}:${String( | ||
c | ||
).charAt(0)}${String(c).charAt(1)}s`; | ||
}, ht = (e) => { | ||
o | ||
).charAt(0)}${String(o).charAt(1)}s`; | ||
}, lt = (e) => { | ||
onmessage = (t) => { | ||
@@ -222,3 +222,3 @@ postMessage(e(t.data)); | ||
}; | ||
function lt({ | ||
function mt({ | ||
fn: e, | ||
@@ -228,28 +228,28 @@ initialValue: t, | ||
}) { | ||
const [r, c] = l(t); | ||
const [r, o] = l(t); | ||
return { | ||
result: r, | ||
setResult: c, | ||
run: (g) => { | ||
const v = new Worker( | ||
setResult: o, | ||
run: (p) => { | ||
const m = new Worker( | ||
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions | ||
URL.createObjectURL(new Blob([`(${ht})(${e})`])) | ||
URL.createObjectURL(new Blob([`(${lt})(${e})`])) | ||
); | ||
v.onmessage = (M) => { | ||
M.data && (c(M.data), n && n(), v.terminate()); | ||
}, v.onerror = (M) => { | ||
console.error(M.message), v.terminate(); | ||
}, v.postMessage(g); | ||
m.onmessage = (I) => { | ||
I.data && (o(I.data), n && n(), m.terminate()); | ||
}, m.onerror = (I) => { | ||
console.error(I.message), m.terminate(); | ||
}, m.postMessage(p); | ||
} | ||
}; | ||
} | ||
const mt = (e, t = 250) => { | ||
const n = y(); | ||
return tt( | ||
const vt = (e, t = 250) => { | ||
const n = S(); | ||
return rt( | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
(...r) => { | ||
const c = () => { | ||
const o = () => { | ||
clearTimeout(n.current), e(...r); | ||
}; | ||
clearTimeout(n.current), n.current = setTimeout(c, t); | ||
clearTimeout(n.current), n.current = setTimeout(o, t); | ||
}, | ||
@@ -259,7 +259,7 @@ [e, t] | ||
}; | ||
const vt = ({ | ||
const dt = ({ | ||
color: e = "#000000", | ||
stroke: t = 2, | ||
className: n | ||
}) => /* @__PURE__ */ a( | ||
}) => /* @__PURE__ */ h( | ||
"svg", | ||
@@ -271,3 +271,3 @@ { | ||
className: n, | ||
children: /* @__PURE__ */ a( | ||
children: /* @__PURE__ */ h( | ||
"path", | ||
@@ -283,6 +283,6 @@ { | ||
} | ||
), Ge = ({ | ||
), We = ({ | ||
color: e = "#FFFFFF", | ||
reflect: t | ||
}) => /* @__PURE__ */ a( | ||
}) => /* @__PURE__ */ h( | ||
"svg", | ||
@@ -293,3 +293,3 @@ { | ||
className: `voice-visualizer__canvas-audio-wave-icon ${t ? "voice-visualizer__canvas-audio-wave-icon2" : ""}`, | ||
children: /* @__PURE__ */ a( | ||
children: /* @__PURE__ */ h( | ||
"path", | ||
@@ -302,10 +302,10 @@ { | ||
} | ||
), Be = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjMiIGhlaWdodD0iMzMiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTEuMSAxNi43MmMwIDMgLjk2IDUuOCAzLjYxIDcuOTVhOS45NiA5Ljk2IDAgMCAwIDYuNSAyLjE3bTAgMHY0LjM0aDQuMzQtOC42N200LjM0LTQuMzRjMi4zNSAwIDQuNDItLjQ4IDYuNS0yLjE3YTkuODcgOS44NyAwIDAgMCAzLjYxLTcuOTVNMTEuMjIgMS44MmMtMS40NSAwLTIuNS4zNy0zLjMuOTNhNS42IDUuNiAwIDAgMC0xLjg0IDIuNGMtLjkgMi4wNi0xLjEgNC43Ny0xLjEgNy4yNCAwIDIuNDYuMiA1LjE3IDEuMSA3LjI0YTUuNiA1LjYgMCAwIDAgMS44NCAyLjRjLjguNTUgMS44NS45MiAzLjMuOTIgMS40NCAwIDIuNS0uMzcgMy4yOS0uOTNhNS42IDUuNiAwIDAgMCAxLjg0LTIuNGMuOS0yLjA2IDEuMS00Ljc3IDEuMS03LjIzIDAtMi40Ny0uMi01LjE4LTEuMS03LjI0YTUuNiA1LjYgMCAwIDAtMS44NC0yLjQgNS41MiA1LjUyIDAgMCAwLTMuMy0uOTNaIiBzdHJva2U9IiMwMDAiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCIgc3Ryb2tlLWxpbmVqb2luPSJyb3VuZCIvPgo8L3N2Zz4K", dt = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjYiIGhlaWdodD0iMjQiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE4Ljc1IDYuMTZjNC4zMSAyLjYgNi40NiAzLjkgNi40NiA1Ljg0IDAgMS45NS0yLjE1IDMuMjQtNi40NiA1Ljg0bC00Ljg0IDIuOTJjLTQuMzEgMi42LTYuNDYgMy44OS04LjA4IDIuOTItMS42Mi0uOTgtMS42Mi0zLjU3LTEuNjItOC43NlY5LjA4YzAtNS4xOSAwLTcuNzggMS42Mi04Ljc2IDEuNjItLjk3IDMuNzcuMzMgOC4wOCAyLjkybDQuODQgMi45MloiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", Pe = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjEiIGhlaWdodD0iMjkiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE0IDMuNWEzLjUgMy41IDAgMSAxIDcgMHYyMmEzLjUgMy41IDAgMSAxLTcgMHYtMjJaIiBmaWxsPSIjZmZmIi8+CiAgPHJlY3Qgd2lkdGg9IjciIGhlaWdodD0iMjkiIHJ4PSIzLjUiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", ft = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjciIGhlaWdodD0iMjUiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHJlY3QgeD0iLjIxIiB3aWR0aD0iMjYiIGhlaWdodD0iMjUiIHJ4PSI1IiBmaWxsPSIjZmZmIi8+Cjwvc3ZnPgo="; | ||
function zt(e) { | ||
const t = y(e); | ||
return We(() => { | ||
), ke = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjMiIGhlaWdodD0iMzMiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTEuMSAxNi43MmMwIDMgLjk2IDUuOCAzLjYxIDcuOTVhOS45NiA5Ljk2IDAgMCAwIDYuNSAyLjE3bTAgMHY0LjM0aDQuMzQtOC42N200LjM0LTQuMzRjMi4zNSAwIDQuNDItLjQ4IDYuNS0yLjE3YTkuODcgOS44NyAwIDAgMCAzLjYxLTcuOTVNMTEuMjIgMS44MmMtMS40NSAwLTIuNS4zNy0zLjMuOTNhNS42IDUuNiAwIDAgMC0xLjg0IDIuNGMtLjkgMi4wNi0xLjEgNC43Ny0xLjEgNy4yNCAwIDIuNDYuMiA1LjE3IDEuMSA3LjI0YTUuNiA1LjYgMCAwIDAgMS44NCAyLjRjLjguNTUgMS44NS45MiAzLjMuOTIgMS40NCAwIDIuNS0uMzcgMy4yOS0uOTNhNS42IDUuNiAwIDAgMCAxLjg0LTIuNGMuOS0yLjA2IDEuMS00Ljc3IDEuMS03LjIzIDAtMi40Ny0uMi01LjE4LTEuMS03LjI0YTUuNiA1LjYgMCAwIDAtMS44NC0yLjQgNS41MiA1LjUyIDAgMCAwLTMuMy0uOTNaIiBzdHJva2U9IiMwMDAiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCIgc3Ryb2tlLWxpbmVqb2luPSJyb3VuZCIvPgo8L3N2Zz4K", ft = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjYiIGhlaWdodD0iMjQiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE4Ljc1IDYuMTZjNC4zMSAyLjYgNi40NiAzLjkgNi40NiA1Ljg0IDAgMS45NS0yLjE1IDMuMjQtNi40NiA1Ljg0bC00Ljg0IDIuOTJjLTQuMzEgMi42LTYuNDYgMy44OS04LjA4IDIuOTItMS42Mi0uOTgtMS42Mi0zLjU3LTEuNjItOC43NlY5LjA4YzAtNS4xOSAwLTcuNzggMS42Mi04Ljc2IDEuNjItLjk3IDMuNzcuMzMgOC4wOCAyLjkybDQuODQgMi45MloiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", Je = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjEiIGhlaWdodD0iMjkiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE0IDMuNWEzLjUgMy41IDAgMSAxIDcgMHYyMmEzLjUgMy41IDAgMSAxLTcgMHYtMjJaIiBmaWxsPSIjZmZmIi8+CiAgPHJlY3Qgd2lkdGg9IjciIGhlaWdodD0iMjkiIHJ4PSIzLjUiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", zt = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjciIGhlaWdodD0iMjUiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHJlY3QgeD0iLjIxIiB3aWR0aD0iMjYiIGhlaWdodD0iMjUiIHJ4PSI1IiBmaWxsPSIjZmZmIi8+Cjwvc3ZnPgo="; | ||
function gt(e) { | ||
const t = S(e); | ||
return Qe(() => { | ||
t.current = e; | ||
}, [e]), t; | ||
} | ||
const It = rt( | ||
const It = nt( | ||
({ | ||
@@ -317,163 +317,164 @@ controls: { | ||
duration: r, | ||
audioSrc: c, | ||
currentAudioTime: s, | ||
bufferFromRecordedBlob: g, | ||
togglePauseResume: v, | ||
startRecording: M, | ||
stopRecording: u, | ||
saveAudioFile: I, | ||
isAvailableRecordedAudio: f, | ||
isPausedRecordedAudio: w, | ||
audioSrc: o, | ||
currentAudioTime: u, | ||
bufferFromRecordedBlob: p, | ||
togglePauseResume: m, | ||
startRecording: I, | ||
stopRecording: z, | ||
saveAudioFile: g, | ||
isAvailableRecordedAudio: d, | ||
isPausedRecordedAudio: N, | ||
isPausedRecording: L, | ||
isProcessingRecordedAudio: o, | ||
isCleared: p, | ||
formattedDuration: H, | ||
formattedRecordingTime: m, | ||
formattedRecordedAudioCurrentTime: x, | ||
clearCanvas: j, | ||
setCurrentAudioTime: Q, | ||
_setIsProcessingAudioOnComplete: F, | ||
_setIsProcessingOnResize: B | ||
isProcessingRecordedAudio: s, | ||
isCleared: M, | ||
formattedDuration: F, | ||
formattedRecordingTime: v, | ||
formattedRecordedAudioCurrentTime: U, | ||
clearCanvas: C, | ||
setCurrentAudioTime: _, | ||
_isAutoplayPreloadedBlob: x, | ||
_setIsProcessingAudioOnComplete: V, | ||
_setIsProcessingOnResize: A | ||
}, | ||
width: D = "100%", | ||
height: U = 200, | ||
speed: V = 3, | ||
backgroundColor: $ = "transparent", | ||
mainBarColor: R = "#FFFFFF", | ||
secondaryBarColor: q = "#5e5e5e", | ||
barWidth: P = 2, | ||
gap: fe = 1, | ||
rounded: ne = 5, | ||
isControlPanelShown: ie = !0, | ||
isDownloadAudioButtonShown: ce = !1, | ||
animateCurrentPick: X = !0, | ||
fullscreen: oe = !1, | ||
onlyRecording: W = !1, | ||
isDefaultUIShown: se = !0, | ||
defaultMicrophoneIconColor: Te = R, | ||
defaultAudioWaveIconColor: Le = R, | ||
width: q = "100%", | ||
height: Y = 200, | ||
speed: fe = 3, | ||
backgroundColor: H = "transparent", | ||
mainBarColor: $ = "#FFFFFF", | ||
secondaryBarColor: X = "#5e5e5e", | ||
barWidth: W = 2, | ||
gap: ze = 1, | ||
rounded: ce = 5, | ||
isControlPanelShown: oe = !0, | ||
isDownloadAudioButtonShown: se = !1, | ||
animateCurrentPick: K = !0, | ||
fullscreen: ae = !1, | ||
onlyRecording: k = !1, | ||
isDefaultUIShown: ue = !0, | ||
defaultMicrophoneIconColor: Te = $, | ||
defaultAudioWaveIconColor: we = $, | ||
mainContainerClassName: be, | ||
canvasContainerClassName: ze, | ||
isProgressIndicatorShown: Y = !W, | ||
progressIndicatorClassName: d, | ||
isProgressIndicatorTimeShown: C = !0, | ||
progressIndicatorTimeClassName: K, | ||
isProgressIndicatorOnHoverShown: ae = !W, | ||
progressIndicatorOnHoverClassName: G, | ||
isProgressIndicatorTimeOnHoverShown: T = !0, | ||
progressIndicatorTimeOnHoverClassName: _, | ||
isAudioProcessingTextShown: h = !0, | ||
audioProcessingTextClassName: pe, | ||
controlButtonsClassName: we | ||
}, Se) => { | ||
const [ge, Ee] = l(0), [b, Ce] = l(0), [ee, ue] = l(0), [he, _e] = l(0), [le, te] = l(!1), [Ne, ye] = l(window.innerWidth), [re, Ae] = l(!1), i = Ne < 768, A = Math.trunc(V), S = Math.trunc(fe), N = Math.trunc( | ||
i && S > 0 ? P + 1 : P | ||
), O = N + S * N, z = y(null), je = y([]), me = y(A), ve = y(N), Je = y(N), Me = y(null), Qe = zt(Ne), { | ||
result: Ie, | ||
setResult: Ve, | ||
run: qe | ||
} = lt({ | ||
fn: st, | ||
canvasContainerClassName: ge, | ||
isProgressIndicatorShown: Se = !k, | ||
progressIndicatorClassName: Me, | ||
isProgressIndicatorTimeShown: B = !0, | ||
progressIndicatorTimeClassName: f, | ||
isProgressIndicatorOnHoverShown: T = !k, | ||
progressIndicatorOnHoverClassName: ee, | ||
isProgressIndicatorTimeOnHoverShown: he = !0, | ||
progressIndicatorTimeOnHoverClassName: G, | ||
isAudioProcessingTextShown: j = !0, | ||
audioProcessingTextClassName: b, | ||
controlButtonsClassName: a | ||
}, le) => { | ||
const [pe, Ee] = l(0), [E, Ce] = l(0), [me, _e] = l(0), [te, ve] = l(0), [re, Ne] = l(!1), [ye, ne] = l(window.innerWidth), [Ae, Ie] = l(!1), je = ye < 768, Le = Math.trunc(fe), P = Math.trunc(ze), O = Math.trunc( | ||
je && P > 0 ? W + 1 : W | ||
), i = O + P * O, c = S(null), y = S([]), w = S(Le), xe = S(O), He = S(O), ie = S(null), $e = gt(ye), { | ||
result: R, | ||
setResult: qe, | ||
run: Xe | ||
} = mt({ | ||
fn: at, | ||
initialValue: [], | ||
onMessageReceived: Ke | ||
}), Xe = mt(xe); | ||
onMessageReceived: et | ||
}), Ke = vt(Oe); | ||
Z(() => { | ||
xe(); | ||
const E = () => { | ||
Qe.current !== window.innerWidth && (f ? (ye(window.innerWidth), B(!0), Ae(!0), Xe()) : (ye(window.innerWidth), xe())); | ||
Oe(); | ||
const D = () => { | ||
$e.current !== window.innerWidth && (d ? (ne(window.innerWidth), A(!0), Ie(!0), Ke()) : (ne(window.innerWidth), Oe())); | ||
}; | ||
return window.addEventListener("resize", E), () => { | ||
window.removeEventListener("resize", E); | ||
return window.addEventListener("resize", D), () => { | ||
window.removeEventListener("resize", D); | ||
}; | ||
}, [D, f]), We(() => { | ||
z.current && ((me.current >= A || !e.length) && (me.current = 0, ct({ | ||
}, [q, d]), Qe(() => { | ||
c.current && ((w.current >= Le || !e.length) && (w.current = 0, ot({ | ||
audioData: e, | ||
unit: O, | ||
index: ve, | ||
index2: Je, | ||
canvas: z.current, | ||
picks: je.current, | ||
unit: i, | ||
index: xe, | ||
index2: He, | ||
canvas: c.current, | ||
picks: y.current, | ||
isRecordingInProgress: t, | ||
isPausedRecording: L, | ||
backgroundColor: $, | ||
mainBarColor: R, | ||
secondaryBarColor: q, | ||
barWidth: N, | ||
rounded: ne, | ||
animateCurrentPick: X, | ||
fullscreen: oe | ||
})), me.current += 1); | ||
backgroundColor: H, | ||
mainBarColor: $, | ||
secondaryBarColor: X, | ||
barWidth: O, | ||
rounded: ce, | ||
animateCurrentPick: K, | ||
fullscreen: ae | ||
})), w.current += 1); | ||
}, [ | ||
z.current, | ||
c.current, | ||
e, | ||
N, | ||
O, | ||
H, | ||
$, | ||
R, | ||
q, | ||
ne, | ||
oe, | ||
se, | ||
he | ||
X, | ||
ce, | ||
ae, | ||
ue, | ||
te | ||
]), Z(() => { | ||
var E, k; | ||
if (f) | ||
return le ? (E = z.current) == null || E.addEventListener("mouseleave", Re) : (k = z.current) == null || k.addEventListener("mouseenter", $e), () => { | ||
var J, Fe; | ||
le ? (J = z.current) == null || J.removeEventListener( | ||
var D, J; | ||
if (d) | ||
return re ? (D = c.current) == null || D.addEventListener("mouseleave", Fe) : (J = c.current) == null || J.addEventListener("mouseenter", Ze), () => { | ||
var Q, Be; | ||
re ? (Q = c.current) == null || Q.removeEventListener( | ||
"mouseleave", | ||
Re | ||
) : (Fe = z.current) == null || Fe.removeEventListener( | ||
Fe | ||
) : (Be = c.current) == null || Be.removeEventListener( | ||
"mouseenter", | ||
$e | ||
Ze | ||
); | ||
}; | ||
}, [le, f]), Z(() => { | ||
var k; | ||
if (!g || !z.current || t || re) | ||
}, [re, d]), Z(() => { | ||
var J; | ||
if (!p || !c.current || t || Ae) | ||
return; | ||
if (W) { | ||
j(); | ||
if (k) { | ||
C(); | ||
return; | ||
} | ||
je.current = []; | ||
const E = g.getChannelData(0); | ||
return qe({ | ||
bufferData: E, | ||
height: ee, | ||
width: he, | ||
barWidth: N, | ||
gap: S | ||
}), (k = z.current) == null || k.addEventListener( | ||
y.current = []; | ||
const D = p.getChannelData(0); | ||
return Xe({ | ||
bufferData: D, | ||
height: me, | ||
width: te, | ||
barWidth: O, | ||
gap: P | ||
}), (J = c.current) == null || J.addEventListener( | ||
"mousemove", | ||
Oe | ||
Ue | ||
), () => { | ||
var J; | ||
(J = z.current) == null || J.removeEventListener( | ||
var Q; | ||
(Q = c.current) == null || Q.removeEventListener( | ||
"mousemove", | ||
Oe | ||
Ue | ||
); | ||
}; | ||
}, [ | ||
g, | ||
b, | ||
ee, | ||
fe, | ||
P, | ||
re | ||
p, | ||
E, | ||
me, | ||
ze, | ||
W, | ||
Ae | ||
]), Z(() => { | ||
if (!(W || !(Ie != null && Ie.length) || !z.current || o)) { | ||
if (p) { | ||
Ve([]); | ||
if (!(k || !(R != null && R.length) || !c.current || s)) { | ||
if (M) { | ||
qe([]); | ||
return; | ||
} | ||
nt({ | ||
barsData: Ie, | ||
canvas: z.current, | ||
barWidth: N, | ||
gap: S, | ||
backgroundColor: $, | ||
mainBarColor: R, | ||
secondaryBarColor: q, | ||
currentAudioTime: s, | ||
rounded: ne, | ||
it({ | ||
barsData: R, | ||
canvas: c.current, | ||
barWidth: O, | ||
gap: P, | ||
backgroundColor: H, | ||
mainBarColor: $, | ||
secondaryBarColor: X, | ||
currentAudioTime: u, | ||
rounded: ce, | ||
duration: r | ||
@@ -483,44 +484,44 @@ }); | ||
}, [ | ||
Ie, | ||
s, | ||
p, | ||
ne, | ||
R, | ||
u, | ||
M, | ||
ce, | ||
H, | ||
$, | ||
R, | ||
q | ||
X | ||
]), Z(() => { | ||
o && z.current && He({ | ||
canvas: z.current, | ||
backgroundColor: $ | ||
s && c.current && Re({ | ||
canvas: c.current, | ||
backgroundColor: H | ||
}); | ||
}, [o]); | ||
function xe() { | ||
if (!Me.current || !z.current) | ||
}, [s]); | ||
function Oe() { | ||
if (!ie.current || !c.current) | ||
return; | ||
me.current = A; | ||
const E = Math.trunc( | ||
Me.current.clientHeight * window.devicePixelRatio / 2 | ||
w.current = Le; | ||
const D = Math.trunc( | ||
ie.current.clientHeight * window.devicePixelRatio / 2 | ||
) * 2; | ||
Ce(Me.current.clientWidth), ue(E), _e( | ||
Ce(ie.current.clientWidth), _e(D), ve( | ||
Math.round( | ||
Me.current.clientWidth * window.devicePixelRatio | ||
ie.current.clientWidth * window.devicePixelRatio | ||
) | ||
), Ae(!1); | ||
), Ie(!1); | ||
} | ||
function Ke() { | ||
B(!1), F(!1); | ||
function et() { | ||
A(!1), V(!1), x && m(); | ||
} | ||
const $e = () => { | ||
te(!0); | ||
}, Re = () => { | ||
te(!1); | ||
}, Oe = (E) => { | ||
Ee(E.offsetX); | ||
}, et = (E) => { | ||
const k = Se; | ||
if (k.current && z.current) { | ||
const J = r / b * (E.clientX - z.current.getBoundingClientRect().left); | ||
k.current.currentTime = J, Q(J); | ||
const Ze = () => { | ||
Ne(!0); | ||
}, Fe = () => { | ||
Ne(!1); | ||
}, Ue = (D) => { | ||
Ee(D.offsetX); | ||
}, tt = (D) => { | ||
const J = le; | ||
if (J.current && c.current) { | ||
const Q = r / E * (D.clientX - c.current.getBoundingClientRect().left); | ||
J.current.currentTime = Q, _(Q); | ||
} | ||
}, Ze = s / r * b; | ||
}, Ye = u / r * E; | ||
return /* @__PURE__ */ de("div", { className: `voice-visualizer ${be ?? ""}`, children: [ | ||
@@ -530,16 +531,16 @@ /* @__PURE__ */ de( | ||
{ | ||
className: `voice-visualizer__canvas-container ${ze ?? ""}`, | ||
ref: Me, | ||
style: { width: Ye(D) }, | ||
className: `voice-visualizer__canvas-container ${ge ?? ""}`, | ||
ref: ie, | ||
style: { width: Pe(q) }, | ||
children: [ | ||
/* @__PURE__ */ a( | ||
/* @__PURE__ */ h( | ||
"canvas", | ||
{ | ||
ref: z, | ||
width: he, | ||
height: ee, | ||
onClick: et, | ||
ref: c, | ||
width: te, | ||
height: me, | ||
onClick: tt, | ||
style: { | ||
height: Ye(U), | ||
width: b | ||
height: Pe(Y), | ||
width: E | ||
}, | ||
@@ -549,12 +550,12 @@ children: "Your browser does not support HTML5 Canvas." | ||
), | ||
se && p && /* @__PURE__ */ de(Ue, { children: [ | ||
/* @__PURE__ */ a(Ge, { color: Le }), | ||
/* @__PURE__ */ a(Ge, { color: Le, reflect: !0 }), | ||
/* @__PURE__ */ a( | ||
ue && M && /* @__PURE__ */ de(Ge, { children: [ | ||
/* @__PURE__ */ h(We, { color: we }), | ||
/* @__PURE__ */ h(We, { color: we, reflect: !0 }), | ||
/* @__PURE__ */ h( | ||
"button", | ||
{ | ||
onClick: M, | ||
onClick: I, | ||
className: "voice-visualizer__canvas-microphone-btn", | ||
children: /* @__PURE__ */ a( | ||
vt, | ||
children: /* @__PURE__ */ h( | ||
dt, | ||
{ | ||
@@ -569,25 +570,25 @@ color: Te, | ||
] }), | ||
h && o && /* @__PURE__ */ a( | ||
j && s && /* @__PURE__ */ h( | ||
"p", | ||
{ | ||
className: `voice-visualizer__canvas-audio-processing ${pe ?? ""}`, | ||
style: { color: R }, | ||
className: `voice-visualizer__canvas-audio-processing ${b ?? ""}`, | ||
style: { color: $ }, | ||
children: "Processing Audio..." | ||
} | ||
), | ||
le && f && !o && !i && ae && /* @__PURE__ */ a( | ||
re && d && !s && !je && T && /* @__PURE__ */ h( | ||
"div", | ||
{ | ||
className: `voice-visualizer__progress-indicator-hovered ${G ?? ""}`, | ||
className: `voice-visualizer__progress-indicator-hovered ${ee ?? ""}`, | ||
style: { | ||
left: ge | ||
left: pe | ||
}, | ||
children: T && /* @__PURE__ */ a( | ||
children: he && /* @__PURE__ */ h( | ||
"p", | ||
{ | ||
className: `voice-visualizer__progress-indicator-hovered-time | ||
${b - ge < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""} | ||
${_ ?? ""}`, | ||
children: ke( | ||
r / b * ge | ||
${E - pe < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""} | ||
${G ?? ""}`, | ||
children: Ve( | ||
r / E * pe | ||
) | ||
@@ -598,14 +599,14 @@ } | ||
), | ||
Y && f && !o && r ? /* @__PURE__ */ a( | ||
Se && d && !s && r ? /* @__PURE__ */ h( | ||
"div", | ||
{ | ||
className: `voice-visualizer__progress-indicator ${d ?? ""}`, | ||
className: `voice-visualizer__progress-indicator ${Me ?? ""}`, | ||
style: { | ||
left: Ze < b - 1 ? Ze : b - 1 | ||
left: Ye < E - 1 ? Ye : E - 1 | ||
}, | ||
children: C && /* @__PURE__ */ a( | ||
children: B && /* @__PURE__ */ h( | ||
"p", | ||
{ | ||
className: `voice-visualizer__progress-indicator-time ${b - s * b / r < 70 ? "voice-visualizer__progress-indicator-time-left" : ""} ${K ?? ""}`, | ||
children: x | ||
className: `voice-visualizer__progress-indicator-time ${E - u * E / r < 70 ? "voice-visualizer__progress-indicator-time-left" : ""} ${f ?? ""}`, | ||
children: U | ||
} | ||
@@ -618,17 +619,17 @@ ) | ||
), | ||
ie && /* @__PURE__ */ de(Ue, { children: [ | ||
oe && /* @__PURE__ */ de(Ge, { children: [ | ||
/* @__PURE__ */ de("div", { className: "voice-visualizer__audio-info-container", children: [ | ||
t && /* @__PURE__ */ a("p", { className: "voice-visualizer__audio-info-time", children: m }), | ||
r && !o ? /* @__PURE__ */ a("p", { children: H }) : null | ||
t && /* @__PURE__ */ h("p", { className: "voice-visualizer__audio-info-time", children: v }), | ||
r && !s ? /* @__PURE__ */ h("p", { children: F }) : null | ||
] }), | ||
/* @__PURE__ */ de("div", { className: "voice-visualizer__buttons-container", children: [ | ||
t && /* @__PURE__ */ a( | ||
t && /* @__PURE__ */ h( | ||
"button", | ||
{ | ||
className: `voice-visualizer__btn-left ${L ? "voice-visualizer__btn-left-microphone" : ""}`, | ||
onClick: v, | ||
children: /* @__PURE__ */ a( | ||
onClick: m, | ||
children: /* @__PURE__ */ h( | ||
"img", | ||
{ | ||
src: L ? Be : Pe, | ||
src: L ? ke : Je, | ||
alt: L ? "Play" : "Pause" | ||
@@ -639,13 +640,13 @@ } | ||
), | ||
!p && /* @__PURE__ */ a( | ||
!M && /* @__PURE__ */ h( | ||
"button", | ||
{ | ||
className: `voice-visualizer__btn-left ${t ? "voice-visualizer__visually-hidden" : ""}`, | ||
onClick: v, | ||
disabled: o, | ||
children: /* @__PURE__ */ a( | ||
onClick: m, | ||
disabled: s, | ||
children: /* @__PURE__ */ h( | ||
"img", | ||
{ | ||
src: w ? dt : Pe, | ||
alt: w ? "Play" : "Pause" | ||
src: N ? ft : Je, | ||
alt: N ? "Play" : "Pause" | ||
} | ||
@@ -655,33 +656,33 @@ ) | ||
), | ||
p && /* @__PURE__ */ a( | ||
M && /* @__PURE__ */ h( | ||
"button", | ||
{ | ||
className: "voice-visualizer__btn-center", | ||
onClick: M, | ||
children: /* @__PURE__ */ a("img", { src: Be, alt: "Microphone" }) | ||
onClick: I, | ||
children: /* @__PURE__ */ h("img", { src: ke, alt: "Microphone" }) | ||
} | ||
), | ||
/* @__PURE__ */ a( | ||
/* @__PURE__ */ h( | ||
"button", | ||
{ | ||
className: `voice-visualizer__btn-center voice-visualizer__btn-center-pause ${t ? "" : "voice-visualizer__visually-hidden"}`, | ||
onClick: u, | ||
children: /* @__PURE__ */ a("img", { src: ft, alt: "Stop" }) | ||
onClick: z, | ||
children: /* @__PURE__ */ h("img", { src: zt, alt: "Stop" }) | ||
} | ||
), | ||
!p && /* @__PURE__ */ a( | ||
!M && /* @__PURE__ */ h( | ||
"button", | ||
{ | ||
onClick: j, | ||
className: `voice-visualizer__btn ${we ?? ""}`, | ||
disabled: o, | ||
onClick: C, | ||
className: `voice-visualizer__btn ${a ?? ""}`, | ||
disabled: s, | ||
children: "Clear" | ||
} | ||
), | ||
ce && n && /* @__PURE__ */ a( | ||
se && n && /* @__PURE__ */ h( | ||
"button", | ||
{ | ||
onClick: I, | ||
className: `voice-visualizer__btn ${we ?? ""}`, | ||
disabled: o, | ||
onClick: g, | ||
className: `voice-visualizer__btn ${a ?? ""}`, | ||
disabled: s, | ||
children: "Download Audio" | ||
@@ -692,7 +693,7 @@ } | ||
] }), | ||
f && /* @__PURE__ */ a( | ||
/* @__PURE__ */ h( | ||
"audio", | ||
{ | ||
ref: Se, | ||
src: c, | ||
ref: le, | ||
src: d ? o : "", | ||
controls: !0, | ||
@@ -710,151 +711,166 @@ style: { display: "none" } | ||
onResumedRecording: r, | ||
onClearCanvas: c, | ||
onEndAudioPlayback: s, | ||
onStartAudioPlayback: g, | ||
onPausedAudioPlayback: v, | ||
onResumedAudioPlayback: M | ||
onClearCanvas: o, | ||
onEndAudioPlayback: u, | ||
onStartAudioPlayback: p, | ||
onPausedAudioPlayback: m, | ||
onResumedAudioPlayback: I, | ||
onErrorPlayingAudio: z | ||
} = {}) { | ||
const [u, I] = l(!1), [f, w] = l(!1), [L, o] = l(null), [p, H] = l(new Uint8Array(0)), [m, x] = l(!1), [j, Q] = l(null), [F, B] = l(null), [D, U] = l(0), [V, $] = l(0), [R, q] = l(0), [P, fe] = l(""), [ne, ie] = l(!0), [ce, X] = l(0), [oe, W] = l(!0), [se, Te] = l(!1), [Le, be] = l(!1), [ze, Y] = l(null), d = y(null), C = y(null), K = y(null), ae = y(null), G = y(null), T = y(null), _ = y(null), h = y(null), pe = !!(F && !m), we = ut(R), Se = ot(D), ge = ke(ce), Ee = Le || m; | ||
const [g, d] = l(!1), [N, L] = l(!1), [s, M] = l(null), [F, v] = l(new Uint8Array(0)), [U, C] = l(!1), [_, x] = l(null), [V, A] = l(null), [q, Y] = l(0), [fe, H] = l(0), [$, X] = l(0), [W, ze] = l(""), [ce, oe] = l(!0), [se, K] = l(0), [ae, k] = l(!0), [ue, Te] = l(!1), [we, be] = l(!1), [ge, Se] = l(!1), [Me, B] = l(null), f = S(null), T = S(null), ee = S(null), he = S(null), G = S(null), j = S(null), b = S(null), a = S(null), le = !!(V && !U), pe = ht($), Ee = st(q), E = Ve(se), Ce = we || U; | ||
Z(() => { | ||
if (!u || f) | ||
if (!g || N) | ||
return; | ||
const A = setInterval(() => { | ||
const S = performance.now(); | ||
U((N) => N + (S - V)), $(S); | ||
const c = setInterval(() => { | ||
const y = performance.now(); | ||
Y((w) => w + (y - fe)), H(y); | ||
}, 1e3); | ||
return () => clearInterval(A); | ||
}, [V, f, u]), Z(() => { | ||
if (!j || j.size === 0) | ||
return () => clearInterval(c); | ||
}, [fe, N, g]), Z(() => { | ||
if (!_ || _.size === 0) | ||
return; | ||
(async () => { | ||
var A; | ||
var c; | ||
try { | ||
Y(null); | ||
const S = new Blob([j], { | ||
type: (A = d.current) == null ? void 0 : A.mimeType | ||
}), N = URL.createObjectURL(S); | ||
N && fe(N); | ||
const O = await j.arrayBuffer(), z = new AudioContext(), je = (ve) => { | ||
B(ve), q(ve.duration - 0.06); | ||
}, me = (ve) => { | ||
Y(ve); | ||
B(null); | ||
const y = new Blob([_], { | ||
type: (c = f.current) == null ? void 0 : c.mimeType | ||
}), w = URL.createObjectURL(y); | ||
w && ze(w); | ||
const xe = await _.arrayBuffer(), He = new AudioContext(), ie = (R) => { | ||
A(R), X(R.duration - 0.06); | ||
}, $e = (R) => { | ||
B(R); | ||
}; | ||
z.decodeAudioData( | ||
O, | ||
je, | ||
me | ||
He.decodeAudioData( | ||
xe, | ||
ie, | ||
$e | ||
); | ||
} catch (S) { | ||
if (console.error("Error processing the audio blob:", S), S instanceof Error) { | ||
Y(S); | ||
} catch (y) { | ||
if (console.error("Error processing the audio blob:", y), y instanceof Error) { | ||
B(y); | ||
return; | ||
} | ||
Y(new Error("Error processing the audio blob")); | ||
B(new Error("Error processing the audio blob")); | ||
} | ||
})(); | ||
}, [j]), Z(() => { | ||
if (ze) { | ||
te(); | ||
}, [_]), Z(() => { | ||
if (Me) { | ||
ne(); | ||
return; | ||
} | ||
}, [ze]), Z(() => () => { | ||
_.current && cancelAnimationFrame(_.current), G.current && G.current.disconnect(), C.current && C.current.state !== "closed" && C.current.close(), T.current && cancelAnimationFrame(T.current), h != null && h.current && h.current.removeEventListener("ended", re), d.current && d.current.removeEventListener( | ||
}, [Me]), Z(() => () => { | ||
b.current && cancelAnimationFrame(b.current), j.current && cancelAnimationFrame(j.current), G.current && G.current.disconnect(), T.current && T.current.state !== "closed" && T.current.close(), a != null && a.current && a.current.removeEventListener("ended", P), f.current && f.current.removeEventListener( | ||
"dataavailable", | ||
ue | ||
ve | ||
); | ||
}, []), Z(() => (!oe && !se && window.addEventListener("beforeunload", b), () => { | ||
window.removeEventListener("beforeunload", b); | ||
}), [oe, se]); | ||
const b = (i) => { | ||
}, []), Z(() => (!ae && !ue && window.addEventListener("beforeunload", me), () => { | ||
window.removeEventListener("beforeunload", me); | ||
}), [ae, ue]); | ||
const me = (i) => { | ||
i.preventDefault(), i.returnValue = ""; | ||
}, Ce = () => { | ||
}, _e = () => { | ||
navigator.mediaDevices.getUserMedia({ audio: !0 }).then((i) => { | ||
te(), W(!1), $(performance.now()), I(!0), o(i), C.current = new window.AudioContext(), K.current = C.current.createAnalyser(), ae.current = new Uint8Array( | ||
K.current.frequencyBinCount | ||
), G.current = C.current.createMediaStreamSource(i), G.current.connect(K.current), d.current = new MediaRecorder(i), d.current.addEventListener( | ||
ne(), k(!1), H(performance.now()), d(!0), M(i), T.current = new window.AudioContext(), ee.current = T.current.createAnalyser(), he.current = new Uint8Array( | ||
ee.current.frequencyBinCount | ||
), G.current = T.current.createMediaStreamSource(i), G.current.connect(ee.current), f.current = new MediaRecorder(i), f.current.addEventListener( | ||
"dataavailable", | ||
ue | ||
), d.current.start(), ee(); | ||
ve | ||
), f.current.start(), te(); | ||
}).catch((i) => { | ||
if (console.error("Error starting audio recording:", i), i instanceof Error) { | ||
Y(i); | ||
B(i); | ||
return; | ||
} | ||
Y(new Error("Error starting audio recording")); | ||
B(new Error("Error starting audio recording")); | ||
}); | ||
}, ee = () => { | ||
K.current.getByteTimeDomainData(ae.current), H(new Uint8Array(ae.current)), T.current = requestAnimationFrame(ee); | ||
}, ue = (i) => { | ||
d.current && Q(i.data); | ||
}, he = () => { | ||
_.current && cancelAnimationFrame(_.current), h.current && (X(h.current.currentTime), _.current = requestAnimationFrame(he)); | ||
}, _e = () => { | ||
u || (e && e(), Ce()); | ||
}, le = () => { | ||
u && (t && t(), x(!0), I(!1), U(0), w(!1), T.current && cancelAnimationFrame(T.current), G.current && G.current.disconnect(), C.current && C.current.state !== "closed" && C.current.close(), L == null || L.getTracks().forEach((i) => i.stop()), d.current && (d.current.stop(), d.current.removeEventListener( | ||
}, te = () => { | ||
ee.current.getByteTimeDomainData(he.current), v(new Uint8Array(he.current)), j.current = requestAnimationFrame(te); | ||
}, ve = (i) => { | ||
f.current && x(i.data); | ||
}, re = () => { | ||
a.current && (K(a.current.currentTime), b.current = requestAnimationFrame(re)); | ||
}, Ne = () => { | ||
g || (e && e(), _e()); | ||
}, ye = () => { | ||
g && (t && t(), C(!0), d(!1), Y(0), L(!1), j.current && cancelAnimationFrame(j.current), G.current && G.current.disconnect(), T.current && T.current.state !== "closed" && T.current.close(), s == null || s.getTracks().forEach((i) => i.stop()), f.current && (f.current.stop(), f.current.removeEventListener( | ||
"dataavailable", | ||
ue | ||
ve | ||
))); | ||
}, te = () => { | ||
T.current && cancelAnimationFrame(T.current), h != null && h.current && h.current.removeEventListener("ended", re), _.current && cancelAnimationFrame(_.current), d.current && (d.current.removeEventListener( | ||
}, ne = () => { | ||
j.current && cancelAnimationFrame(j.current), a != null && a.current && a.current.removeEventListener("ended", P), b.current && cancelAnimationFrame(b.current), f.current && (f.current.removeEventListener( | ||
"dataavailable", | ||
ue | ||
), d.current.stop(), d.current = null), L == null || L.getTracks().forEach((i) => i.stop()), d.current = null, C.current = null, K.current = null, ae.current = null, G.current = null, T.current = null, _.current = null, c && c(), o(null), I(!1), x(!1), Q(null), B(null), U(0), $(0), q(0), fe(""), X(0), ie(!0), w(!1), H(new Uint8Array(0)), Y(null), W(!0); | ||
}, Ne = (i) => { | ||
i instanceof Blob && (te(), Te(!0), W(!1), x(!0), I(!1), U(0), w(!1), Q(i)); | ||
}, ye = () => { | ||
var i, A, S, N; | ||
if (u) { | ||
w((O) => !O), ((i = d.current) == null ? void 0 : i.state) === "recording" ? (n && n(), (A = d.current) == null || A.pause(), U((O) => O + (performance.now() - V)), T.current && cancelAnimationFrame(T.current)) : (r && r(), (S = d.current) == null || S.resume(), $(performance.now()), T.current = requestAnimationFrame(ee)); | ||
ve | ||
), f.current.stop(), f.current = null), s == null || s.getTracks().forEach((i) => i.stop()), f.current = null, T.current = null, ee.current = null, he.current = null, G.current = null, j.current = null, b.current = null, o && o(), M(null), d(!1), C(!1), x(null), A(null), Y(0), H(0), X(0), ze(""), K(0), oe(!0), L(!1), v(new Uint8Array(0)), B(null), k(!0); | ||
}, Ae = (i) => { | ||
i instanceof Blob && (ne(), Te(!0), k(!1), C(!0), d(!1), Y(0), L(!1), x(i)); | ||
}, Ie = () => { | ||
if (a.current && a.current.paused) { | ||
const i = a.current.play(); | ||
i !== void 0 && i.catch((c) => { | ||
console.error(c), z && z( | ||
c instanceof Error ? c : new Error("Error playing audio") | ||
); | ||
}); | ||
} | ||
}, je = () => { | ||
Se(!0), Ie(); | ||
}, Le = () => { | ||
var i, c, y; | ||
if (g) { | ||
L((w) => !w), ((i = f.current) == null ? void 0 : i.state) === "recording" ? (n && n(), (c = f.current) == null || c.pause(), Y((w) => w + (performance.now() - fe)), j.current && cancelAnimationFrame(j.current)) : (r && r(), (y = f.current) == null || y.resume(), H(performance.now()), j.current = requestAnimationFrame(te)); | ||
return; | ||
} | ||
if (h.current && pe) | ||
if (_.current && cancelAnimationFrame(_.current), h.current.paused) | ||
g && ce === 0 && g(), M && ce !== 0 && M(), h.current.addEventListener("ended", re), he(), ie(!1), (N = h.current) == null || N.play(); | ||
else { | ||
v && v(), h.current.removeEventListener("ended", re), h.current.pause(), ie(!0); | ||
const O = h.current.currentTime; | ||
X(O), h.current.currentTime = O; | ||
if (a.current) { | ||
if (a.current.paused && le || ge) | ||
p && se === 0 && p(), I && se !== 0 && I(), a.current.addEventListener("ended", P), oe(!1), requestAnimationFrame(re), ge || Ie(), Se(!1); | ||
else if (!a.current.paused && le) { | ||
b.current && cancelAnimationFrame(b.current), m && m(), a.current.removeEventListener("ended", P), a.current.pause(), oe(!0); | ||
const w = a.current.currentTime; | ||
K(w), a.current.currentTime = w; | ||
} | ||
}, re = () => { | ||
ie(!0), s && s(), h != null && h.current && (h.current.currentTime = 0, X(0)); | ||
}, Ae = () => { | ||
var A; | ||
if (!P) | ||
} | ||
}, P = () => { | ||
b.current && cancelAnimationFrame(b.current), oe(!0), u && u(), a != null && a.current && (a.current.currentTime = 0, K(0)); | ||
}, O = () => { | ||
var c; | ||
if (!W) | ||
return; | ||
const i = document.createElement("a"); | ||
i.href = P, i.download = `recorded_audio${at( | ||
(A = d.current) == null ? void 0 : A.mimeType | ||
)}`, document.body.appendChild(i), i.click(), document.body.removeChild(i), URL.revokeObjectURL(P); | ||
i.href = W, i.download = `recorded_audio${ut( | ||
(c = f.current) == null ? void 0 : c.mimeType | ||
)}`, document.body.appendChild(i), i.click(), document.body.removeChild(i), URL.revokeObjectURL(W); | ||
}; | ||
return { | ||
isRecordingInProgress: u, | ||
isPausedRecording: f, | ||
audioData: p, | ||
recordingTime: D, | ||
isProcessingRecordedAudio: Ee, | ||
recordedBlob: j, | ||
mediaRecorder: d.current, | ||
duration: R, | ||
currentAudioTime: ce, | ||
audioSrc: P, | ||
isPausedRecordedAudio: ne, | ||
bufferFromRecordedBlob: F, | ||
isCleared: oe, | ||
isAvailableRecordedAudio: pe, | ||
isPreloadedBlob: se, | ||
formattedDuration: we, | ||
formattedRecordingTime: Se, | ||
formattedRecordedAudioCurrentTime: ge, | ||
setPreloadedAudioBlob: Ne, | ||
startRecording: _e, | ||
togglePauseResume: ye, | ||
stopRecording: le, | ||
saveAudioFile: Ae, | ||
clearCanvas: te, | ||
setCurrentAudioTime: X, | ||
error: ze, | ||
_setIsProcessingAudioOnComplete: x, | ||
isRecordingInProgress: g, | ||
isPausedRecording: N, | ||
audioData: F, | ||
recordingTime: q, | ||
isProcessingRecordedAudio: Ce, | ||
recordedBlob: _, | ||
mediaRecorder: f.current, | ||
duration: $, | ||
currentAudioTime: se, | ||
audioSrc: W, | ||
isPausedRecordedAudio: ce, | ||
bufferFromRecordedBlob: V, | ||
isCleared: ae, | ||
isAvailableRecordedAudio: le, | ||
isPreloadedBlob: ue, | ||
formattedDuration: pe, | ||
formattedRecordingTime: Ee, | ||
formattedRecordedAudioCurrentTime: E, | ||
setPreloadedAudioBlob: Ae, | ||
onClickAutoplayAudioOnLoad: je, | ||
startRecording: Ne, | ||
togglePauseResume: Le, | ||
stopRecording: ye, | ||
saveAudioFile: O, | ||
clearCanvas: ne, | ||
setCurrentAudioTime: K, | ||
error: Me, | ||
_isAutoplayPreloadedBlob: ge, | ||
_setIsProcessingAudioOnComplete: C, | ||
_setIsProcessingOnResize: be, | ||
audioRef: h | ||
audioRef: a | ||
}; | ||
@@ -861,0 +877,0 @@ } |
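Since the bundle above is minified, here is a readable reconstruction of the core playback change (identifier names are descriptive guesses; only the control flow is taken from the diff): `onClickAutoplayAudioOnLoad` marks a pending autoplay and attempts playback, and any rejection from `audio.play()` is routed to the new `onErrorPlayingAudio` callback.

```typescript
// Reconstructed sketch of the minified hook internals; names are guesses.
const playAudio = () => {
  if (audioRef.current && audioRef.current.paused) {
    const playPromise = audioRef.current.play();
    if (playPromise !== undefined) {
      playPromise.catch((error) => {
        console.error(error);
        // Route play() rejections (e.g. autoplay-policy blocks) to the new callback.
        onErrorPlayingAudio?.(
          error instanceof Error ? error : new Error("Error playing audio"),
        );
      });
    }
  }
};

const onClickAutoplayAudioOnLoad = () => {
  setIsAutoplayPreloadedBlob(true); // consumed once the preloaded blob finishes decoding
  playAudio();
};
```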
@@ -26,2 +26,3 @@ import { Dispatch, MutableRefObject, SetStateAction } from "react";
formattedRecordedAudioCurrentTime: string;
onClickAutoplayAudioOnLoad: () => void;
startRecording: () => void;
@@ -34,2 +35,3 @@ togglePauseResume: () => void;
error: Error | null;
_isAutoplayPreloadedBlob: boolean;
_setIsProcessingAudioOnComplete: Dispatch<SetStateAction<boolean>>;
@@ -109,2 +111,3 @@ _setIsProcessingOnResize: Dispatch<SetStateAction<boolean>>;
onResumedAudioPlayback?: () => void;
onErrorPlayingAudio?: (error: Error) => void;
}
@@ -111,0 +114,0 @@ export interface UseWebWorkerParams<T> {
{
"name": "react-voice-visualizer",
"private": false,
"version": "1.3.8",
"version": "1.4.0",
"type": "module",
@@ -6,0 +6,0 @@ "author": "Yurii Zarytskyi",
README.md
@@ -47,3 +47,3 @@ # react-voice-visualizer [Demo App](https://react-voice-visualizer.vercel.app/)
```jsx
```typescript jsx
import { useEffect } from "react";
@@ -73,3 +73,3 @@ import { useVoiceVisualizer, VoiceVisualizer } from "react-voice-visualizer";
console.log(error);
console.error(error);
}, [error]);
@@ -90,4 +90,5 @@
```
Example:
```jsx
```typescript jsx
import { useEffect } from 'react';
@@ -101,2 +102,4 @@ import { useVoiceVisualizer, VoiceVisualizer } from 'react-voice-visualizer';
setPreloadedAudioBlob,
togglePauseResume,
isAvailableRecordedAudio,
isPreloadedBlob,
@@ -109,3 +112,3 @@ error,
// Set the preloaded audioBlob when the component mounts
// Assuming 'audioBlob' is defined somewhere
// Assuming 'audioBlob' is defined somewhere above
if (audioBlob) {
@@ -120,11 +123,25 @@ setPreloadedAudioBlob(audioBlob);
console.log(error);
console.error(error);
}, [error]);

// Function to handle audio playback
const handleUserClickToPlayAudio = () => {
  if (isAvailableRecordedAudio) {
    togglePauseResume();
  }
};

return (
<VoiceVisualizer
  isControlPanelShown={false} // Set to 'false' in most cases, but should be determined based on the specific user's use case.
  controls={recorderControls}
  ref={audioRef}
/>
<div>
  {/* Button to initiate audio playback */}
  <button onClick={handleUserClickToPlayAudio}>Click to Toggle Play Audio</button>
  <VoiceVisualizer
    isControlPanelShown={false} // Set to 'false' in most cases. You should use your own UI.
    isDefaultUIShown={false} // Set to 'false' in most cases, but should be determined based on the specific user's use case.
    controls={recorderControls}
    ref={audioRef}
  />
</div>
);
@@ -136,2 +153,78 @@ };
##### Autoplay Audio on Load

If you want the audio to autoplay as soon as it becomes available after a user's click, refer to the following example.

This example illustrates how to use the `onClickAutoplayAudioOnLoad` and `setPreloadedAudioBlob` functions to start audio playback only when a user initiates it by clicking a button. Otherwise, `audio.play()` may throw an error, which can be handled using the `onErrorPlayingAudio` callback passed among the hook's parameters (`useVoiceVisualizerParams`).

By clicking the "Click to Play Audio" button, audio data is fetched and prepared for playback in compliance with browser autoplay policies: audio begins playing only in response to a user's interaction.
Example:

```typescript jsx
import { useEffect } from 'react';
import { useVoiceVisualizer, VoiceVisualizer } from 'react-voice-visualizer';

const App = ({ audioUrl }) => {
  const recorderControls = useVoiceVisualizer();
  const {
    // ... (Extracted controls and states, if necessary)
    setPreloadedAudioBlob,
    onClickAutoplayAudioOnLoad, // Import the onClickAutoplayAudioOnLoad function
    togglePauseResume,
    isAvailableRecordedAudio,
    isPreloadedBlob,
    error,
    audioRef,
  } = recorderControls;

  // Get and log any error when it occurs
  useEffect(() => {
    if (!error) return;
    console.error(error);
  }, [error]);

  // Function to handle user click event for audio autoplay
  const handleUserClickToAutoplayAudio = () => {
    if (isAvailableRecordedAudio) {
      togglePauseResume();
    } else {
      // Fetch the audio data and trigger autoplay upon user interaction
      onClickAutoplayAudioOnLoad(); // Call the onClickAutoplayAudioOnLoad function
      fetch(audioUrl)
        .then((response) => {
          if (!response.ok) {
            throw new Error('The network response was not successful');
          }
          return response.blob();
        })
        .then((blob) => {
          setPreloadedAudioBlob(blob); // Set blob
        })
        .catch((err) => {
          // Handle errors, both network-related and those that occur during blob retrieval
          console.error(err);
        });
    }
  };

  return (
    <div>
      {/* Button to initiate audio playback */}
      <button onClick={handleUserClickToAutoplayAudio}>Click to Play Audio</button>
      <VoiceVisualizer
        isControlPanelShown={false} // Set to 'false'. You should use your own UI.
        isDefaultUIShown={false} // Set to 'false' in most cases, but should be determined based on the specific user's use case.
        controls={recorderControls}
        ref={audioRef}
      />
    </div>
  );
};

export default App;
```
## Getting started

@@ -161,47 +254,50 @@
| Parameter                | Type                     | Description                                                                                                                                            |
|:-------------------------|:-------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------|
| `onStartRecording`       | `() => void`             | Callback function triggered when recording starts.                                                                                                      |
| `onStopRecording`        | `() => void`             | Callback function triggered when recording stops.                                                                                                       |
| `onPausedRecording`      | `() => void`             | Callback function triggered when recording is paused.                                                                                                   |
| `onResumedRecording`     | `() => void`             | Callback function triggered when recording is resumed.                                                                                                  |
| `onClearCanvas`          | `() => void`             | Callback function triggered when the canvas is cleared.                                                                                                 |
| `onEndAudioPlayback`     | `() => void`             | Callback function triggered when audio playback ends.                                                                                                   |
| `onStartAudioPlayback`   | `() => void`             | Callback function triggered when audio playback starts.                                                                                                 |
| `onPausedAudioPlayback`  | `() => void`             | Callback function triggered when audio playback is paused.                                                                                              |
| `onResumedAudioPlayback` | `() => void`             | Callback function triggered when audio playback is resumed.                                                                                             |
| `onErrorPlayingAudio`    | `(error: Error) => void` | Callback function invoked when an error occurs during the execution of `audio.play()`. It provides an opportunity to handle and respond to such errors. |
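All of these callbacks are optional. As a minimal sketch (the handler bodies here are placeholders, not part of the library), you can pass any subset of them when calling the hook:

```jsx
import { useVoiceVisualizer } from "react-voice-visualizer";

const recorderControls = useVoiceVisualizer({
    onStartRecording: () => console.log("Recording started"),
    onStopRecording: () => console.log("Recording stopped"),
    // Invoked if audio.play() rejects, e.g. due to a browser autoplay policy
    onErrorPlayingAudio: (error) => console.error("Playback failed:", error),
});
```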
##### Returns
| Returns                             | Type                                                | Description                                                                                                                                                                                                                                                                     |
|:------------------------------------|:----------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `audioRef`                          | `MutableRefObject`<br/>`<HTMLAudioElement \| null>` | Reference to the audio element used for playback.                                                                                                                                                                                                                                 |
| `isRecordingInProgress`             | `boolean`                                           | Indicates if audio recording is currently in progress.                                                                                                                                                                                                                            |
| `isPausedRecording`                 | `boolean`                                           | Indicates if audio recording is currently paused.                                                                                                                                                                                                                                 |
| `audioData`                         | `Uint8Array`                                        | Audio data for real-time visualization.                                                                                                                                                                                                                                           |
| `recordingTime`                     | `number`                                            | Elapsed recording time in milliseconds.                                                                                                                                                                                                                                           |
| `mediaRecorder`                     | `MediaRecorder \| null`                             | MediaRecorder instance used for recording audio.                                                                                                                                                                                                                                  |
| `duration`                          | `number`                                            | Duration of the recorded audio in seconds.                                                                                                                                                                                                                                        |
| `currentAudioTime`                  | `number`                                            | Current playback time of the recorded audio in seconds.                                                                                                                                                                                                                           |
| `audioSrc`                          | `string`                                            | Source URL of the recorded audio file for playback.                                                                                                                                                                                                                               |
| `isPausedRecordedAudio`             | `boolean`                                           | Indicates if recorded audio playback is paused.                                                                                                                                                                                                                                   |
| `isProcessingRecordedAudio`         | `boolean`                                           | Indicates if the recorded audio is being processed and the 'Processing Audio...' text is shown.                                                                                                                                                                                    |
| `isCleared`                         | `boolean`                                           | Indicates if the canvas has been cleared.                                                                                                                                                                                                                                         |
| `isPreloadedBlob`                   | `boolean`                                           | Indicates whether a blob of recorded audio data has been preloaded.                                                                                                                                                                                                               |
| `isAvailableRecordedAudio`          | `boolean`                                           | Indicates whether recorded audio is available and not currently being processed. Use this value to check whether it is an appropriate time to work with the recorded audio data in your application.                                                                              |
| `recordedBlob`                      | `Blob \| null`                                      | Recorded audio data in Blob format.                                                                                                                                                                                                                                               |
| `bufferFromRecordedBlob`            | `AudioBuffer \| null`                               | Audio buffer from the recorded Blob.                                                                                                                                                                                                                                              |
| `formattedDuration`                 | `string`                                            | Formatted duration in the format 09:51m.                                                                                                                                                                                                                                          |
| `formattedRecordingTime`            | `string`                                            | Formatted current recording time in the format 09:51.                                                                                                                                                                                                                             |
| `formattedRecordedAudioCurrentTime` | `string`                                            | Formatted current playback time of the recorded audio in the format 09:51:1.                                                                                                                                                                                                      |
| `setPreloadedAudioBlob`             | `(audioBlob: Blob) => void`                         | Loads an existing audio blob for further processing, playback, and visualization. The `audioBlob` parameter represents the audio data stored in Blob format.                                                                                                                      |
| `onClickAutoplayAudioOnLoad`        | `() => void`                                        | Use this function together with `setPreloadedAudioBlob` when you load a completed audio blob. To start playing the audio as soon as it becomes available, call this function, but only in response to a user's click event; this ensures compliance with browser autoplay policies. |
| `startRecording`                    | `() => void`                                        | Function to start audio recording.                                                                                                                                                                                                                                                |
| `togglePauseResume`                 | `() => void`                                        | Function to toggle pause/resume during recording and playback of recorded audio.                                                                                                                                                                                                  |
| `stopRecording`                     | `() => void`                                        | Function to stop audio recording.                                                                                                                                                                                                                                                 |
| `saveAudioFile`                     | `() => void`                                        | Saves the recorded audio as a file in the `webm` format; no other output formats are supported. If you need a different format, you can convert the Blob with an external library such as FFmpeg (see the sketch after this table).                                               |
| `clearCanvas`                       | `() => void`                                        | Function to clear the visualization canvas.                                                                                                                                                                                                                                       |
| `setCurrentAudioTime`               | `Dispatch<SetStateAction<number>>`                  | Internal function to handle current audio time updates during playback.                                                                                                                                                                                                           |
| `error`                             | `Error \| null`                                     | Error object if any error occurred during recording or playback.                                                                                                                                                                                                                  |
| `_isAutoplayPreloadedBlob`          | `boolean`                                           | Internal state indicating whether the `onClickAutoplayAudioOnLoad` function has been called. It is set to `false` once the audio data becomes available.                                                                                                                           |
| `_setIsProcessingAudioOnComplete`   | `Dispatch<SetStateAction<boolean>>`                 | Internal function to set the `isProcessingAudioOnComplete` state.                                                                                                                                                                                                                  |
| `_setIsProcessingOnResize`          | `Dispatch<SetStateAction<boolean>>`                 | Internal function to set the `isProcessingOnResize` state.                                                                                                                                                                                                                         |
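Since `saveAudioFile` only outputs `webm`, converting to another format has to happen on the Blob itself. Below is a minimal sketch of such a conversion, assuming the `@ffmpeg/ffmpeg` package with its v0.11 `createFFmpeg`/`fetchFile` API; this library is not part of react-voice-visualizer, and the file names are arbitrary:

```js
import { createFFmpeg, fetchFile } from "@ffmpeg/ffmpeg";

const ffmpeg = createFFmpeg({ log: false });

// Convert a recorded webm Blob (e.g. `recordedBlob` from the hook) to mp3
async function convertWebmToMp3(webmBlob) {
    if (!ffmpeg.isLoaded()) await ffmpeg.load();

    // Write the input into FFmpeg's in-memory file system, transcode, and read it back
    ffmpeg.FS("writeFile", "input.webm", await fetchFile(webmBlob));
    await ffmpeg.run("-i", "input.webm", "output.mp3");
    const data = ffmpeg.FS("readFile", "output.mp3");

    return new Blob([data.buffer], { type: "audio/mpeg" });
}
```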
@@ -208,0 +304,0 @@ #### Load and visualize any Audio