react-voice-visualizer
Comparing version 1.3.1 to 1.3.2
(function(){"use strict";(e=>{try{if(typeof window>"u")return;var i=document.createElement("style");i.appendChild(document.createTextNode(e)),document.head.appendChild(i)}catch(o){console.error("vite-plugin-css-injected-by-js",o)}})(".voice-visualizer__buttons-container{display:flex;justify-content:center;align-items:center;column-gap:20px;row-gap:15px;flex-wrap:wrap;margin-bottom:40px}.voice-visualizer__btn-center{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#fff;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s}.voice-visualizer__btn-center:hover{background-color:#eaeaea}.voice-visualizer__btn-center>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause{background-color:#ff3030}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause:hover{background-color:#ff4f4f}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause>img{height:50%;max-height:16px}.voice-visualizer__btn-center:hover{border:4px solid #9f9f9f}.voice-visualizer__btn-left{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#ff3030;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s,opacity .3s}.voice-visualizer__btn-left:hover{background-color:#ff4f4f}.voice-visualizer__btn-left:disabled{opacity:.6;background-color:#ff3030}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone{background-color:#fff}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-left>img{width:auto;height:50%;max-height:16px}.voice-visualizer__btn-left:hover{border:4px solid #9f9f9f}.voice-visualizer__btn{box-sizing:border-box;min-width:100px;min-height:60px;padding:5px 20px;border-radius:40px;font-size:15px;background-color:#f0f0f0;transition:background-color .3s,opacity .3s}.voice-visualizer__btn:disabled{opacity:.8;background-color:#f0f0f0}.voice-visualizer__btn:hover{background-color:#bebebe}.voice-visualizer__canvas-container{position:relative;width:fit-content;margin:0 auto;overflow:hidden}.voice-visualizer__canvas-container canvas{display:block}.voice-visualizer__canvas-microphone-btn{position:absolute;top:50%;left:50%;width:auto;max-width:12%;min-width:24px;height:50%;max-height:100px;background-color:transparent;border:none;outline:none;transform:translate(-50%,-50%)}.voice-visualizer__canvas-microphone-icon{width:100%;height:100%;will-change:transform;transition:transform .35s}.voice-visualizer__canvas-microphone-btn:hover .voice-visualizer__canvas-microphone-icon{transform:scale(1.03)}.voice-visualizer__canvas-audio-wave-icon{position:absolute;top:50%;left:50%;width:auto;max-width:40%;height:40%;max-height:100px;transform:translate(-118%,-50%) scale(-1)}.voice-visualizer__canvas-audio-wave-icon2{transform:translate(18%,-50%)}.voice-visualizer__canvas-audio-processing{position:absolute;top:50%;left:50%;margin:0;transform:translate(-50%,-50%)}.voice-visualizer__progress-indicator-hovered{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#85858599}.voice-visualizer__progress-indicator-hovered-time{position:absolute;top:3%;left:1px;width:fit-content;margin:0;padding:0 7px;opacity:.8;font-size:12px;border-radius:0 4px 4px 
0;background-color:#575757;text-align:left}.voice-visualizer__progress-indicator-hovered-time.voice-visualizer__progress-indicator-hovered-time-left{left:unset;right:1px;border-radius:4px 0 0 4px;text-align:right}.voice-visualizer__progress-indicator{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#efefef}.voice-visualizer__progress-indicator-time{position:absolute;top:3%;left:1px;width:fit-content;box-sizing:border-box;min-width:41px;margin:0;padding:0 7px;font-size:12px;border-radius:0 4px 4px 0;text-align:left;color:#000;font-weight:500;background-color:#efefef}.voice-visualizer__progress-indicator-time.voice-visualizer__progress-indicator-time-left{left:unset;right:1px;border-radius:4px 0 0 4px;text-align:right}.voice-visualizer__audio-info-container{box-sizing:border-box;height:55px;display:flex;align-items:center;justify-content:center;gap:30px}.voice-visualizer__audio-info-time{margin:15px 0;min-width:38px;text-align:left}.voice-visualizer__visually-hidden{position:absolute;width:1px;height:1px;margin:-1px;padding:0;border:4px solid #c5c5c5;white-space:nowrap;clip-path:inset(100%);clip:rect(0 0 0 0);overflow:hidden}")})(); | ||
import { jsx as a, jsxs as ve, Fragment as Re } from "react/jsx-runtime"; | ||
import { useState as d, useRef as N, useCallback as Je, forwardRef as Qe, useEffect as F, useLayoutEffect as Ve } from "react"; | ||
import { jsx as s, jsxs as ve, Fragment as Re } from "react/jsx-runtime"; | ||
import { useState as l, useRef as S, useCallback as Ve, forwardRef as qe, useEffect as F, useLayoutEffect as Xe } from "react"; | ||
const Ee = ({ | ||
@@ -17,6 +17,6 @@ canvas: e, | ||
w: o, | ||
h: l | ||
h: m | ||
}) => { | ||
e.fillStyle = r, e.beginPath(), e.roundRect ? (e.roundRect(t, c, o, l, n), e.fill()) : e.fillRect(t, c, o, l); | ||
}, qe = ({ | ||
e.fillStyle = r, e.beginPath(), e.roundRect ? (e.roundRect(t, c, o, m, n), e.fill()) : e.fillRect(t, c, o, m); | ||
}, Ke = ({ | ||
barsData: e, | ||
@@ -28,6 +28,6 @@ canvas: r, | ||
mainBarColor: o, | ||
secondaryBarColor: l, | ||
secondaryBarColor: m, | ||
currentAudioTime: f = 0, | ||
rounded: A, | ||
duration: s | ||
rounded: w, | ||
duration: a | ||
}) => { | ||
@@ -37,9 +37,9 @@ const M = Ee({ canvas: r, backgroundColor: c }); | ||
return; | ||
const { context: g, height: p } = M, I = f / s; | ||
const { context: g, height: p } = M, I = f / a; | ||
e.forEach((u, L) => { | ||
const R = L / e.length, h = I > R; | ||
const O = L / e.length, h = I > O; | ||
Ae({ | ||
context: g, | ||
color: h ? l : o, | ||
rounded: A, | ||
color: h ? m : o, | ||
rounded: w, | ||
x: L * (n + t * n), | ||
@@ -52,3 +52,3 @@ y: p / 2 - u.max, | ||
}; | ||
function Xe({ | ||
function et({ | ||
context: e, | ||
@@ -71,3 +71,3 @@ color: r, | ||
} | ||
const Ke = ({ | ||
const tt = ({ | ||
audioData: e, | ||
@@ -79,6 +79,6 @@ unit: r, | ||
isRecordingInProgress: o, | ||
isPausedRecording: l, | ||
isPausedRecording: m, | ||
picks: f, | ||
backgroundColor: A, | ||
barWidth: s, | ||
backgroundColor: w, | ||
barWidth: a, | ||
mainBarColor: M, | ||
@@ -90,50 +90,50 @@ secondaryBarColor: g, | ||
}) => { | ||
const L = Ee({ canvas: c, backgroundColor: A }); | ||
const L = Ee({ canvas: c, backgroundColor: w }); | ||
if (!L) | ||
return; | ||
const { context: R, height: h, width: H, halfWidth: j } = L; | ||
const { context: O, height: h, width: $, halfWidth: A } = L; | ||
if (e != null && e.length && o) { | ||
const Z = Math.max(...e); | ||
if (!l) { | ||
if (t.current >= s) { | ||
const U = Math.max(...e); | ||
if (!m) { | ||
if (t.current >= a) { | ||
t.current = 0; | ||
const b = (h - Z / 258 * h) / h * 100, U = (-h + Z / 258 * h * 2) / h * 100, $ = n.current === s ? { | ||
startY: b, | ||
barHeight: U | ||
const E = (h - U / 258 * h) / h * 100, Y = (-h + U / 258 * h * 2) / h * 100, R = n.current === a ? { | ||
startY: E, | ||
barHeight: Y | ||
} : null; | ||
n.current >= r ? n.current = s : n.current += s, f.length > (u ? H : j) / s && f.pop(), f.unshift($); | ||
n.current >= r ? n.current = a : n.current += a, f.length > (u ? $ : A) / a && f.pop(), f.unshift(R); | ||
} | ||
t.current += 1; | ||
} | ||
!u && J(), I && Ae({ | ||
context: R, | ||
!u && Q(), I && Ae({ | ||
context: O, | ||
rounded: p, | ||
color: M, | ||
x: u ? H : j, | ||
y: h - Z / 258 * h, | ||
h: -h + Z / 258 * h * 2, | ||
w: s | ||
x: u ? $ : A, | ||
y: h - U / 258 * h, | ||
h: -h + U / 258 * h * 2, | ||
w: a | ||
}); | ||
let k = (u ? H : j) - t.current; | ||
f.forEach((b) => { | ||
b && Ae({ | ||
context: R, | ||
let W = (u ? $ : A) - t.current; | ||
f.forEach((E) => { | ||
E && Ae({ | ||
context: O, | ||
color: M, | ||
rounded: p, | ||
x: k, | ||
y: b.startY * h / 100 > h / 2 - 1 ? h / 2 - 1 : b.startY * h / 100, | ||
h: b.barHeight * h / 100 > 2 ? b.barHeight * h / 100 : 2, | ||
w: s | ||
}), k -= s; | ||
x: W, | ||
y: E.startY * h / 100 > h / 2 - 1 ? h / 2 - 1 : E.startY * h / 100, | ||
h: E.barHeight * h / 100 > 2 ? E.barHeight * h / 100 : 2, | ||
w: a | ||
}), W -= a; | ||
}); | ||
} else | ||
f.length = 0; | ||
function J() { | ||
Xe({ | ||
context: R, | ||
function Q() { | ||
et({ | ||
context: O, | ||
color: g, | ||
rounded: p, | ||
width: H, | ||
width: $, | ||
height: h, | ||
barWidth: s | ||
barWidth: a | ||
}); | ||
@@ -155,3 +155,3 @@ } | ||
).charAt(0)}`; | ||
}, et = (e) => { | ||
}, rt = (e) => { | ||
const r = Math.floor(e / 1e3), n = Math.floor(r / 3600), t = Math.floor(r % 3600 / 60), c = r % 60; | ||
@@ -171,3 +171,3 @@ return n > 0 ? `${String(n).padStart(2, "0")}:${String(t).padStart( | ||
} | ||
const tt = ({ | ||
const nt = ({ | ||
bufferData: e, | ||
@@ -179,22 +179,22 @@ height: r, | ||
}) => { | ||
const o = n / (t + c * t), l = Math.floor(e.length / o), f = r / 2; | ||
let A = [], s = 0; | ||
const o = n / (t + c * t), m = Math.floor(e.length / o), f = r / 2; | ||
let w = [], a = 0; | ||
for (let M = 0; M < o; M++) { | ||
const g = []; | ||
let p = 0; | ||
for (let u = 0; u < l && M * l + u < e.length; u++) { | ||
const L = e[M * l + u]; | ||
for (let u = 0; u < m && M * m + u < e.length; u++) { | ||
const L = e[M * m + u]; | ||
L > 0 && (g.push(L), p++); | ||
} | ||
const I = g.reduce((u, L) => u + L, 0) / p; | ||
I > s && (s = I), A.push({ max: I }); | ||
I > a && (a = I), w.push({ max: I }); | ||
} | ||
if (f * 0.95 > s * f) { | ||
const M = f * 0.95 / s; | ||
A = A.map((g) => ({ | ||
if (f * 0.95 > a * f) { | ||
const M = f * 0.95 / a; | ||
w = w.map((g) => ({ | ||
max: g.max > 0.01 ? g.max * M : 1 | ||
})); | ||
} | ||
return A; | ||
}, rt = (e) => { | ||
return w; | ||
}, it = (e) => { | ||
if (!e) | ||
@@ -204,3 +204,3 @@ return ""; | ||
return r && r.length >= 2 ? `.${r[1]}` : ""; | ||
}, nt = (e) => { | ||
}, ct = (e) => { | ||
const r = Math.floor(e / 3600), n = Math.floor(e % 3600 / 60), t = e % 60, c = Math.floor( | ||
@@ -217,3 +217,3 @@ (t - Math.floor(t)) * 1e3 | ||
).charAt(0)}${String(c).charAt(1)}s`; | ||
}, it = (e) => { | ||
}, ot = (e) => { | ||
onmessage = (r) => { | ||
@@ -223,4 +223,4 @@ postMessage(e(r.data)); | ||
}; | ||
function ct(e, r) { | ||
const [n, t] = d(r); | ||
function st(e, r) { | ||
const [n, t] = l(r); | ||
return { | ||
@@ -230,17 +230,17 @@ result: n, | ||
run: (o) => { | ||
const l = new Worker( | ||
const m = new Worker( | ||
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions | ||
URL.createObjectURL(new Blob([`(${it})(${e})`])) | ||
URL.createObjectURL(new Blob([`(${ot})(${e})`])) | ||
); | ||
l.onmessage = (f) => { | ||
f.data && (t(f.data), l.terminate()); | ||
}, l.onerror = (f) => { | ||
console.error(f.message), l.terminate(); | ||
}, l.postMessage(o); | ||
m.onmessage = (f) => { | ||
f.data && (t(f.data), m.terminate()); | ||
}, m.onerror = (f) => { | ||
console.error(f.message), m.terminate(); | ||
}, m.postMessage(o); | ||
} | ||
}; | ||
} | ||
const ot = (e, r = 250) => { | ||
const n = N(); | ||
return Je( | ||
const at = (e, r = 250) => { | ||
const n = S(); | ||
return Ve( | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
@@ -256,7 +256,7 @@ (...t) => { | ||
}; | ||
const at = ({ | ||
const ut = ({ | ||
color: e = "#000000", | ||
stroke: r = 2, | ||
className: n | ||
}) => /* @__PURE__ */ a( | ||
}) => /* @__PURE__ */ s( | ||
"svg", | ||
@@ -268,3 +268,3 @@ { | ||
className: n, | ||
children: /* @__PURE__ */ a( | ||
children: /* @__PURE__ */ s( | ||
"path", | ||
@@ -283,3 +283,3 @@ { | ||
reflect: r | ||
}) => /* @__PURE__ */ a( | ||
}) => /* @__PURE__ */ s( | ||
"svg", | ||
@@ -290,3 +290,3 @@ { | ||
className: `voice-visualizer__canvas-audio-wave-icon ${r ? "voice-visualizer__canvas-audio-wave-icon2" : ""}`, | ||
children: /* @__PURE__ */ a( | ||
children: /* @__PURE__ */ s( | ||
"path", | ||
@@ -299,3 +299,3 @@ { | ||
} | ||
), Fe = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjMiIGhlaWdodD0iMzMiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTEuMSAxNi43MmMwIDMgLjk2IDUuOCAzLjYxIDcuOTVhOS45NiA5Ljk2IDAgMCAwIDYuNSAyLjE3bTAgMHY0LjM0aDQuMzQtOC42N200LjM0LTQuMzRjMi4zNSAwIDQuNDItLjQ4IDYuNS0yLjE3YTkuODcgOS44NyAwIDAgMCAzLjYxLTcuOTVNMTEuMjIgMS44MmMtMS40NSAwLTIuNS4zNy0zLjMuOTNhNS42IDUuNiAwIDAgMC0xLjg0IDIuNGMtLjkgMi4wNi0xLjEgNC43Ny0xLjEgNy4yNCAwIDIuNDYuMiA1LjE3IDEuMSA3LjI0YTUuNiA1LjYgMCAwIDAgMS44NCAyLjRjLjguNTUgMS44NS45MiAzLjMuOTIgMS40NCAwIDIuNS0uMzcgMy4yOS0uOTNhNS42IDUuNiAwIDAgMCAxLjg0LTIuNGMuOS0yLjA2IDEuMS00Ljc3IDEuMS03LjIzIDAtMi40Ny0uMi01LjE4LTEuMS03LjI0YTUuNiA1LjYgMCAwIDAtMS44NC0yLjQgNS41MiA1LjUyIDAgMCAwLTMuMy0uOTNaIiBzdHJva2U9IiMwMDAiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCIgc3Ryb2tlLWxpbmVqb2luPSJyb3VuZCIvPgo8L3N2Zz4K", st = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjYiIGhlaWdodD0iMjQiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE4Ljc1IDYuMTZjNC4zMSAyLjYgNi40NiAzLjkgNi40NiA1Ljg0IDAgMS45NS0yLjE1IDMuMjQtNi40NiA1Ljg0bC00Ljg0IDIuOTJjLTQuMzEgMi42LTYuNDYgMy44OS04LjA4IDIuOTItMS42Mi0uOTgtMS42Mi0zLjU3LTEuNjItOC43NlY5LjA4YzAtNS4xOSAwLTcuNzggMS42Mi04Ljc2IDEuNjItLjk3IDMuNzcuMzMgOC4wOCAyLjkybDQuODQgMi45MloiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", Ue = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjEiIGhlaWdodD0iMjkiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE0IDMuNWEzLjUgMy41IDAgMSAxIDcgMHYyMmEzLjUgMy41IDAgMSAxLTcgMHYtMjJaIiBmaWxsPSIjZmZmIi8+CiAgPHJlY3Qgd2lkdGg9IjciIGhlaWdodD0iMjkiIHJ4PSIzLjUiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", ut = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjciIGhlaWdodD0iMjUiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHJlY3QgeD0iLjIxIiB3aWR0aD0iMjYiIGhlaWdodD0iMjUiIHJ4PSI1IiBmaWxsPSIjZmZmIi8+Cjwvc3ZnPgo=", mt = Qe( | ||
), Fe = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjMiIGhlaWdodD0iMzMiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTEuMSAxNi43MmMwIDMgLjk2IDUuOCAzLjYxIDcuOTVhOS45NiA5Ljk2IDAgMCAwIDYuNSAyLjE3bTAgMHY0LjM0aDQuMzQtOC42N200LjM0LTQuMzRjMi4zNSAwIDQuNDItLjQ4IDYuNS0yLjE3YTkuODcgOS44NyAwIDAgMCAzLjYxLTcuOTVNMTEuMjIgMS44MmMtMS40NSAwLTIuNS4zNy0zLjMuOTNhNS42IDUuNiAwIDAgMC0xLjg0IDIuNGMtLjkgMi4wNi0xLjEgNC43Ny0xLjEgNy4yNCAwIDIuNDYuMiA1LjE3IDEuMSA3LjI0YTUuNiA1LjYgMCAwIDAgMS44NCAyLjRjLjguNTUgMS44NS45MiAzLjMuOTIgMS40NCAwIDIuNS0uMzcgMy4yOS0uOTNhNS42IDUuNiAwIDAgMCAxLjg0LTIuNGMuOS0yLjA2IDEuMS00Ljc3IDEuMS03LjIzIDAtMi40Ny0uMi01LjE4LTEuMS03LjI0YTUuNiA1LjYgMCAwIDAtMS44NC0yLjQgNS41MiA1LjUyIDAgMCAwLTMuMy0uOTNaIiBzdHJva2U9IiMwMDAiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCIgc3Ryb2tlLWxpbmVqb2luPSJyb3VuZCIvPgo8L3N2Zz4K", ht = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjYiIGhlaWdodD0iMjQiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE4Ljc1IDYuMTZjNC4zMSAyLjYgNi40NiAzLjkgNi40NiA1Ljg0IDAgMS45NS0yLjE1IDMuMjQtNi40NiA1Ljg0bC00Ljg0IDIuOTJjLTQuMzEgMi42LTYuNDYgMy44OS04LjA4IDIuOTItMS42Mi0uOTgtMS42Mi0zLjU3LTEuNjItOC43NlY5LjA4YzAtNS4xOSAwLTcuNzggMS42Mi04Ljc2IDEuNjItLjk3IDMuNzcuMzMgOC4wOCAyLjkybDQuODQgMi45MloiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", Ue = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjEiIGhlaWdodD0iMjkiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE0IDMuNWEzLjUgMy41IDAgMSAxIDcgMHYyMmEzLjUgMy41IDAgMSAxLTcgMHYtMjJaIiBmaWxsPSIjZmZmIi8+CiAgPHJlY3Qgd2lkdGg9IjciIGhlaWdodD0iMjkiIHJ4PSIzLjUiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", lt = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjciIGhlaWdodD0iMjUiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHJlY3QgeD0iLjIxIiB3aWR0aD0iMjYiIGhlaWdodD0iMjUiIHJ4PSI1IiBmaWxsPSIjZmZmIi8+Cjwvc3ZnPgo=", dt = qe( | ||
({ | ||
@@ -309,6 +309,6 @@ controls: { | ||
currentAudioTime: o, | ||
bufferFromRecordedBlob: l, | ||
bufferFromRecordedBlob: m, | ||
togglePauseResume: f, | ||
startRecording: A, | ||
stopRecording: s, | ||
startRecording: w, | ||
stopRecording: a, | ||
saveAudioFile: M, | ||
@@ -320,36 +320,36 @@ isAvailableRecordedAudio: g, | ||
isCleared: L, | ||
formattedDuration: R, | ||
formattedDuration: O, | ||
formattedRecordingTime: h, | ||
formattedRecordedAudioCurrentTime: H, | ||
clearCanvas: j, | ||
setCurrentAudioTime: J, | ||
_setIsProcessingRecordedAudio: Z | ||
formattedRecordedAudioCurrentTime: $, | ||
clearCanvas: A, | ||
setCurrentAudioTime: Q, | ||
_setIsProcessingRecordedAudio: U | ||
}, | ||
width: k = "100%", | ||
height: b = 200, | ||
speed: U = 3, | ||
backgroundColor: $ = "transparent", | ||
mainBarColor: E = "#FFFFFF", | ||
secondaryBarColor: Q = "#5e5e5e", | ||
barWidth: te = 2, | ||
gap: V = 1, | ||
rounded: q = 5, | ||
width: W = "100%", | ||
height: E = 200, | ||
speed: Y = 3, | ||
backgroundColor: R = "transparent", | ||
mainBarColor: _ = "#FFFFFF", | ||
secondaryBarColor: V = "#5e5e5e", | ||
barWidth: re = 2, | ||
gap: q = 1, | ||
rounded: X = 5, | ||
isControlPanelShown: ye = !0, | ||
isDownloadAudioButtonShown: re = !1, | ||
animateCurrentPick: ne = !0, | ||
fullscreen: X = !1, | ||
onlyRecording: W = !1, | ||
isDefaultUIShown: ie = !0, | ||
defaultMicrophoneIconColor: de = E, | ||
defaultAudioWaveIconColor: Le = E, | ||
isDownloadAudioButtonShown: ne = !1, | ||
animateCurrentPick: ie = !0, | ||
fullscreen: K = !1, | ||
onlyRecording: J = !1, | ||
isDefaultUIShown: ce = !0, | ||
defaultMicrophoneIconColor: de = _, | ||
defaultAudioWaveIconColor: Le = _, | ||
mainContainerClassName: fe, | ||
canvasContainerClassName: Y, | ||
isProgressIndicatorShown: z = !W, | ||
progressIndicatorClassName: _, | ||
isProgressIndicatorTimeShown: K = !0, | ||
progressIndicatorTimeClassName: ce, | ||
isProgressIndicatorOnHoverShown: G = !W, | ||
progressIndicatorOnHoverClassName: D, | ||
isProgressIndicatorTimeOnHoverShown: C = !0, | ||
progressIndicatorTimeOnHoverClassName: m, | ||
canvasContainerClassName: G, | ||
isProgressIndicatorShown: z = !J, | ||
progressIndicatorClassName: C, | ||
isProgressIndicatorTimeShown: ee = !0, | ||
progressIndicatorTimeClassName: oe, | ||
isProgressIndicatorOnHoverShown: B = !J, | ||
progressIndicatorOnHoverClassName: y, | ||
isProgressIndicatorTimeOnHoverShown: x = !0, | ||
progressIndicatorTimeOnHoverClassName: v, | ||
isAudioProcessingTextShown: pe = !0, | ||
@@ -359,55 +359,55 @@ audioProcessingTextClassName: je, | ||
}, we) => { | ||
const [oe, De] = d(0), [x, ae] = d(0), [se, Te] = d(0), [ze, ue] = d(0), [he, Ne] = d(!1), [le, be] = d(0), i = le < 768, y = Math.trunc(U), S = Math.trunc(V), w = Math.trunc( | ||
i && S > 0 ? te + 1 : te | ||
), v = N(null), ee = N([]), B = N(y), me = N(w), O = N(w), ge = N(null), { | ||
const [se, De] = l(0), [H, ae] = l(0), [ue, Te] = l(0), [ze, he] = l(0), [le, Ne] = l(!1), [me, be] = l(0), [i, j] = l(!1), N = me < 768, Z = Math.trunc(Y), P = Math.trunc(q), D = Math.trunc( | ||
N && P > 0 ? re + 1 : re | ||
), d = S(null), te = S([]), T = S(Z), Ge = S(D), Be = S(D), ge = S(null), { | ||
result: Me, | ||
setResult: Ge, | ||
run: Be | ||
} = ct(tt, []), _e = ot(ke), Pe = w + S * w; | ||
setResult: Pe, | ||
run: ke | ||
} = st(nt, []), _e = at(Je), We = D + P * D; | ||
F(() => { | ||
_e(); | ||
const T = () => { | ||
Z(!0), _e(); | ||
const b = () => { | ||
g && j(!0), _e(); | ||
}; | ||
return window.addEventListener("resize", T), () => { | ||
window.removeEventListener("resize", T); | ||
return window.addEventListener("resize", b), () => { | ||
window.removeEventListener("resize", b); | ||
}; | ||
}, [k, g]), Ve(() => { | ||
v.current && ((B.current >= y || !e.length) && (B.current = 0, Ke({ | ||
}, [W, g]), Xe(() => { | ||
d.current && ((T.current >= Z || !e.length) && (T.current = 0, tt({ | ||
audioData: e, | ||
unit: Pe, | ||
index: me, | ||
index2: O, | ||
canvas: v.current, | ||
picks: ee.current, | ||
unit: We, | ||
index: Ge, | ||
index2: Be, | ||
canvas: d.current, | ||
picks: te.current, | ||
isRecordingInProgress: r, | ||
isPausedRecording: I, | ||
backgroundColor: $, | ||
mainBarColor: E, | ||
secondaryBarColor: Q, | ||
barWidth: w, | ||
rounded: q, | ||
animateCurrentPick: ne, | ||
fullscreen: X | ||
})), B.current += 1); | ||
backgroundColor: R, | ||
mainBarColor: _, | ||
secondaryBarColor: V, | ||
barWidth: D, | ||
rounded: X, | ||
animateCurrentPick: ie, | ||
fullscreen: K | ||
})), T.current += 1); | ||
}, [ | ||
v.current, | ||
d.current, | ||
e, | ||
w, | ||
$, | ||
E, | ||
Q, | ||
q, | ||
D, | ||
R, | ||
_, | ||
V, | ||
X, | ||
ie, | ||
K, | ||
ce, | ||
ze | ||
]), F(() => { | ||
var T, P; | ||
var b, k; | ||
if (g) | ||
return he ? (T = v.current) == null || T.addEventListener("mouseleave", xe) : (P = v.current) == null || P.addEventListener("mouseenter", Ce), () => { | ||
return le ? (b = d.current) == null || b.addEventListener("mouseleave", xe) : (k = d.current) == null || k.addEventListener("mouseenter", Ce), () => { | ||
var Ie, $e; | ||
he ? (Ie = v.current) == null || Ie.removeEventListener( | ||
le ? (Ie = d.current) == null || Ie.removeEventListener( | ||
"mouseleave", | ||
xe | ||
) : ($e = v.current) == null || $e.removeEventListener( | ||
) : ($e = d.current) == null || $e.removeEventListener( | ||
"mouseenter", | ||
@@ -417,19 +417,19 @@ Ce | ||
}; | ||
}, [he, g]), F(() => { | ||
var P; | ||
if (!l || !v.current || r) | ||
}, [le, g]), F(() => { | ||
var k; | ||
if (!m || !d.current || r) | ||
return; | ||
if (W) { | ||
j(); | ||
if (J) { | ||
A(); | ||
return; | ||
} | ||
ee.current = []; | ||
const T = l.getChannelData(0); | ||
return Be({ | ||
bufferData: T, | ||
height: se, | ||
te.current = []; | ||
const b = m.getChannelData(0); | ||
return ke({ | ||
bufferData: b, | ||
height: ue, | ||
width: ze, | ||
barWidth: w, | ||
gap: S | ||
}), (P = v.current) == null || P.addEventListener( | ||
barWidth: D, | ||
gap: P | ||
}), (k = d.current) == null || k.addEventListener( | ||
"mousemove", | ||
@@ -439,3 +439,3 @@ He | ||
var Ie; | ||
(Ie = v.current) == null || Ie.removeEventListener( | ||
(Ie = d.current) == null || Ie.removeEventListener( | ||
"mousemove", | ||
@@ -446,25 +446,25 @@ He | ||
}, [ | ||
l, | ||
x, | ||
se, | ||
V, | ||
te | ||
m, | ||
H, | ||
ue, | ||
q, | ||
re | ||
]), F(() => { | ||
if (!(W || !(Me != null && Me.length) || !v.current)) { | ||
if (!(J || !(Me != null && Me.length) || !d.current)) { | ||
if (L) { | ||
Ge([]); | ||
Pe([]); | ||
return; | ||
} | ||
qe({ | ||
Ke({ | ||
barsData: Me, | ||
canvas: v.current, | ||
barWidth: w, | ||
gap: S, | ||
backgroundColor: $, | ||
mainBarColor: E, | ||
secondaryBarColor: Q, | ||
canvas: d.current, | ||
barWidth: D, | ||
gap: P, | ||
backgroundColor: R, | ||
mainBarColor: _, | ||
secondaryBarColor: V, | ||
currentAudioTime: o, | ||
rounded: q, | ||
rounded: X, | ||
duration: t | ||
}), Z(!1); | ||
}), j(!1), U(!1); | ||
} | ||
@@ -475,20 +475,20 @@ }, [ | ||
L, | ||
q, | ||
$, | ||
E, | ||
Q | ||
X, | ||
R, | ||
_, | ||
V | ||
]), F(() => { | ||
u && v.current && Ee({ | ||
canvas: v.current, | ||
backgroundColor: $ | ||
(u || i) && d.current && Ee({ | ||
canvas: d.current, | ||
backgroundColor: R | ||
}); | ||
}, [u]); | ||
function ke() { | ||
if (!ge.current || !v.current) | ||
}, [u, i]); | ||
function Je() { | ||
if (!ge.current || !d.current) | ||
return; | ||
B.current = y; | ||
const T = Math.trunc( | ||
T.current = Z; | ||
const b = Math.trunc( | ||
ge.current.clientHeight * window.devicePixelRatio / 2 | ||
) * 2; | ||
ae(ge.current.clientWidth), Te(T), ue( | ||
ae(ge.current.clientWidth), Te(b), he( | ||
Math.round( | ||
@@ -503,7 +503,7 @@ ge.current.clientWidth * window.devicePixelRatio | ||
Ne(!1); | ||
}, He = (T) => { | ||
De(T.offsetX); | ||
}, We = (T) => { | ||
const P = we; | ||
P.current && v.current && (P.current.currentTime = t / x * (T.clientX - v.current.getBoundingClientRect().left), J(P.current.currentTime)); | ||
}, He = (b) => { | ||
De(b.offsetX); | ||
}, Qe = (b) => { | ||
const k = we; | ||
k.current && d.current && (k.current.currentTime = t / H * (b.clientX - d.current.getBoundingClientRect().left), Q(k.current.currentTime)); | ||
}; | ||
@@ -514,16 +514,16 @@ return /* @__PURE__ */ ve("div", { className: `voice-visualizer ${fe ?? ""}`, children: [ | ||
{ | ||
className: `voice-visualizer__canvas-container ${Y ?? ""}`, | ||
className: `voice-visualizer__canvas-container ${G ?? ""}`, | ||
ref: ge, | ||
style: { width: Ze(k) }, | ||
style: { width: Ze(W) }, | ||
children: [ | ||
/* @__PURE__ */ a( | ||
/* @__PURE__ */ s( | ||
"canvas", | ||
{ | ||
ref: v, | ||
ref: d, | ||
width: ze, | ||
height: se, | ||
onClick: We, | ||
height: ue, | ||
onClick: Qe, | ||
style: { | ||
height: Ze(b), | ||
width: x | ||
height: Ze(E), | ||
width: H | ||
}, | ||
@@ -533,12 +533,12 @@ children: "Your browser does not support HTML5 Canvas." | ||
), | ||
ie && L && /* @__PURE__ */ ve(Re, { children: [ | ||
/* @__PURE__ */ a(Oe, { color: Le }), | ||
/* @__PURE__ */ a(Oe, { color: Le, reflect: !0 }), | ||
/* @__PURE__ */ a( | ||
ce && L && /* @__PURE__ */ ve(Re, { children: [ | ||
/* @__PURE__ */ s(Oe, { color: Le }), | ||
/* @__PURE__ */ s(Oe, { color: Le, reflect: !0 }), | ||
/* @__PURE__ */ s( | ||
"button", | ||
{ | ||
onClick: A, | ||
onClick: w, | ||
className: "voice-visualizer__canvas-microphone-btn", | ||
children: /* @__PURE__ */ a( | ||
at, | ||
children: /* @__PURE__ */ s( | ||
ut, | ||
{ | ||
@@ -553,25 +553,25 @@ color: de, | ||
] }), | ||
pe && u && /* @__PURE__ */ a( | ||
pe && (u || i) && /* @__PURE__ */ s( | ||
"p", | ||
{ | ||
className: `voice-visualizer__canvas-audio-processing ${je ?? ""}`, | ||
style: { color: E }, | ||
style: { color: _ }, | ||
children: "Processing Audio..." | ||
} | ||
), | ||
he && g && !i && G && /* @__PURE__ */ a( | ||
le && g && !i && !N && B && /* @__PURE__ */ s( | ||
"div", | ||
{ | ||
className: `voice-visualizer__progress-indicator-hovered ${D ?? ""}`, | ||
className: `voice-visualizer__progress-indicator-hovered ${y ?? ""}`, | ||
style: { | ||
left: oe | ||
left: se | ||
}, | ||
children: C && /* @__PURE__ */ a( | ||
children: x && /* @__PURE__ */ s( | ||
"p", | ||
{ | ||
className: `voice-visualizer__progress-indicator-hovered-time | ||
${x - oe < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""} | ||
${m ?? ""}`, | ||
${H - se < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""} | ||
${v ?? ""}`, | ||
children: Ye( | ||
t / x * oe | ||
t / H * se | ||
) | ||
@@ -582,14 +582,14 @@ } | ||
), | ||
z && g && t ? /* @__PURE__ */ a( | ||
z && g && !i && t ? /* @__PURE__ */ s( | ||
"div", | ||
{ | ||
className: `voice-visualizer__progress-indicator ${_ ?? ""}`, | ||
className: `voice-visualizer__progress-indicator ${C ?? ""}`, | ||
style: { | ||
left: o / t * x | ||
left: o / t * H | ||
}, | ||
children: K && /* @__PURE__ */ a( | ||
children: ee && /* @__PURE__ */ s( | ||
"p", | ||
{ | ||
className: `voice-visualizer__progress-indicator-time ${x - o * x / t < 70 ? "voice-visualizer__progress-indicator-time-left" : ""} ${ce ?? ""}`, | ||
children: H | ||
className: `voice-visualizer__progress-indicator-time ${H - o * H / t < 70 ? "voice-visualizer__progress-indicator-time-left" : ""} ${oe ?? ""}`, | ||
children: $ | ||
} | ||
@@ -604,7 +604,7 @@ ) | ||
/* @__PURE__ */ ve("div", { className: "voice-visualizer__audio-info-container", children: [ | ||
r && /* @__PURE__ */ a("p", { className: "voice-visualizer__audio-info-time", children: h }), | ||
t && !u ? /* @__PURE__ */ a("p", { children: R }) : null | ||
r && /* @__PURE__ */ s("p", { className: "voice-visualizer__audio-info-time", children: h }), | ||
t && !u ? /* @__PURE__ */ s("p", { children: O }) : null | ||
] }), | ||
/* @__PURE__ */ ve("div", { className: "voice-visualizer__buttons-container", children: [ | ||
r && /* @__PURE__ */ a( | ||
r && /* @__PURE__ */ s( | ||
"button", | ||
@@ -614,3 +614,3 @@ { | ||
onClick: f, | ||
children: /* @__PURE__ */ a( | ||
children: /* @__PURE__ */ s( | ||
"img", | ||
@@ -624,3 +624,3 @@ { | ||
), | ||
!L && /* @__PURE__ */ a( | ||
!L && /* @__PURE__ */ s( | ||
"button", | ||
@@ -630,7 +630,7 @@ { | ||
onClick: f, | ||
disabled: u, | ||
children: /* @__PURE__ */ a( | ||
disabled: u || i, | ||
children: /* @__PURE__ */ s( | ||
"img", | ||
{ | ||
src: p ? st : Ue, | ||
src: p ? ht : Ue, | ||
alt: p ? "Play" : "Pause" | ||
@@ -641,28 +641,28 @@ } | ||
), | ||
L && /* @__PURE__ */ a( | ||
L && /* @__PURE__ */ s( | ||
"button", | ||
{ | ||
className: "voice-visualizer__btn-center", | ||
onClick: A, | ||
children: /* @__PURE__ */ a("img", { src: Fe, alt: "Microphone" }) | ||
onClick: w, | ||
children: /* @__PURE__ */ s("img", { src: Fe, alt: "Microphone" }) | ||
} | ||
), | ||
/* @__PURE__ */ a( | ||
/* @__PURE__ */ s( | ||
"button", | ||
{ | ||
className: `voice-visualizer__btn-center voice-visualizer__btn-center-pause ${r ? "" : "voice-visualizer__visually-hidden"}`, | ||
onClick: s, | ||
children: /* @__PURE__ */ a("img", { src: ut, alt: "Stop" }) | ||
onClick: a, | ||
children: /* @__PURE__ */ s("img", { src: lt, alt: "Stop" }) | ||
} | ||
), | ||
!L && /* @__PURE__ */ a( | ||
!L && /* @__PURE__ */ s( | ||
"button", | ||
{ | ||
onClick: j, | ||
onClick: A, | ||
className: `voice-visualizer__btn ${Se ?? ""}`, | ||
disabled: u, | ||
disabled: u || i, | ||
children: "Clear" | ||
} | ||
), | ||
re && n && /* @__PURE__ */ a( | ||
ne && n && /* @__PURE__ */ s( | ||
"button", | ||
@@ -672,3 +672,3 @@ { | ||
className: `voice-visualizer__btn ${Se ?? ""}`, | ||
disabled: u, | ||
disabled: u || i, | ||
children: "Download Audio" | ||
@@ -679,3 +679,3 @@ } | ||
] }), | ||
g && /* @__PURE__ */ a( | ||
g && /* @__PURE__ */ s( | ||
"audio", | ||
@@ -692,3 +692,3 @@ { | ||
); | ||
function vt({ | ||
function ft({ | ||
onStartRecording: e, | ||
@@ -700,126 +700,126 @@ onStopRecording: r, | ||
onEndAudioPlayback: o, | ||
onStartAudioPlayback: l, | ||
onStartAudioPlayback: m, | ||
onPausedAudioPlayback: f, | ||
onResumedAudioPlayback: A | ||
onResumedAudioPlayback: w | ||
} = {}) { | ||
const [s, M] = d(!1), [g, p] = d(!1), [I, u] = d(null), [L, R] = d(new Uint8Array(0)), [h, H] = d(!1), [j, J] = d(null), [Z, k] = d(null), [b, U] = d(0), [$, E] = d(0), [Q, te] = d(0), [V, q] = d(""), [ye, re] = d(!0), [ne, X] = d(0), [W, ie] = d(!0), [de, Le] = d(!1), [fe, Y] = d(null), z = N(null), _ = N(null), K = N(null), ce = N(null), G = N(null), D = N(null), C = N(null), m = N(null), pe = !!(Z && !h), je = nt(Q), Se = et(b), we = Ye(ne); | ||
const [a, M] = l(!1), [g, p] = l(!1), [I, u] = l(null), [L, O] = l(new Uint8Array(0)), [h, $] = l(!1), [A, Q] = l(null), [U, W] = l(null), [E, Y] = l(0), [R, _] = l(0), [V, re] = l(0), [q, X] = l(""), [ye, ne] = l(!0), [ie, K] = l(0), [J, ce] = l(!0), [de, Le] = l(!1), [fe, G] = l(null), z = S(null), C = S(null), ee = S(null), oe = S(null), B = S(null), y = S(null), x = S(null), v = S(null), pe = !!(U && !h), je = ct(V), Se = rt(E), we = Ye(ie); | ||
F(() => { | ||
if (!s || g) | ||
if (!a || g) | ||
return; | ||
const y = setInterval(() => { | ||
const S = performance.now(); | ||
U((w) => w + (S - $)), E(S); | ||
const j = setInterval(() => { | ||
const N = performance.now(); | ||
Y((Z) => Z + (N - R)), _(N); | ||
}, 1e3); | ||
return () => clearInterval(y); | ||
}, [$, g, s]), F(() => { | ||
if (!j || j.size === 0) | ||
return () => clearInterval(j); | ||
}, [R, g, a]), F(() => { | ||
if (!A || A.size === 0) | ||
return; | ||
(async () => { | ||
var y; | ||
var j; | ||
try { | ||
Y(null); | ||
const S = new Blob([j], { | ||
type: (y = z.current) == null ? void 0 : y.mimeType | ||
}), w = URL.createObjectURL(S); | ||
w && q(w); | ||
const v = await j.arrayBuffer(), ee = new AudioContext(), B = (O) => { | ||
k(O), te(O.duration - 0.06); | ||
}, me = (O) => { | ||
Y(O); | ||
G(null); | ||
const N = new Blob([A], { | ||
type: (j = z.current) == null ? void 0 : j.mimeType | ||
}), Z = URL.createObjectURL(N); | ||
Z && X(Z); | ||
const P = await A.arrayBuffer(), D = new AudioContext(), d = (T) => { | ||
W(T), re(T.duration - 0.06); | ||
}, te = (T) => { | ||
G(T); | ||
}; | ||
ee.decodeAudioData( | ||
v, | ||
B, | ||
me | ||
D.decodeAudioData( | ||
P, | ||
d, | ||
te | ||
); | ||
} catch (S) { | ||
if (console.error("Error processing the audio blob:", S), S instanceof Error) { | ||
Y(S); | ||
} catch (N) { | ||
if (console.error("Error processing the audio blob:", N), N instanceof Error) { | ||
G(N); | ||
return; | ||
} | ||
Y(new Error("Error processing the audio blob")); | ||
G(new Error("Error processing the audio blob")); | ||
} | ||
})(); | ||
}, [j]), F(() => { | ||
}, [A]), F(() => { | ||
if (fe) { | ||
ue(); | ||
he(); | ||
return; | ||
} | ||
}, [fe]), F(() => () => { | ||
C.current && cancelAnimationFrame(C.current), G.current && G.current.disconnect(), _.current && _.current.state !== "closed" && _.current.close(), D.current && cancelAnimationFrame(D.current), m != null && m.current && m.current.removeEventListener("ended", le), z.current && z.current.removeEventListener( | ||
x.current && cancelAnimationFrame(x.current), B.current && B.current.disconnect(), C.current && C.current.state !== "closed" && C.current.close(), y.current && cancelAnimationFrame(y.current), v != null && v.current && v.current.removeEventListener("ended", me), z.current && z.current.removeEventListener( | ||
"dataavailable", | ||
ae | ||
); | ||
}, []), F(() => (!W && !de && window.addEventListener("beforeunload", oe), () => { | ||
window.removeEventListener("beforeunload", oe); | ||
}), [W, de]); | ||
const oe = (i) => { | ||
}, []), F(() => (!J && !de && window.addEventListener("beforeunload", se), () => { | ||
window.removeEventListener("beforeunload", se); | ||
}), [J, de]); | ||
const se = (i) => { | ||
i.preventDefault(), i.returnValue = ""; | ||
}, De = () => { | ||
navigator.mediaDevices.getUserMedia({ audio: !0 }).then((i) => { | ||
ue(), ie(!1), E(performance.now()), M(!0), u(i), _.current = new window.AudioContext(), K.current = _.current.createAnalyser(), ce.current = new Uint8Array( | ||
K.current.frequencyBinCount | ||
), G.current = _.current.createMediaStreamSource(i), G.current.connect(K.current), z.current = new MediaRecorder(i), z.current.addEventListener( | ||
he(), ce(!1), _(performance.now()), M(!0), u(i), C.current = new window.AudioContext(), ee.current = C.current.createAnalyser(), oe.current = new Uint8Array( | ||
ee.current.frequencyBinCount | ||
), B.current = C.current.createMediaStreamSource(i), B.current.connect(ee.current), z.current = new MediaRecorder(i), z.current.addEventListener( | ||
"dataavailable", | ||
ae | ||
), z.current.start(), x(); | ||
), z.current.start(), H(); | ||
}).catch((i) => { | ||
if (console.error("Error starting audio recording:", i), i instanceof Error) { | ||
Y(i); | ||
G(i); | ||
return; | ||
} | ||
Y(new Error("Error starting audio recording")); | ||
G(new Error("Error starting audio recording")); | ||
}); | ||
}, x = () => { | ||
K.current.getByteTimeDomainData(ce.current), R(new Uint8Array(ce.current)), D.current = requestAnimationFrame(x); | ||
}, H = () => { | ||
ee.current.getByteTimeDomainData(oe.current), O(new Uint8Array(oe.current)), y.current = requestAnimationFrame(H); | ||
}, ae = (i) => { | ||
z.current && J(i.data); | ||
}, se = () => { | ||
C.current && cancelAnimationFrame(C.current), m.current && (X(m.current.currentTime), C.current = requestAnimationFrame(se)); | ||
z.current && Q(i.data); | ||
}, ue = () => { | ||
x.current && cancelAnimationFrame(x.current), v.current && (K(v.current.currentTime), x.current = requestAnimationFrame(ue)); | ||
}, Te = () => { | ||
s || (e && e(), De()); | ||
a || (e && e(), De()); | ||
}, ze = () => { | ||
s && (r && r(), H(!0), M(!1), U(0), p(!1), D.current && cancelAnimationFrame(D.current), G.current && G.current.disconnect(), _.current && _.current.state !== "closed" && _.current.close(), I == null || I.getTracks().forEach((i) => i.stop()), z.current && (z.current.stop(), z.current.removeEventListener( | ||
a && (r && r(), $(!0), M(!1), Y(0), p(!1), y.current && cancelAnimationFrame(y.current), B.current && B.current.disconnect(), C.current && C.current.state !== "closed" && C.current.close(), I == null || I.getTracks().forEach((i) => i.stop()), z.current && (z.current.stop(), z.current.removeEventListener( | ||
"dataavailable", | ||
ae | ||
))); | ||
}, ue = () => { | ||
D.current && cancelAnimationFrame(D.current), m != null && m.current && m.current.removeEventListener("ended", le), C.current && cancelAnimationFrame(C.current), z.current && (z.current.removeEventListener( | ||
}, he = () => { | ||
y.current && cancelAnimationFrame(y.current), v != null && v.current && v.current.removeEventListener("ended", me), x.current && cancelAnimationFrame(x.current), z.current && (z.current.removeEventListener( | ||
"dataavailable", | ||
ae | ||
), z.current.stop(), z.current = null), I == null || I.getTracks().forEach((i) => i.stop()), z.current = null, _.current = null, K.current = null, ce.current = null, G.current = null, D.current = null, C.current = null, c && c(), u(null), M(!1), H(!1), J(null), k(null), U(0), E(0), te(0), q(""), X(0), re(!0), p(!1), R(new Uint8Array(0)), Y(null), ie(!0); | ||
}, he = (i) => { | ||
i instanceof Blob && (ue(), Le(!0), ie(!1), H(!0), M(!1), U(0), p(!1), J(i)); | ||
), z.current.stop(), z.current = null), I == null || I.getTracks().forEach((i) => i.stop()), z.current = null, C.current = null, ee.current = null, oe.current = null, B.current = null, y.current = null, x.current = null, c && c(), u(null), M(!1), $(!1), Q(null), W(null), Y(0), _(0), re(0), X(""), K(0), ne(!0), p(!1), O(new Uint8Array(0)), G(null), ce(!0); | ||
}, le = (i) => { | ||
i instanceof Blob && (he(), Le(!0), ce(!1), $(!0), M(!1), Y(0), p(!1), Q(i)); | ||
}, Ne = () => { | ||
var i, y, S, w, v, ee, B, me; | ||
if (s) { | ||
p((O) => !O), ((i = z.current) == null ? void 0 : i.state) === "recording" ? (n && n(), (y = z.current) == null || y.pause(), U((O) => O + (performance.now() - $)), D.current && cancelAnimationFrame(D.current)) : (t && t(), (S = z.current) == null || S.resume(), E(performance.now()), D.current = requestAnimationFrame(x)); | ||
var i, j, N, Z, P, D, d, te; | ||
if (a) { | ||
p((T) => !T), ((i = z.current) == null ? void 0 : i.state) === "recording" ? (n && n(), (j = z.current) == null || j.pause(), Y((T) => T + (performance.now() - R)), y.current && cancelAnimationFrame(y.current)) : (t && t(), (N = z.current) == null || N.resume(), _(performance.now()), y.current = requestAnimationFrame(H)); | ||
return; | ||
} | ||
m.current && pe && (C.current && cancelAnimationFrame(C.current), (w = m.current) != null && w.paused ? (l && ne === 0 && l(), A && ne !== 0 && A(), (v = m.current) == null || v.addEventListener("ended", le), (ee = m.current) == null || ee.play(), re(!1), se()) : (f && f(), (B = m.current) == null || B.removeEventListener("ended", le), (me = m.current) == null || me.pause(), re(!0))); | ||
}, le = () => { | ||
re(!0), o && o(), m != null && m.current && (m.current.currentTime = 0, X(0)); | ||
v.current && pe && (x.current && cancelAnimationFrame(x.current), (Z = v.current) != null && Z.paused ? (m && ie === 0 && m(), w && ie !== 0 && w(), (P = v.current) == null || P.addEventListener("ended", me), (D = v.current) == null || D.play(), ne(!1), ue()) : (f && f(), (d = v.current) == null || d.removeEventListener("ended", me), (te = v.current) == null || te.pause(), ne(!0))); | ||
}, me = () => { | ||
ne(!0), o && o(), v != null && v.current && (v.current.currentTime = 0, K(0)); | ||
}, be = () => { | ||
var y; | ||
if (!V) | ||
var j; | ||
if (!q) | ||
return; | ||
const i = document.createElement("a"); | ||
i.href = V, i.download = `recorded_audio${rt( | ||
(y = z.current) == null ? void 0 : y.mimeType | ||
)}`, document.body.appendChild(i), i.click(), document.body.removeChild(i), URL.revokeObjectURL(V); | ||
i.href = q, i.download = `recorded_audio${it( | ||
(j = z.current) == null ? void 0 : j.mimeType | ||
)}`, document.body.appendChild(i), i.click(), document.body.removeChild(i), URL.revokeObjectURL(q); | ||
}; | ||
return { | ||
isRecordingInProgress: s, | ||
isRecordingInProgress: a, | ||
isPausedRecording: g, | ||
audioData: L, | ||
recordingTime: b, | ||
recordingTime: E, | ||
isProcessingRecordedAudio: h, | ||
recordedBlob: j, | ||
recordedBlob: A, | ||
mediaRecorder: z.current, | ||
duration: Q, | ||
currentAudioTime: ne, | ||
audioSrc: V, | ||
duration: V, | ||
currentAudioTime: ie, | ||
audioSrc: q, | ||
isPausedRecordedAudio: ye, | ||
bufferFromRecordedBlob: Z, | ||
isCleared: W, | ||
bufferFromRecordedBlob: U, | ||
isCleared: J, | ||
isAvailableRecordedAudio: pe, | ||
@@ -830,3 +830,3 @@ isPreloadedBlob: de, | ||
formattedRecordedAudioCurrentTime: we, | ||
setPreloadedAudioBlob: he, | ||
setPreloadedAudioBlob: le, | ||
startRecording: Te, | ||
@@ -836,12 +836,12 @@ togglePauseResume: Ne, | ||
saveAudioFile: be, | ||
clearCanvas: ue, | ||
setCurrentAudioTime: X, | ||
clearCanvas: he, | ||
setCurrentAudioTime: K, | ||
error: fe, | ||
_setIsProcessingRecordedAudio: H, | ||
audioRef: m | ||
_setIsProcessingRecordedAudio: $, | ||
audioRef: v | ||
}; | ||
} | ||
export { | ||
mt as VoiceVisualizer, | ||
vt as useVoiceVisualizer | ||
dt as VoiceVisualizer, | ||
ft as useVoiceVisualizer | ||
}; |
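Both versions export the same two public entry points, VoiceVisualizer and useVoiceVisualizer. For orientation, here is a minimal usage sketch; it assumes the component/hook API visible in the bundle above (the controls object returned by the hook, the controls prop and forwarded audio ref on the component), and names such as RecorderDemo are purely illustrative, not part of the package.

import { useEffect } from "react";
import { useVoiceVisualizer, VoiceVisualizer } from "react-voice-visualizer";

// Illustrative consumer component (the name "RecorderDemo" is not part of the package).
function RecorderDemo() {
  // The hook owns the MediaRecorder / AudioContext state and returns the
  // controls object that <VoiceVisualizer> expects via its `controls` prop.
  const recorderControls = useVoiceVisualizer();
  const { recordedBlob, error, audioRef } = recorderControls;

  // recordedBlob is set once recording stops; error surfaces getUserMedia
  // or decodeAudioData failures (see the catch blocks in the bundle above).
  useEffect(() => {
    if (recordedBlob) {
      console.log("Recorded audio blob:", recordedBlob);
    }
  }, [recordedBlob]);

  useEffect(() => {
    if (error) {
      console.error(error);
    }
  }, [error]);

  return (
    <VoiceVisualizer
      controls={recorderControls}
      ref={audioRef}
      // Appearance props and the defaults they fall back to, as destructured in the bundle:
      height={200}
      width="100%"
      backgroundColor="transparent"
      mainBarColor="#FFFFFF"
      secondaryBarColor="#5e5e5e"
    />
  );
}

export default RecorderDemo;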
{ | ||
"name": "react-voice-visualizer", | ||
"private": false, | ||
"version": "1.3.1", | ||
"version": "1.3.2", | ||
"type": "module", | ||
@@ -6,0 +6,0 @@ "author": "Yurii Zarytskyi", |