react-voice-visualizer - npm Package Compare versions

Comparing version 2.0.4 to 2.0.5


dist/react-voice-visualizer.js
(function(){"use strict";(e=>{try{if(typeof window>"u")return;var i=document.createElement("style");i.appendChild(document.createTextNode(e)),document.head.appendChild(i)}catch(o){console.error("vite-plugin-css-injected-by-js",o)}})(".voice-visualizer__buttons-container{display:flex;justify-content:center;align-items:center;column-gap:20px;row-gap:15px;flex-wrap:wrap;margin-bottom:40px}.voice-visualizer__btn-center{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#fff;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s}.voice-visualizer__btn-center:disabled{opacity:.85;cursor:default}.voice-visualizer__btn-center:hover{background-color:#eaeaea;border:4px solid #9f9f9f}.voice-visualizer__btn-center>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause{background-color:#ff3030}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause:hover{background-color:#ff4f4f}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause>img{height:50%;max-height:16px}.voice-visualizer__btn-center.voice-visualizer__btn-center--border-transparent{border-color:transparent}.voice-visualizer__btn-center.voice-visualizer__btn-center--border-transparent:hover{background-color:#fff}.voice-visualizer__btn-left{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#ff3030;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s,opacity .3s}.voice-visualizer__btn-container{min-width:100px;display:flex;justify-content:flex-end}.voice-visualizer__btn-left:hover{background-color:#ff4f4f}.voice-visualizer__btn-left:disabled{opacity:.6;background-color:#ff3030;cursor:default}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone{background-color:#fff}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-left>img{width:auto;height:50%;max-height:16px}.voice-visualizer__btn-left:hover{border:4px solid #9f9f9f}.voice-visualizer__btn{box-sizing:border-box;min-width:100px;min-height:60px;padding:5px 20px;border-radius:40px;font-size:15px;background-color:#f0f0f0;transition:background-color .3s,opacity .3s}.voice-visualizer__btn:disabled{opacity:.8;background-color:#f0f0f0;cursor:default}.voice-visualizer__btn:hover{background-color:#bebebe}.voice-visualizer__canvas-container{position:relative;width:fit-content;margin:0 auto;overflow:hidden}.voice-visualizer__canvas-container canvas{display:block}.voice-visualizer__canvas-microphone-btn{position:absolute;top:50%;left:50%;width:auto;max-width:12%;min-width:24px;height:50%;max-height:100px;background-color:transparent;border:none;outline:none;transform:translate(-50%,-50%)}.voice-visualizer__canvas-microphone-icon{width:100%;height:100%;will-change:transform;transition:transform .3s}.voice-visualizer__canvas-microphone-btn:hover .voice-visualizer__canvas-microphone-icon{transform:scale(1.03)}.voice-visualizer__canvas-audio-wave-icon{position:absolute;top:50%;left:50%;width:auto;max-width:40%;height:40%;max-height:100px;transform:translate(-118%,-50%) 
scale(-1)}.voice-visualizer__canvas-audio-wave-icon2{transform:translate(18%,-50%)}.voice-visualizer__canvas-audio-processing{position:absolute;top:50%;left:50%;margin:0;transform:translate(-50%,-50%)}.voice-visualizer__progress-indicator-hovered{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#85858599}.voice-visualizer__progress-indicator-hovered-time{position:absolute;top:3%;left:1px;width:fit-content;margin:0;padding:0 7px;opacity:.8;font-size:12px;border-radius:0 4px 4px 0;background-color:#575757;text-align:left}.voice-visualizer__progress-indicator-hovered-time.voice-visualizer__progress-indicator-hovered-time-left{left:unset;right:1px;border-radius:4px 0 0 4px}.voice-visualizer__progress-indicator{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#efefef}.voice-visualizer__progress-indicator-time{position:absolute;top:3%;left:1px;width:fit-content;box-sizing:border-box;min-width:37px;margin:0;padding:0 7px;font-size:12px;border-radius:0 4px 4px 0;text-align:left;color:#000;font-weight:500;background-color:#efefef}.voice-visualizer__progress-indicator-time.voice-visualizer__progress-indicator-time-left{left:unset;right:1px;border-radius:4px 0 0 4px}.voice-visualizer__audio-info-container{box-sizing:border-box;height:55px;display:flex;align-items:center;justify-content:center;gap:30px}.voice-visualizer__audio-info-time{margin:15px 0;min-width:38px;text-align:left}.voice-visualizer__visually-hidden{position:absolute;width:1px;height:1px;margin:-1px;padding:0;border:4px solid #c5c5c5;white-space:nowrap;clip-path:inset(100%);clip:rect(0 0 0 0);overflow:hidden}.voice-visualizer__relative{position:relative}.voice-visualizer__spinner-wrapper{position:absolute;top:50%;left:50%;transform:translate(-50%,-50%);width:52px;height:52px;display:flex;justify-content:center;align-items:center}.voice-visualizer__spinner{flex-shrink:0;width:100%;height:100%;border:4px solid rgba(197,197,197,.5);border-radius:50%;border-top-color:#b7b7b7;animation:voice-visualizer__spin 1s ease-in-out infinite;-webkit-animation:voice-visualizer__spin 1s ease-in-out infinite}@keyframes voice-visualizer__spin{to{transform:rotate(360deg)}}@-webkit-keyframes voice-visualizer__spin{to{transform:rotate(360deg)}}")})();
import { jsx as a, jsxs as ee, Fragment as Ge } from "react/jsx-runtime";
import { useState as m, useRef as N, useCallback as nt, useLayoutEffect as ke, useEffect as U } from "react";
const $e = ({
import { jsx as a, jsxs as te, Fragment as Ge } from "react/jsx-runtime";
import { useState as m, useRef as y, useCallback as tt, useEffect as F, useLayoutEffect as rt } from "react";
const He = ({
canvas: e,

@@ -10,3 +10,3 @@ backgroundColor: t

return u ? (u.clearRect(0, 0, i, r), t !== "transparent" && (u.fillStyle = t, u.fillRect(0, 0, i, r)), { context: u, height: r, width: i, halfWidth: c }) : null;
}, Ce = ({
}, _e = ({
context: e,

@@ -18,6 +18,6 @@ color: t,

w: u,
h: L
h: w
}) => {
e.fillStyle = t, e.beginPath(), e.roundRect ? (e.roundRect(i, c, u, L, r), e.fill()) : e.fillRect(i, c, u, L);
}, it = ({
e.fillStyle = t, e.beginPath(), e.roundRect ? (e.roundRect(i, c, u, w, r), e.fill()) : e.fillRect(i, c, u, w);
}, nt = ({
barsData: e,

@@ -29,3 +29,3 @@ canvas: t,

mainBarColor: u,
secondaryBarColor: L,
secondaryBarColor: w,
currentAudioTime: v = 0,

@@ -35,11 +35,11 @@ rounded: z,

}) => {
const f = $e({ canvas: t, backgroundColor: c });
const f = He({ canvas: t, backgroundColor: c });
if (!f)
return;
const { context: S, height: g } = f, A = v / d;
const { context: S, height: g } = f, N = v / d;
e.forEach((h, I) => {
const w = I / e.length, s = A > w;
Ce({
const L = I / e.length, s = N > L;
_e({
context: S,
color: s ? L : u,
color: s ? w : u,
rounded: z,

@@ -53,3 +53,3 @@ x: I * (r + i * r),

};
function ct({
function it({
context: e,

@@ -62,3 +62,3 @@ color: t,

}) {
Ce({
_e({
context: e,

@@ -73,3 +73,3 @@ color: t,

}
const st = ({
const ct = ({
audioData: e,

@@ -81,3 +81,3 @@ unit: t,

isRecordingInProgress: u,
isPausedRecording: L,
isPausedRecording: w,
picks: v,

@@ -89,40 +89,40 @@ backgroundColor: z,

rounded: g,
animateCurrentPick: A,
animateCurrentPick: N,
fullscreen: h
}) => {
const I = $e({ canvas: c, backgroundColor: z });
const I = He({ canvas: c, backgroundColor: z });
if (!I)
return;
const { context: w, height: s, width: H, halfWidth: b } = I;
const { context: L, height: s, width: $, halfWidth: x } = I;
if (e != null && e.length && u) {
const E = Math.max(...e);
if (!L) {
const b = Math.max(...e);
if (!w) {
if (i.current >= d) {
i.current = 0;
const y = (s - E / 258 * s) / s * 100, W = (-s + E / 258 * s * 2) / s * 100, $ = r.current === d ? {
startY: y,
const A = (s - b / 258 * s) / s * 100, W = (-s + b / 258 * s * 2) / s * 100, O = r.current === d ? {
startY: A,
barHeight: W
} : null;
r.current >= t ? r.current = d : r.current += d, v.length > (h ? H : b) / d && v.pop(), v.unshift($);
r.current >= t ? r.current = d : r.current += d, v.length > (h ? $ : x) / d && v.pop(), v.unshift(O);
}
i.current += 1;
}
!h && he(), A && Ce({
context: w,
!h && le(), N && _e({
context: L,
rounded: g,
color: f,
x: h ? H : b,
y: s - E / 258 * s,
h: -s + E / 258 * s * 2,
x: h ? $ : x,
y: s - b / 258 * s,
h: -s + b / 258 * s * 2,
w: d
});
let B = (h ? H : b) - i.current;
v.forEach((y) => {
y && Ce({
context: w,
let B = (h ? $ : x) - i.current;
v.forEach((A) => {
A && _e({
context: L,
color: f,
rounded: g,
x: B,
y: y.startY * s / 100 > s / 2 - 1 ? s / 2 - 1 : y.startY * s / 100,
h: y.barHeight * s / 100 > 2 ? y.barHeight * s / 100 : 2,
y: A.startY * s / 100 > s / 2 - 1 ? s / 2 - 1 : A.startY * s / 100,
h: A.barHeight * s / 100 > 2 ? A.barHeight * s / 100 : 2,
w: d

@@ -133,8 +133,8 @@ }), B -= d;

v.length = 0;
function he() {
ct({
context: w,
function le() {
it({
context: L,
color: S,
rounded: g,
width: H,
width: $,
height: s,

@@ -144,3 +144,3 @@ barWidth: d

}
}, Je = (e) => {
}, ke = (e) => {
const t = Math.floor(e / 3600), r = Math.floor(e % 3600 / 60), i = e % 60, c = Math.floor(

@@ -159,3 +159,3 @@ (i - Math.floor(i)) * 1e3

).charAt(0)}`;
}, ot = (e) => {
}, st = (e) => {
const t = Math.floor(e / 1e3), r = Math.floor(t / 3600), i = Math.floor(t % 3600 / 60), c = t % 60;

@@ -175,3 +175,3 @@ return r > 0 ? `${String(r).padStart(2, "0")}:${String(i).padStart(

}
const at = ({
const ot = ({
bufferData: e,

@@ -183,3 +183,3 @@ height: t,

}) => {
const u = r / (i + c * i), L = Math.floor(e.length / u), v = t / 2;
const u = r / (i + c * i), w = Math.floor(e.length / u), v = t / 2;
let z = [], d = 0;

@@ -189,8 +189,8 @@ for (let f = 0; f < u; f++) {

let g = 0;
for (let h = 0; h < L && f * L + h < e.length; h++) {
const I = e[f * L + h];
for (let h = 0; h < w && f * w + h < e.length; h++) {
const I = e[f * w + h];
I > 0 && (S.push(I), g++);
}
const A = S.reduce((h, I) => h + I, 0) / g;
A > d && (d = A), z.push({ max: A });
const N = S.reduce((h, I) => h + I, 0) / g;
N > d && (d = N), z.push({ max: N });
}

@@ -204,3 +204,3 @@ if (v * 0.95 > d * v) {

return z;
}, ut = (e) => {
}, at = (e) => {
if (!e)

@@ -210,3 +210,3 @@ return "";

return t && t.length >= 2 ? `.${t[1]}` : "";
}, ht = (e) => {
}, ut = (e) => {
const t = Math.floor(e / 3600), r = Math.floor(e % 3600 / 60), i = e % 60, c = Math.floor(

@@ -223,3 +223,3 @@ (i - Math.floor(i)) * 1e3

).charAt(0)}${String(c).charAt(1)}s`;
}, lt = (e) => {
}, ht = (e) => {
onmessage = (t) => {

@@ -229,3 +229,3 @@ postMessage(e(t.data));

};
function mt({
function lt({
fn: e,

@@ -239,6 +239,6 @@ initialValue: t,

setResult: c,
run: (L) => {
run: (w) => {
const v = new Worker(
// eslint-disable-next-line @typescript-eslint/restrict-template-expressions
URL.createObjectURL(new Blob([`(${lt})(${e})`]))
URL.createObjectURL(new Blob([`(${ht})(${e})`]))
);

@@ -249,9 +249,9 @@ v.onmessage = (z) => {

console.error(z.message), v.terminate();
}, v.postMessage(L);
}, v.postMessage(w);
}
};
}
const vt = (e, t = 250) => {
const r = N();
return nt(
const mt = (e, t = 250) => {
const r = y();
return tt(
// eslint-disable-next-line @typescript-eslint/no-explicit-any

@@ -267,9 +267,3 @@ (...i) => {

};
function dt(e) {
const t = N(e);
return ke(() => {
t.current = e;
}, [e]), t;
}
const ft = ({
const vt = ({
color: e = "#000000",

@@ -313,3 +307,3 @@ stroke: t = 2,

}
), Be = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjMiIGhlaWdodD0iMzMiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTEuMSAxNi43MmMwIDMgLjk2IDUuOCAzLjYxIDcuOTVhOS45NiA5Ljk2IDAgMCAwIDYuNSAyLjE3bTAgMHY0LjM0aDQuMzQtOC42N200LjM0LTQuMzRjMi4zNSAwIDQuNDItLjQ4IDYuNS0yLjE3YTkuODcgOS44NyAwIDAgMCAzLjYxLTcuOTVNMTEuMjIgMS44MmMtMS40NSAwLTIuNS4zNy0zLjMuOTNhNS42IDUuNiAwIDAgMC0xLjg0IDIuNGMtLjkgMi4wNi0xLjEgNC43Ny0xLjEgNy4yNCAwIDIuNDYuMiA1LjE3IDEuMSA3LjI0YTUuNiA1LjYgMCAwIDAgMS44NCAyLjRjLjguNTUgMS44NS45MiAzLjMuOTIgMS40NCAwIDIuNS0uMzcgMy4yOS0uOTNhNS42IDUuNiAwIDAgMCAxLjg0LTIuNGMuOS0yLjA2IDEuMS00Ljc3IDEuMS03LjIzIDAtMi40Ny0uMi01LjE4LTEuMS03LjI0YTUuNiA1LjYgMCAwIDAtMS44NC0yLjQgNS41MiA1LjUyIDAgMCAwLTMuMy0uOTNaIiBzdHJva2U9IiMwMDAiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCIgc3Ryb2tlLWxpbmVqb2luPSJyb3VuZCIvPgo8L3N2Zz4K", zt = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjYiIGhlaWdodD0iMjQiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE4Ljc1IDYuMTZjNC4zMSAyLjYgNi40NiAzLjkgNi40NiA1Ljg0IDAgMS45NS0yLjE1IDMuMjQtNi40NiA1Ljg0bC00Ljg0IDIuOTJjLTQuMzEgMi42LTYuNDYgMy44OS04LjA4IDIuOTItMS42Mi0uOTgtMS42Mi0zLjU3LTEuNjItOC43NlY5LjA4YzAtNS4xOSAwLTcuNzggMS42Mi04Ljc2IDEuNjItLjk3IDMuNzcuMzMgOC4wOCAyLjkybDQuODQgMi45MloiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", We = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjEiIGhlaWdodD0iMjkiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE0IDMuNWEzLjUgMy41IDAgMSAxIDcgMHYyMmEzLjUgMy41IDAgMSAxLTcgMHYtMjJaIiBmaWxsPSIjZmZmIi8+CiAgPHJlY3Qgd2lkdGg9IjciIGhlaWdodD0iMjkiIHJ4PSIzLjUiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", gt = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjciIGhlaWdodD0iMjUiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHJlY3QgeD0iLjIxIiB3aWR0aD0iMjYiIGhlaWdodD0iMjUiIHJ4PSI1IiBmaWxsPSIjZmZmIi8+Cjwvc3ZnPgo=", It = ({
), Be = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjMiIGhlaWdodD0iMzMiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTEuMSAxNi43MmMwIDMgLjk2IDUuOCAzLjYxIDcuOTVhOS45NiA5Ljk2IDAgMCAwIDYuNSAyLjE3bTAgMHY0LjM0aDQuMzQtOC42N200LjM0LTQuMzRjMi4zNSAwIDQuNDItLjQ4IDYuNS0yLjE3YTkuODcgOS44NyAwIDAgMCAzLjYxLTcuOTVNMTEuMjIgMS44MmMtMS40NSAwLTIuNS4zNy0zLjMuOTNhNS42IDUuNiAwIDAgMC0xLjg0IDIuNGMtLjkgMi4wNi0xLjEgNC43Ny0xLjEgNy4yNCAwIDIuNDYuMiA1LjE3IDEuMSA3LjI0YTUuNiA1LjYgMCAwIDAgMS44NCAyLjRjLjguNTUgMS44NS45MiAzLjMuOTIgMS40NCAwIDIuNS0uMzcgMy4yOS0uOTNhNS42IDUuNiAwIDAgMCAxLjg0LTIuNGMuOS0yLjA2IDEuMS00Ljc3IDEuMS03LjIzIDAtMi40Ny0uMi01LjE4LTEuMS03LjI0YTUuNiA1LjYgMCAwIDAtMS44NC0yLjQgNS41MiA1LjUyIDAgMCAwLTMuMy0uOTNaIiBzdHJva2U9IiMwMDAiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCIgc3Ryb2tlLWxpbmVqb2luPSJyb3VuZCIvPgo8L3N2Zz4K", dt = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjYiIGhlaWdodD0iMjQiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE4Ljc1IDYuMTZjNC4zMSAyLjYgNi40NiAzLjkgNi40NiA1Ljg0IDAgMS45NS0yLjE1IDMuMjQtNi40NiA1Ljg0bC00Ljg0IDIuOTJjLTQuMzEgMi42LTYuNDYgMy44OS04LjA4IDIuOTItMS42Mi0uOTgtMS42Mi0zLjU3LTEuNjItOC43NlY5LjA4YzAtNS4xOSAwLTcuNzggMS42Mi04Ljc2IDEuNjItLjk3IDMuNzcuMzMgOC4wOCAyLjkybDQuODQgMi45MloiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", We = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjEiIGhlaWdodD0iMjkiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE0IDMuNWEzLjUgMy41IDAgMSAxIDcgMHYyMmEzLjUgMy41IDAgMSAxLTcgMHYtMjJaIiBmaWxsPSIjZmZmIi8+CiAgPHJlY3Qgd2lkdGg9IjciIGhlaWdodD0iMjkiIHJ4PSIzLjUiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", ft = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjciIGhlaWdodD0iMjUiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHJlY3QgeD0iLjIxIiB3aWR0aD0iMjYiIGhlaWdodD0iMjUiIHJ4PSI1IiBmaWxsPSIjZmZmIi8+Cjwvc3ZnPgo=", Mt = ({
controls: {

@@ -322,3 +316,3 @@ audioRef: e,

currentAudioTime: u,
audioSrc: L,
audioSrc: w,
bufferFromRecordedBlob: v,

@@ -330,83 +324,84 @@ togglePauseResume: z,

isAvailableRecordedAudio: g,
isPausedRecordedAudio: A,
isPausedRecordedAudio: N,
isPausedRecording: h,
isProcessingStartRecording: I,
isProcessingRecordedAudio: w,
isProcessingRecordedAudio: L,
isCleared: s,
formattedDuration: H,
formattedRecordingTime: b,
formattedRecordedAudioCurrentTime: he,
clearCanvas: E,
formattedDuration: $,
formattedRecordingTime: x,
formattedRecordedAudioCurrentTime: le,
clearCanvas: b,
setCurrentAudioTime: B,
isProcessingOnResize: y,
isProcessingOnResize: A,
_setIsProcessingOnResize: W,
_setIsProcessingAudioOnComplete: $
_setIsProcessingAudioOnComplete: O
},
width: te = "100%",
height: re = 200,
speed: we = 3,
backgroundColor: F = "transparent",
mainBarColor: D = "#FFFFFF",
width: re = "100%",
height: ne = 200,
speed: Le = 3,
backgroundColor: Y = "transparent",
mainBarColor: j = "#FFFFFF",
secondaryBarColor: k = "#5e5e5e",
barWidth: le = 2,
barWidth: me = 2,
gap: J = 1,
rounded: O = 5,
rounded: Z = 5,
isControlPanelShown: Q = !0,
isDownloadAudioButtonShown: ne = !1,
animateCurrentPick: me = !0,
fullscreen: ve = !1,
isDownloadAudioButtonShown: ie = !1,
animateCurrentPick: ve = !0,
fullscreen: de = !1,
onlyRecording: V = !1,
isDefaultUIShown: Le = !0,
defaultMicrophoneIconColor: Se = D,
defaultAudioWaveIconColor: ie = D,
mainContainerClassName: ce,
isDefaultUIShown: we = !0,
defaultMicrophoneIconColor: Se = j,
defaultAudioWaveIconColor: ce = j,
mainContainerClassName: se,
canvasContainerClassName: Ne,
isProgressIndicatorShown: se = !V,
isProgressIndicatorShown: oe = !V,
progressIndicatorClassName: M,
isProgressIndicatorTimeShown: Z = !0,
isProgressIndicatorTimeShown: U = !0,
progressIndicatorTimeClassName: q,
isProgressIndicatorOnHoverShown: oe = !V,
isProgressIndicatorOnHoverShown: ae = !V,
progressIndicatorOnHoverClassName: X,
isProgressIndicatorTimeOnHoverShown: C = !0,
progressIndicatorTimeOnHoverClassName: x,
isProgressIndicatorTimeOnHoverShown: E = !0,
progressIndicatorTimeOnHoverClassName: H,
isAudioProcessingTextShown: o = !0,
audioProcessingTextClassName: Ae,
controlButtonsClassName: je
audioProcessingTextClassName: ye,
controlButtonsClassName: Ae
}) => {
const [de, be] = m(0), [T, ye] = m(0), [ae, xe] = m(0), [fe, ze] = m(0), [Y, ge] = m(!1), [De, Te] = m(window.innerWidth), [G, _e] = m(!1), Ee = De < 768, P = Math.trunc(we), ue = Math.trunc(J), n = Math.trunc(
Ee && ue > 0 ? le + 1 : le
), p = n + ue * n, l = N(null), _ = N([]), K = N(P), Qe = N(n), Ve = N(n), Me = N(null), qe = dt(De), {
const [fe, be] = m(0), [D, je] = m(0), [ue, Ee] = m(0), [ze, ge] = m(0), [G, Me] = m(!1), [Ce, xe] = m(window.innerWidth), [P, De] = m(!1), Te = Ce < 768, R = Math.trunc(Le), he = Math.trunc(J), n = Math.trunc(
Te && he > 0 ? me + 1 : me
), p = n + he * n, l = y(null), T = y([]), K = y(R), Je = y(n), Qe = y(n), ee = y(null), {
result: pe,
setResult: Xe,
run: Ke
} = mt({
fn: at,
setResult: Ve,
run: qe
} = lt({
fn: ot,
initialValue: [],
onMessageReceived: tt
}), et = vt(He);
U(() => {
He();
const j = () => {
qe.current !== window.innerWidth && (g ? (Te(window.innerWidth), W(!0), _e(!0), et()) : (Te(window.innerWidth), He()));
onMessageReceived: Ke
}), Xe = mt($e);
F(() => {
if (!ee.current)
return;
const _ = () => {
xe(window.innerWidth), g ? (W(!0), De(!0), Xe()) : $e();
}, C = new ResizeObserver(_);
return C.observe(ee.current), () => {
C.disconnect();
};
return window.addEventListener("resize", j), () => {
window.removeEventListener("resize", j);
};
}, [te, g]), ke(() => {
l.current && ((K.current >= P || !t.length) && (K.current = t.length ? 0 : P, st({
}, [re, g]), rt(() => {
l.current && ((K.current >= R || !t.length) && (K.current = t.length ? 0 : R, ct({
audioData: t,
unit: p,
index: Qe,
index2: Ve,
index: Je,
index2: Qe,
canvas: l.current,
picks: _.current,
picks: T.current,
isRecordingInProgress: r,
isPausedRecording: h,
backgroundColor: F,
mainBarColor: D,
backgroundColor: Y,
mainBarColor: j,
secondaryBarColor: k,
barWidth: n,
rounded: O,
animateCurrentPick: me,
fullscreen: ve
rounded: Z,
animateCurrentPick: ve,
fullscreen: de
})), K.current += 1);

@@ -417,33 +412,33 @@ }, [

n,
F,
D,
Y,
j,
k,
O,
ve,
Le,
fe
]), U(() => {
var j, R;
Z,
de,
we,
ze
]), F(() => {
var _, C;
if (g)
return Y ? (j = l.current) == null || j.addEventListener("mouseleave", Ze) : (R = l.current) == null || R.addEventListener("mouseenter", Oe), () => {
return G ? (_ = l.current) == null || _.addEventListener("mouseleave", Ze) : (C = l.current) == null || C.addEventListener("mouseenter", Oe), () => {
var Ie, Ye;
Y ? (Ie = l.current) == null || Ie.removeEventListener("mouseleave", Ze) : (Ye = l.current) == null || Ye.removeEventListener("mouseenter", Oe);
G ? (Ie = l.current) == null || Ie.removeEventListener("mouseleave", Ze) : (Ye = l.current) == null || Ye.removeEventListener("mouseenter", Oe);
};
}, [Y, g]), U(() => {
var R;
if (!v || !l.current || r || G)
}, [G, g]), F(() => {
var C;
if (!v || !l.current || r || P)
return;
if (V) {
E();
b();
return;
}
_.current = [];
const j = v.getChannelData(0);
return Ke({
bufferData: j,
height: ae,
width: fe,
T.current = [];
const _ = v.getChannelData(0);
return qe({
bufferData: _,
height: ue,
width: ze,
barWidth: n,
gap: ue
}), (R = l.current) == null || R.addEventListener("mousemove", Ue), () => {
gap: he
}), (C = l.current) == null || C.addEventListener("mousemove", Ue), () => {
var Ie;

@@ -457,23 +452,23 @@ (Ie = l.current) == null || Ie.removeEventListener(

v,
T,
ae,
D,
ue,
J,
le,
G
]), U(() => {
if (!(V || !(pe != null && pe.length) || !l.current || w)) {
me,
P
]), F(() => {
if (!(V || !(pe != null && pe.length) || !l.current || L)) {
if (s) {
Xe([]);
Ve([]);
return;
}
it({
nt({
barsData: pe,
canvas: l.current,
barWidth: n,
gap: ue,
backgroundColor: F,
mainBarColor: D,
gap: he,
backgroundColor: Y,
mainBarColor: j,
secondaryBarColor: k,
currentAudioTime: u,
rounded: O,
rounded: Z,
duration: c

@@ -486,47 +481,47 @@ });

s,
O,
F,
D,
Z,
Y,
j,
k
]), U(() => {
w && l.current && $e({
]), F(() => {
L && l.current && He({
canvas: l.current,
backgroundColor: F
backgroundColor: Y
});
}, [w]);
function He() {
if (!Me.current || !l.current)
}, [L]);
function $e() {
if (!ee.current || !l.current)
return;
K.current = P;
const j = Math.trunc(
Me.current.clientHeight * window.devicePixelRatio / 2
K.current = R;
const _ = Math.trunc(
ee.current.clientHeight * window.devicePixelRatio / 2
) * 2;
ye(Me.current.clientWidth), xe(j), ze(
je(ee.current.clientWidth), Ee(_), ge(
Math.round(
Me.current.clientWidth * window.devicePixelRatio
ee.current.clientWidth * window.devicePixelRatio
)
), _e(!1);
), De(!1);
}
function tt() {
W(!1), $(!1), e != null && e.current && !y && (e.current.src = L);
function Ke() {
W(!1), O(!1), e != null && e.current && !A && (e.current.src = w);
}
const Oe = () => {
ge(!0);
Me(!0);
}, Ze = () => {
ge(!1);
}, Ue = (j) => {
be(j.offsetX);
}, rt = (j) => {
Me(!1);
}, Ue = (_) => {
be(_.offsetX);
}, et = (_) => {
if (e != null && e.current && l.current) {
const R = c / T * (j.clientX - l.current.getBoundingClientRect().left);
e.current.currentTime = R, B(R);
const C = c / D * (_.clientX - l.current.getBoundingClientRect().left);
e.current.currentTime = C, B(C);
}
}, Fe = u / c * T;
return /* @__PURE__ */ ee("div", { className: `voice-visualizer ${ce ?? ""}`, children: [
/* @__PURE__ */ ee(
}, Fe = u / c * D;
return /* @__PURE__ */ te("div", { className: `voice-visualizer ${se ?? ""}`, children: [
/* @__PURE__ */ te(
"div",
{
className: `voice-visualizer__canvas-container ${Ne ?? ""}`,
ref: Me,
style: { width: Pe(te) },
ref: ee,
style: { width: Pe(re) },
children: [

@@ -537,8 +532,8 @@ /* @__PURE__ */ a(

ref: l,
width: fe,
height: ae,
onClick: rt,
width: ze,
height: ue,
onClick: et,
style: {
height: Pe(re),
width: T
height: Pe(ne),
width: D
},

@@ -548,12 +543,13 @@ children: "Your browser does not support HTML5 Canvas."

),
Le && s && /* @__PURE__ */ ee(Ge, { children: [
/* @__PURE__ */ a(Re, { color: ie }),
/* @__PURE__ */ a(Re, { color: ie, reflect: !0 }),
we && s && /* @__PURE__ */ te(Ge, { children: [
/* @__PURE__ */ a(Re, { color: ce }),
/* @__PURE__ */ a(Re, { color: ce, reflect: !0 }),
/* @__PURE__ */ a(
"button",
{
type: "button",
onClick: d,
className: "voice-visualizer__canvas-microphone-btn",
children: /* @__PURE__ */ a(
ft,
vt,
{

@@ -568,11 +564,11 @@ color: Se,

] }),
o && w && /* @__PURE__ */ a(
o && L && /* @__PURE__ */ a(
"p",
{
className: `voice-visualizer__canvas-audio-processing ${Ae ?? ""}`,
style: { color: D },
className: `voice-visualizer__canvas-audio-processing ${ye ?? ""}`,
style: { color: j },
children: "Processing Audio..."
}
),
Y && g && !w && !Ee && oe && /* @__PURE__ */ a(
G && g && !L && !Te && ae && /* @__PURE__ */ a(
"div",

@@ -582,12 +578,12 @@ {

style: {
left: de
left: fe
},
children: C && /* @__PURE__ */ a(
children: E && /* @__PURE__ */ a(
"p",
{
className: `voice-visualizer__progress-indicator-hovered-time
${T - de < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""}
${x ?? ""}`,
children: Je(
c / T * de
${D - fe < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""}
${H ?? ""}`,
children: ke(
c / D * fe
)

@@ -598,3 +594,3 @@ }

),
se && g && !w && c ? /* @__PURE__ */ a(
oe && g && !L && c ? /* @__PURE__ */ a(
"div",

@@ -604,9 +600,9 @@ {

style: {
left: Fe < T - 1 ? Fe : T - 1
left: Fe < D - 1 ? Fe : D - 1
},
children: Z && /* @__PURE__ */ a(
children: U && /* @__PURE__ */ a(
"p",
{
className: `voice-visualizer__progress-indicator-time ${T - u * T / c < 70 ? "voice-visualizer__progress-indicator-time-left" : ""} ${q ?? ""}`,
children: he
className: `voice-visualizer__progress-indicator-time ${D - u * D / c < 70 ? "voice-visualizer__progress-indicator-time-left" : ""} ${q ?? ""}`,
children: le
}

@@ -619,11 +615,12 @@ )

),
Q && /* @__PURE__ */ ee(Ge, { children: [
/* @__PURE__ */ ee("div", { className: "voice-visualizer__audio-info-container", children: [
r && /* @__PURE__ */ a("p", { className: "voice-visualizer__audio-info-time", children: b }),
c && !w ? /* @__PURE__ */ a("p", { children: H }) : null
Q && /* @__PURE__ */ te(Ge, { children: [
/* @__PURE__ */ te("div", { className: "voice-visualizer__audio-info-container", children: [
r && /* @__PURE__ */ a("p", { className: "voice-visualizer__audio-info-time", children: x }),
c && !L ? /* @__PURE__ */ a("p", { children: $ }) : null
] }),
/* @__PURE__ */ ee("div", { className: "voice-visualizer__buttons-container", children: [
/* @__PURE__ */ te("div", { className: "voice-visualizer__buttons-container", children: [
r && /* @__PURE__ */ a("div", { className: "voice-visualizer__btn-container", children: /* @__PURE__ */ a(
"button",
{
type: "button",
className: `voice-visualizer__btn-left ${h ? "voice-visualizer__btn-left-microphone" : ""}`,

@@ -643,10 +640,11 @@ onClick: z,

{
type: "button",
className: `voice-visualizer__btn-left ${r || I ? "voice-visualizer__visually-hidden" : ""}`,
onClick: z,
disabled: w,
disabled: L,
children: /* @__PURE__ */ a(
"img",
{
src: A ? zt : We,
alt: A ? "Play" : "Pause"
src: N ? dt : We,
alt: N ? "Play" : "Pause"
}

@@ -656,5 +654,6 @@ )

),
s && /* @__PURE__ */ ee(
s && /* @__PURE__ */ te(
"button",
{
type: "button",
className: `voice-visualizer__btn-center voice-visualizer__relative ${I ? "voice-visualizer__btn-center--border-transparent" : ""}`,

@@ -672,5 +671,6 @@ onClick: d,

{
type: "button",
className: `voice-visualizer__btn-center voice-visualizer__btn-center-pause ${r ? "" : "voice-visualizer__visually-hidden"}`,
onClick: f,
children: /* @__PURE__ */ a("img", { src: gt, alt: "Stop" })
children: /* @__PURE__ */ a("img", { src: ft, alt: "Stop" })
}

@@ -681,14 +681,16 @@ ),

{
onClick: E,
className: `voice-visualizer__btn ${je ?? ""}`,
disabled: w,
type: "button",
onClick: b,
className: `voice-visualizer__btn ${Ae ?? ""}`,
disabled: L,
children: "Clear"
}
),
ne && i && /* @__PURE__ */ a(
ie && i && /* @__PURE__ */ a(
"button",
{
type: "button",
onClick: S,
className: `voice-visualizer__btn ${je ?? ""}`,
disabled: w,
className: `voice-visualizer__btn ${Ae ?? ""}`,
disabled: L,
children: "Download Audio"

@@ -701,3 +703,3 @@ }

};
function wt({
function pt({
onStartRecording: e,

@@ -709,3 +711,3 @@ onStopRecording: t,

onEndAudioPlayback: u,
onStartAudioPlayback: L,
onStartAudioPlayback: w,
onPausedAudioPlayback: v,

@@ -715,4 +717,4 @@ onResumedAudioPlayback: z,

} = {}) {
const [f, S] = m(!1), [g, A] = m(!1), [h, I] = m(null), [w, s] = m(new Uint8Array(0)), [H, b] = m(!1), [he, E] = m(null), [B, y] = m(null), [W, $] = m(0), [te, re] = m(0), [we, F] = m(0), [D, k] = m(""), [le, J] = m(!0), [O, Q] = m(0), [ne, me] = m(!0), [ve, V] = m(!1), [Le, Se] = m(!1), [ie, ce] = m(null), [Ne, se] = m(!1), M = N(null), Z = N(null), q = N(null), oe = N(null), X = N(null), C = N(null), x = N(null), o = N(null), Ae = !!(B && !H), je = ht(we), de = ot(W), be = Je(O), T = ve || H;
U(() => {
const [f, S] = m(!1), [g, N] = m(!1), [h, I] = m(null), [L, s] = m(new Uint8Array(0)), [$, x] = m(!1), [le, b] = m(null), [B, A] = m(null), [W, O] = m(0), [re, ne] = m(0), [Le, Y] = m(0), [j, k] = m(""), [me, J] = m(!0), [Z, Q] = m(0), [ie, ve] = m(!0), [de, V] = m(!1), [we, Se] = m(!1), [ce, se] = m(null), [Ne, oe] = m(!1), M = y(null), U = y(null), q = y(null), ae = y(null), X = y(null), E = y(null), H = y(null), o = y(null), ye = !!(B && !$), Ae = ut(Le), fe = st(W), be = ke(Z), D = de || $;
F(() => {
if (!f || g)

@@ -722,18 +724,18 @@ return;

const l = performance.now();
$((_) => _ + (l - te)), re(l);
O((T) => T + (l - re)), ne(l);
}, 1e3);
return () => clearInterval(p);
}, [te, g, f]), U(() => {
if (ie) {
G();
}, [re, g, f]), F(() => {
if (ce) {
P();
return;
}
}, [ie]), U(() => () => {
G();
}, []), U(() => (ne || window.addEventListener("beforeunload", ye), () => {
window.removeEventListener("beforeunload", ye);
}), [ne]);
const ye = (n) => {
}, [ce]), F(() => () => {
P();
}, []), F(() => (ie || window.addEventListener("beforeunload", je), () => {
window.removeEventListener("beforeunload", je);
}), [ie]);
const je = (n) => {
n.preventDefault(), n.returnValue = "";
}, ae = async (n) => {
}, ue = async (n) => {
if (n)

@@ -746,42 +748,42 @@ try {

const l = await n.arrayBuffer(), K = await new AudioContext().decodeAudioData(l);
y(K), F(K.duration - 0.06), ce(null);
A(K), Y(K.duration - 0.06), se(null);
} catch (p) {
console.error("Error processing the audio blob:", p), ce(
console.error("Error processing the audio blob:", p), se(
p instanceof Error ? p : new Error("Error processing the audio blob")
);
}
}, xe = (n) => {
n instanceof Blob && (G(), Se(!0), me(!1), b(!0), S(!1), $(0), A(!1), o.current = new Audio(), E(n), ae(n));
}, fe = () => {
se(!0), navigator.mediaDevices.getUserMedia({ audio: !0 }).then((n) => {
me(!1), se(!1), S(!0), re(performance.now()), I(n), Z.current = new window.AudioContext(), q.current = Z.current.createAnalyser(), oe.current = new Uint8Array(
}, Ee = (n) => {
n instanceof Blob && (P(), Se(!0), ve(!1), x(!0), S(!1), O(0), N(!1), o.current = new Audio(), b(n), ue(n));
}, ze = () => {
oe(!0), navigator.mediaDevices.getUserMedia({ audio: !0 }).then((n) => {
ve(!1), oe(!1), S(!0), ne(performance.now()), I(n), U.current = new window.AudioContext(), q.current = U.current.createAnalyser(), ae.current = new Uint8Array(
q.current.frequencyBinCount
), X.current = Z.current.createMediaStreamSource(n), X.current.connect(q.current), M.current = new MediaRecorder(n), M.current.addEventListener(
), X.current = U.current.createMediaStreamSource(n), X.current.connect(q.current), M.current = new MediaRecorder(n), M.current.addEventListener(
"dataavailable",
Y
), M.current.start(), e && e(), ze();
G
), M.current.start(), e && e(), ge();
}).catch((n) => {
se(!1), ce(
oe(!1), se(
n instanceof Error ? n : new Error("Error starting audio recording")
);
});
}, ze = () => {
q.current.getByteTimeDomainData(oe.current), s(new Uint8Array(oe.current)), C.current = requestAnimationFrame(ze);
}, Y = (n) => {
M.current && (M.current = null, o.current = new Audio(), E(n.data), ae(n.data));
}, ge = () => {
o.current && (Q(o.current.currentTime), x.current = requestAnimationFrame(ge));
}, De = () => {
f || Ne || (ne || G(), fe());
}, Te = () => {
q.current.getByteTimeDomainData(ae.current), s(new Uint8Array(ae.current)), E.current = requestAnimationFrame(ge);
}, G = (n) => {
M.current && (M.current = null, o.current = new Audio(), b(n.data), ue(n.data));
}, Me = () => {
o.current && (Q(o.current.currentTime), H.current = requestAnimationFrame(Me));
}, Ce = () => {
f || Ne || (ie || P(), ze());
}, xe = () => {
f && (S(!1), M.current && (M.current.stop(), M.current.removeEventListener(
"dataavailable",
Y
)), h == null || h.getTracks().forEach((n) => n.stop()), C.current && cancelAnimationFrame(C.current), X.current && X.current.disconnect(), Z.current && Z.current.state !== "closed" && Z.current.close(), b(!0), $(0), A(!1), t && t());
}, G = () => {
C.current && (cancelAnimationFrame(C.current), C.current = null), x.current && (cancelAnimationFrame(x.current), x.current = null), M.current && (M.current.removeEventListener(
G
)), h == null || h.getTracks().forEach((n) => n.stop()), E.current && cancelAnimationFrame(E.current), X.current && X.current.disconnect(), U.current && U.current.state !== "closed" && U.current.close(), x(!0), O(0), N(!1), t && t());
}, P = () => {
E.current && (cancelAnimationFrame(E.current), E.current = null), H.current && (cancelAnimationFrame(H.current), H.current = null), M.current && (M.current.removeEventListener(
"dataavailable",
Y
), M.current.stop(), M.current = null), h == null || h.getTracks().forEach((n) => n.stop()), o != null && o.current && (o.current.removeEventListener("ended", P), o.current.pause(), o.current.src = "", o.current = null), Z.current = null, q.current = null, oe.current = null, X.current = null, I(null), se(!1), S(!1), Se(!1), b(!1), E(null), y(null), $(0), re(0), F(0), k(""), Q(0), J(!0), A(!1), V(!1), s(new Uint8Array(0)), ce(null), me(!0), c && c();
}, _e = () => {
G
), M.current.stop(), M.current = null), h == null || h.getTracks().forEach((n) => n.stop()), o != null && o.current && (o.current.removeEventListener("ended", R), o.current.pause(), o.current.src = "", o.current = null), U.current = null, q.current = null, ae.current = null, X.current = null, I(null), oe(!1), S(!1), Se(!1), x(!1), b(null), A(null), O(0), ne(0), Y(0), k(""), Q(0), J(!0), N(!1), V(!1), s(new Uint8Array(0)), se(null), ve(!0), c && c();
}, De = () => {
if (o.current && o.current.paused) {

@@ -795,26 +797,26 @@ const n = o.current.play();

}
}, Ee = () => {
}, Te = () => {
var n, p, l;
if (f) {
A((_) => !_), ((n = M.current) == null ? void 0 : n.state) === "recording" ? ((p = M.current) == null || p.pause(), $((_) => _ + (performance.now() - te)), C.current && cancelAnimationFrame(C.current), r && r()) : (C.current = requestAnimationFrame(ze), (l = M.current) == null || l.resume(), re(performance.now()), i && i());
N((T) => !T), ((n = M.current) == null ? void 0 : n.state) === "recording" ? ((p = M.current) == null || p.pause(), O((T) => T + (performance.now() - re)), E.current && cancelAnimationFrame(E.current), r && r()) : (E.current = requestAnimationFrame(ge), (l = M.current) == null || l.resume(), ne(performance.now()), i && i());
return;
}
if (o.current && Ae)
if (o.current && ye)
if (o.current.paused)
requestAnimationFrame(ge), _e(), o.current.addEventListener("ended", P), J(!1), L && O === 0 && L(), z && O !== 0 && z();
requestAnimationFrame(Me), De(), o.current.addEventListener("ended", R), J(!1), w && Z === 0 && w(), z && Z !== 0 && z();
else {
x.current && cancelAnimationFrame(x.current), o.current.removeEventListener("ended", P), o.current.pause(), J(!0);
const _ = o.current.currentTime;
Q(_), o.current.currentTime = _, v && v();
H.current && cancelAnimationFrame(H.current), o.current.removeEventListener("ended", R), o.current.pause(), J(!0);
const T = o.current.currentTime;
Q(T), o.current.currentTime = T, v && v();
}
}, P = () => {
x.current && cancelAnimationFrame(x.current), J(!0), o != null && o.current && (o.current.currentTime = 0, Q(0), u && u());
}, ue = () => {
}, R = () => {
H.current && cancelAnimationFrame(H.current), J(!0), o != null && o.current && (o.current.currentTime = 0, Q(0), u && u());
}, he = () => {
var p;
if (!D)
if (!j)
return;
const n = document.createElement("a");
n.href = D, n.download = `recorded_audio${ut(
n.href = j, n.download = `recorded_audio${at(
(p = M.current) == null ? void 0 : p.mimeType
)}`, document.body.appendChild(n), n.click(), document.body.removeChild(n), URL.revokeObjectURL(D);
)}`, document.body.appendChild(n), n.click(), document.body.removeChild(n), URL.revokeObjectURL(j);
};

@@ -825,29 +827,29 @@ return {

isPausedRecording: g,
audioData: w,
audioData: L,
recordingTime: W,
isProcessingRecordedAudio: T,
recordedBlob: he,
isProcessingRecordedAudio: D,
recordedBlob: le,
mediaRecorder: M.current,
duration: we,
currentAudioTime: O,
audioSrc: D,
isPausedRecordedAudio: le,
duration: Le,
currentAudioTime: Z,
audioSrc: j,
isPausedRecordedAudio: me,
bufferFromRecordedBlob: B,
isCleared: ne,
isAvailableRecordedAudio: Ae,
formattedDuration: je,
formattedRecordingTime: de,
isCleared: ie,
isAvailableRecordedAudio: ye,
formattedDuration: Ae,
formattedRecordingTime: fe,
formattedRecordedAudioCurrentTime: be,
startRecording: De,
togglePauseResume: Ee,
stopRecording: Te,
saveAudioFile: ue,
clearCanvas: G,
startRecording: Ce,
togglePauseResume: Te,
stopRecording: xe,
saveAudioFile: he,
clearCanvas: P,
setCurrentAudioTime: Q,
error: ie,
isProcessingOnResize: ve,
error: ce,
isProcessingOnResize: de,
isProcessingStartRecording: Ne,
isPreloadedBlob: Le,
setPreloadedAudioBlob: xe,
_setIsProcessingAudioOnComplete: b,
isPreloadedBlob: we,
setPreloadedAudioBlob: Ee,
_setIsProcessingAudioOnComplete: x,
_setIsProcessingOnResize: V

@@ -857,4 +859,4 @@ };

export {
It as VoiceVisualizer,
wt as useVoiceVisualizer
Mt as VoiceVisualizer,
pt as useVoiceVisualizer
};
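Apart from minifier-driven identifier renames, the changes visible in this bundle appear to be a switch from a window resize listener to a ResizeObserver on the canvas container and the addition of explicit type="button" attributes on the control-panel buttons; the public exports, VoiceVisualizer and useVoiceVisualizer, keep the same names and the same controls prop wiring. Below is a minimal usage sketch under that reading; the component name, the logging, and the layout values are illustrative only (width and height simply repeat the defaults visible in the bundle).

import { useEffect } from "react";
import { useVoiceVisualizer, VoiceVisualizer } from "react-voice-visualizer";

function RecorderDemo() {
  // The hook owns all recording/playback state; the component only needs the returned controls object.
  const recorderControls = useVoiceVisualizer();
  const { recordedBlob, error } = recorderControls;

  // recordedBlob is populated once a recording has been stopped.
  useEffect(() => {
    if (recordedBlob) console.log("Recorded blob:", recordedBlob);
  }, [recordedBlob]);

  // error is set if microphone access or audio decoding fails.
  useEffect(() => {
    if (error) console.error(error);
  }, [error]);

  return <VoiceVisualizer controls={recorderControls} width="100%" height={200} />;
}

export default RecorderDemo;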
package.json
{
"name": "react-voice-visualizer",
"private": false,
"version": "2.0.4",
"version": "2.0.5",
"type": "module",

@@ -6,0 +6,0 @@ "author": "Yurii Zarytskyi",

Sorry, the diff of this file is not supported yet