You're Invited:Meet the Socket Team at BlackHat and DEF CON in Las Vegas, Aug 4-6.RSVP
Socket
Book a DemoInstallSign in
Socket

react-voice-visualizer

Package Overview
Dependencies
Maintainers
1
Versions
44
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

react-voice-visualizer - npm Package Compare versions

Comparing version

to
1.1.0

1

dist/components/VoiceVisualizer.d.ts

@@ -22,2 +22,3 @@ /// <reference types="react" />

defaultAudioWaveIconColor?: string;
mainContainerClassName?: string;
canvasContainerClassName?: string;

@@ -24,0 +25,0 @@ isProgressIndicatorShown?: boolean;

4

dist/helpers/getBarsData.d.ts

@@ -1,2 +0,2 @@

import { BarsData } from "../types/types.ts";
export declare const getBarsData: (buffer: AudioBuffer, height: number, width: number, barWidth: number, gap: number) => BarsData[];
import { BarsData, GetBarsDataParams } from "../types/types.ts";
export declare const getBarsData: ({ buffer, height, width, barWidth, gap, }: GetBarsDataParams) => BarsData[];

@@ -1,11 +0,11 @@

(function(){"use strict";(e=>{try{if(typeof window>"u")return;var i=document.createElement("style");i.appendChild(document.createTextNode(e)),document.head.appendChild(i)}catch(o){console.error("vite-plugin-css-injected-by-js",o)}})(".voice-visualizer__buttons-container{display:flex;justify-content:center;align-items:center;column-gap:20px;row-gap:15px;flex-wrap:wrap;margin-bottom:40px}.voice-visualizer__btn-center{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#fff;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s}.voice-visualizer__btn-center:hover{background-color:#eaeaea}.voice-visualizer__btn-center>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause{background-color:#ff3030}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause:hover{background-color:#ff4f4f}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause>img{height:50%;max-height:16px}.voice-visualizer__btn-center:hover{border:4px solid #9f9f9f}.voice-visualizer__btn-left{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#ff3030;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s,opacity .3s}.voice-visualizer__btn-left:hover{background-color:#ff4f4f}.voice-visualizer__btn-left:disabled{opacity:.6;background-color:#ff3030}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone{background-color:#fff}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-left>img{width:auto;height:50%;max-height:16px}.voice-visualizer__btn-left:hover{border:4px solid 
#9f9f9f}.voice-visualizer__btn{box-sizing:border-box;min-width:100px;min-height:60px;padding:5px 20px;border-radius:40px;font-size:15px;background-color:#f0f0f0;transition:background-color .3s,opacity .3s}.voice-visualizer__btn:disabled{opacity:.8;background-color:#f0f0f0}.voice-visualizer__btn:hover{background-color:#bebebe}.voice-visualizer__canvas-container{position:relative;width:fit-content;margin:0 auto;overflow:hidden}.voice-visualizer__canvas-container canvas{display:block}.voice-visualizer__canvas-microphone-btn{position:absolute;top:50%;left:50%;width:auto;max-width:12%;min-width:24px;height:50%;max-height:100px;background-color:transparent;border:none;outline:none;transform:translate(-50%,-50%)}.voice-visualizer__canvas-microphone-icon{width:100%;height:100%;will-change:transform;transition:transform .35s}.voice-visualizer__canvas-microphone-btn:hover .voice-visualizer__canvas-microphone-icon{transform:scale(1.03)}.voice-visualizer__canvas-audio-wave-icon{position:absolute;top:50%;left:50%;width:auto;max-width:40%;height:40%;max-height:100px;transform:translate(-118%,-50%) scale(-1)}.voice-visualizer__canvas-audio-wave-icon2{transform:translate(18%,-50%)}.voice-visualizer__canvas-audio-processing{position:absolute;top:50%;left:50%;margin:0;transform:translate(-50%,-50%)}.voice-visualizer__progress-indicator-hovered{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#85858599}.voice-visualizer__progress-indicator-hovered-time{position:absolute;top:3%;left:1px;width:fit-content;margin:0;padding:0 7px;font-size:12px;border-radius:0 4px 4px 0;background-color:#575757;opacity:.8}.voice-visualizer__progress-indicator-hovered-time.voice-visualizer__progress-indicator-hovered-time-left{left:unset;right:1px;border-radius:4px 0 0 
4px}.voice-visualizer__progress-indicator{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#efefef}.voice-visualizer__progress-indicator-time{position:absolute;top:3%;left:1px;width:fit-content;box-sizing:border-box;min-width:41px;margin:0;padding:0 7px;font-size:12px;border-radius:0 4px 4px 0;text-align:left;color:#000;font-weight:500;background-color:#efefef}.voice-visualizer__progress-indicator-time.voice-visualizer__progress-indicator-time-left{left:unset;right:1px;border-radius:4px 0 0 4px}.voice-visualizer__audio-info-container{box-sizing:border-box;height:55px;display:flex;align-items:center;justify-content:center;gap:30px}.voice-visualizer__audio-info-time{margin:15px 0;min-width:38px;text-align:left}.voice-visualizer__visually-hidden{position:absolute;width:1px;height:1px;margin:-1px;padding:0;border:4px solid #c5c5c5;white-space:nowrap;clip-path:inset(100%);clip:rect(0 0 0 0);overflow:hidden}")})();
import { jsx as s, jsxs as te, Fragment as Ee } from "react/jsx-runtime";
import { forwardRef as Oe, useState as m, useRef as E, useEffect as $, useLayoutEffect as Ue } from "react";
const pe = ({
(function(){"use strict";(e=>{try{if(typeof window>"u")return;var i=document.createElement("style");i.appendChild(document.createTextNode(e)),document.head.appendChild(i)}catch(o){console.error("vite-plugin-css-injected-by-js",o)}})(".voice-visualizer__buttons-container{display:flex;justify-content:center;align-items:center;column-gap:20px;row-gap:15px;flex-wrap:wrap;margin-bottom:40px}.voice-visualizer__btn-center{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#fff;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s}.voice-visualizer__btn-center:hover{background-color:#eaeaea}.voice-visualizer__btn-center>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause{background-color:#ff3030}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause:hover{background-color:#ff4f4f}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause>img{height:50%;max-height:16px}.voice-visualizer__btn-center:hover{border:4px solid #9f9f9f}.voice-visualizer__btn-left{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#ff3030;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s,opacity .3s}.voice-visualizer__btn-left:hover{background-color:#ff4f4f}.voice-visualizer__btn-left:disabled{opacity:.6;background-color:#ff3030}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone{background-color:#fff}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-left>img{width:auto;height:50%;max-height:16px}.voice-visualizer__btn-left:hover{border:4px solid 
#9f9f9f}.voice-visualizer__btn{box-sizing:border-box;min-width:100px;min-height:60px;padding:5px 20px;border-radius:40px;font-size:15px;background-color:#f0f0f0;transition:background-color .3s,opacity .3s}.voice-visualizer__btn:disabled{opacity:.8;background-color:#f0f0f0}.voice-visualizer__btn:hover{background-color:#bebebe}.voice-visualizer__canvas-container{position:relative;width:fit-content;margin:0 auto;overflow:hidden}.voice-visualizer__canvas-container canvas{display:block}.voice-visualizer__canvas-microphone-btn{position:absolute;top:50%;left:50%;width:auto;max-width:12%;min-width:24px;height:50%;max-height:100px;background-color:transparent;border:none;outline:none;transform:translate(-50%,-50%)}.voice-visualizer__canvas-microphone-icon{width:100%;height:100%;will-change:transform;transition:transform .35s}.voice-visualizer__canvas-microphone-btn:hover .voice-visualizer__canvas-microphone-icon{transform:scale(1.03)}.voice-visualizer__canvas-audio-wave-icon{position:absolute;top:50%;left:50%;width:auto;max-width:40%;height:40%;max-height:100px;transform:translate(-118%,-50%) scale(-1)}.voice-visualizer__canvas-audio-wave-icon2{transform:translate(18%,-50%)}.voice-visualizer__canvas-audio-processing{position:absolute;top:50%;left:50%;margin:0;transform:translate(-50%,-50%)}.voice-visualizer__progress-indicator-hovered{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#85858599}.voice-visualizer__progress-indicator-hovered-time{position:absolute;top:3%;left:1px;width:fit-content;margin:0;padding:0 7px;opacity:.8;font-size:12px;border-radius:0 4px 4px 0;background-color:#575757;text-align:left}.voice-visualizer__progress-indicator-hovered-time.voice-visualizer__progress-indicator-hovered-time-left{left:unset;right:1px;border-radius:4px 0 0 
4px;text-align:right}.voice-visualizer__progress-indicator{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#efefef}.voice-visualizer__progress-indicator-time{position:absolute;top:3%;left:1px;width:fit-content;box-sizing:border-box;min-width:41px;margin:0;padding:0 7px;font-size:12px;border-radius:0 4px 4px 0;text-align:left;color:#000;font-weight:500;background-color:#efefef}.voice-visualizer__progress-indicator-time.voice-visualizer__progress-indicator-time-left{left:unset;right:1px;border-radius:4px 0 0 4px;text-align:right}.voice-visualizer__audio-info-container{box-sizing:border-box;height:55px;display:flex;align-items:center;justify-content:center;gap:30px}.voice-visualizer__audio-info-time{margin:15px 0;min-width:38px;text-align:left}.voice-visualizer__visually-hidden{position:absolute;width:1px;height:1px;margin:-1px;padding:0;border:4px solid #c5c5c5;white-space:nowrap;clip-path:inset(100%);clip:rect(0 0 0 0);overflow:hidden}")})();
import { jsx as s, jsxs as ne, Fragment as Te } from "react/jsx-runtime";
import { forwardRef as Fe, useState as v, useRef as y, useEffect as x, useLayoutEffect as Ge } from "react";
const we = ({
canvas: e,
backgroundColor: r
}) => {
const n = e.height, t = e.width, i = Math.round(t / 2), o = e.getContext("2d");
return o ? (o.clearRect(0, 0, t, n), r !== "transparent" && (o.fillStyle = r, o.fillRect(0, 0, t, n)), { context: o, height: n, width: t, halfWidth: i }) : null;
}, de = ({
const n = e.height, t = e.width, c = Math.round(t / 2), o = e.getContext("2d");
return o ? (o.clearRect(0, 0, t, n), r !== "transparent" && (o.fillStyle = r, o.fillRect(0, 0, t, n)), { context: o, height: n, width: t, halfWidth: c }) : null;
}, ze = ({
context: e,

@@ -15,8 +15,8 @@ color: r,

x: t,
y: i,
y: c,
w: o,
h: d
h: w
}) => {
e.fillStyle = r, e.beginPath(), e.roundRect ? (e.roundRect(t, i, o, d, n), e.fill()) : e.fillRect(t, i, o, d);
}, Ye = ({
e.fillStyle = r, e.beginPath(), e.roundRect ? (e.roundRect(t, c, o, w, n), e.fill()) : e.fillRect(t, c, o, w);
}, Be = ({
barsData: e,

@@ -26,22 +26,22 @@ canvas: r,

gap: t,
backgroundColor: i,
backgroundColor: c,
mainBarColor: o,
secondaryBarColor: d,
currentAudioTime: M = 0,
rounded: b,
secondaryBarColor: w,
currentAudioTime: L = 0,
rounded: A,
duration: u
}) => {
const z = pe({ canvas: r, backgroundColor: i });
if (!z)
const g = we({ canvas: r, backgroundColor: c });
if (!g)
return;
const { context: S, height: I } = z, N = M / u;
e.forEach((f, a) => {
const p = a / e.length, h = N > p;
de({
const { context: S, height: f } = g, b = L / u;
e.forEach((M, h) => {
const d = h / e.length, a = b > d;
ze({
context: S,
color: h ? d : o,
rounded: b,
x: a * (n + t * n),
y: I / 2 - f.max,
h: f.max * 2,
color: a ? w : o,
rounded: A,
x: h * (n + t * n),
y: f / 2 - M.max,
h: M.max * 2,
w: n

@@ -51,3 +51,3 @@ });

};
function Ge({
function ke({
context: e,

@@ -57,6 +57,6 @@ color: r,

width: t,
height: i,
height: c,
barWidth: o
}) {
de({
ze({
context: e,

@@ -66,3 +66,3 @@ color: r,

x: t / 2 + o / 2,
y: i / 2 - 1,
y: c / 2 - 1,
h: 2,

@@ -72,3 +72,3 @@ w: t - (t / 2 + o / 2)

}
const Fe = ({
const We = ({
audioData: e,

@@ -78,66 +78,66 @@ unit: r,

index2: t,
canvas: i,
canvas: c,
isRecordingInProgress: o,
isPausedRecording: d,
picks: M,
backgroundColor: b,
isPausedRecording: w,
picks: L,
backgroundColor: A,
barWidth: u,
mainBarColor: z,
mainBarColor: g,
secondaryBarColor: S,
rounded: I,
animateCurrentPick: N,
fullscreen: f
rounded: f,
animateCurrentPick: b,
fullscreen: M
}) => {
const a = pe({ canvas: i, backgroundColor: b });
if (!a)
const h = we({ canvas: c, backgroundColor: A });
if (!h)
return;
const { context: p, height: h, width: R, halfWidth: Z } = a;
const { context: d, height: a, width: P, halfWidth: O } = h;
if (e != null && e.length && o) {
const T = Math.max(...e);
if (!d) {
const $ = Math.max(...e);
if (!w) {
if (t.current >= u) {
t.current = 0;
const L = (h - T / 258 * h) / h * 100, F = (-h + T / 258 * h * 2) / h * 100, O = n.current === u ? {
startY: L,
barHeight: F
const I = (a - $ / 258 * a) / a * 100, T = (-a + $ / 258 * a * 2) / a * 100, _ = n.current === u ? {
startY: I,
barHeight: T
} : null;
n.current >= r ? n.current = u : n.current += u, M.length > (f ? R : Z) / u && M.pop(), M.unshift(O);
n.current >= r ? n.current = u : n.current += u, L.length > (M ? P : O) / u && L.pop(), L.unshift(_);
}
t.current += 1;
}
!f && P(), N && de({
context: p,
rounded: I,
color: z,
x: f ? R : Z,
y: h - T / 258 * h,
h: -h + T / 258 * h * 2,
!M && H(), b && ze({
context: d,
rounded: f,
color: g,
x: M ? P : O,
y: a - $ / 258 * a,
h: -a + $ / 258 * a * 2,
w: u
});
let x = (f ? R : Z) - t.current;
M.forEach((L) => {
L && de({
context: p,
color: z,
rounded: I,
x,
y: L.startY * h / 100 > h / 2 - 1 ? h / 2 - 1 : L.startY * h / 100,
h: L.barHeight * h / 100 > 2 ? L.barHeight * h / 100 : 2,
let q = (M ? P : O) - t.current;
L.forEach((I) => {
I && ze({
context: d,
color: g,
rounded: f,
x: q,
y: I.startY * a / 100 > a / 2 - 1 ? a / 2 - 1 : I.startY * a / 100,
h: I.barHeight * a / 100 > 2 ? I.barHeight * a / 100 : 2,
w: u
}), x -= u;
}), q -= u;
});
} else
M.length = 0;
function P() {
Ge({
context: p,
L.length = 0;
function H() {
ke({
context: d,
color: S,
rounded: I,
width: R,
height: h,
rounded: f,
width: P,
height: a,
barWidth: u
});
}
}, Te = (e) => {
const r = Math.floor(e / 3600), n = Math.floor(e % 3600 / 60), t = e % 60, i = Math.floor(
}, _e = (e) => {
const r = Math.floor(e / 3600), n = Math.floor(e % 3600 / 60), t = e % 60, c = Math.floor(
(t - Math.floor(t)) * 1e3

@@ -149,16 +149,16 @@ );

)}:${String(Math.floor(t)).padStart(2, "0")}:${String(
i
c
).charAt(0)}` : n > 0 ? `${String(n).padStart(2, "0")}:${String(
Math.floor(t)
).padStart(2, "0")}:${String(i).charAt(0)}` : `${String(Math.floor(t)).padStart(2, "0")}:${String(
i
).padStart(2, "0")}:${String(c).charAt(0)}` : `${String(Math.floor(t)).padStart(2, "0")}:${String(
c
).charAt(0)}`;
}, ke = (e) => {
const r = Math.floor(e / 1e3), n = Math.floor(r / 3600), t = Math.floor(r % 3600 / 60), i = r % 60;
}, Je = (e) => {
const r = Math.floor(e / 1e3), n = Math.floor(r / 3600), t = Math.floor(r % 3600 / 60), c = r % 60;
return n > 0 ? `${String(n).padStart(2, "0")}:${String(t).padStart(
2,
"0"
)}:${String(i).padStart(2, "0")}` : `${String(t).padStart(2, "0")}:${String(i).padStart(2, "0")}`;
)}:${String(c).padStart(2, "0")}` : `${String(t).padStart(2, "0")}:${String(c).padStart(2, "0")}`;
};
function _e(e) {
function Ce(e) {
if (typeof e == "string") {

@@ -171,23 +171,29 @@ const r = Number(e);

}
const Be = (e, r, n, t, i) => {
const o = e.getChannelData(0), d = n / (t + i * t), M = Math.floor(o.length / d), b = r / 2;
let u = [], z = 0;
for (let S = 0; S < d; S++) {
const I = [];
let N = 0;
for (let a = 0; a < M && S * M + a < e.length; a++) {
const p = o[S * M + a];
p > 0 && (I.push(p), N++);
const Qe = ({
buffer: e,
height: r,
width: n,
barWidth: t,
gap: c
}) => {
const o = e.getChannelData(0), w = n / (t + c * t), L = Math.floor(o.length / w), A = r / 2;
let u = [], g = 0;
for (let S = 0; S < w; S++) {
const f = [];
let b = 0;
for (let h = 0; h < L && S * L + h < e.length; h++) {
const d = o[S * L + h];
d > 0 && (f.push(d), b++);
}
const f = I.reduce((a, p) => a + p, 0) / N;
f > z && (z = f), u.push({ max: f });
const M = f.reduce((h, d) => h + d, 0) / b;
M > g && (g = M), u.push({ max: M });
}
if (b * 0.95 > z * b) {
const S = b * 0.95 / z;
u = u.map((I) => ({
max: I.max > 0.01 ? I.max * S : 1
if (A * 0.95 > g * A) {
const S = A * 0.95 / g;
u = u.map((f) => ({
max: f.max > 0.01 ? f.max * S : 1
}));
}
return u;
}, We = (e) => {
}, Ve = (e) => {
if (!e)

@@ -197,4 +203,4 @@ return "";

return r && r.length >= 2 ? `.${r[1]}` : "";
}, Je = (e) => {
const r = Math.floor(e / 3600), n = Math.floor(e % 3600 / 60), t = e % 60, i = Math.floor(
}, qe = (e) => {
const r = Math.floor(e / 3600), n = Math.floor(e % 3600 / 60), t = e % 60, c = Math.floor(
(t - Math.floor(t)) * 1e3

@@ -208,6 +214,6 @@ );

).padStart(2, "0")}m` : `${String(Math.floor(t)).padStart(2, "0")}:${String(
i
).charAt(0)}${String(i).charAt(1)}s`;
c
).charAt(0)}${String(c).charAt(1)}s`;
};
const Qe = ({
const Xe = ({
color: e = "#000000",

@@ -234,3 +240,3 @@ stroke: r = 2,

}
), be = ({
), xe = ({
color: e = "#FFFFFF",

@@ -252,3 +258,3 @@ reflect: r

}
), xe = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjMiIGhlaWdodD0iMzMiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTEuMSAxNi43MmMwIDMgLjk2IDUuOCAzLjYxIDcuOTVhOS45NiA5Ljk2IDAgMCAwIDYuNSAyLjE3bTAgMHY0LjM0aDQuMzQtOC42N200LjM0LTQuMzRjMi4zNSAwIDQuNDItLjQ4IDYuNS0yLjE3YTkuODcgOS44NyAwIDAgMCAzLjYxLTcuOTVNMTEuMjIgMS44MmMtMS40NSAwLTIuNS4zNy0zLjMuOTNhNS42IDUuNiAwIDAgMC0xLjg0IDIuNGMtLjkgMi4wNi0xLjEgNC43Ny0xLjEgNy4yNCAwIDIuNDYuMiA1LjE3IDEuMSA3LjI0YTUuNiA1LjYgMCAwIDAgMS44NCAyLjRjLjguNTUgMS44NS45MiAzLjMuOTIgMS40NCAwIDIuNS0uMzcgMy4yOS0uOTNhNS42IDUuNiAwIDAgMCAxLjg0LTIuNGMuOS0yLjA2IDEuMS00Ljc3IDEuMS03LjIzIDAtMi40Ny0uMi01LjE4LTEuMS03LjI0YTUuNiA1LjYgMCAwIDAtMS44NC0yLjQgNS41MiA1LjUyIDAgMCAwLTMuMy0uOTNaIiBzdHJva2U9IiMwMDAiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCIgc3Ryb2tlLWxpbmVqb2luPSJyb3VuZCIvPgo8L3N2Zz4K", Ve = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjYiIGhlaWdodD0iMjQiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE4Ljc1IDYuMTZjNC4zMSAyLjYgNi40NiAzLjkgNi40NiA1Ljg0IDAgMS45NS0yLjE1IDMuMjQtNi40NiA1Ljg0bC00Ljg0IDIuOTJjLTQuMzEgMi42LTYuNDYgMy44OS04LjA4IDIuOTItMS42Mi0uOTgtMS42Mi0zLjU3LTEuNjItOC43NlY5LjA4YzAtNS4xOSAwLTcuNzggMS42Mi04Ljc2IDEuNjItLjk3IDMuNzcuMzMgOC4wOCAyLjkybDQuODQgMi45MloiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", Ce = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjEiIGhlaWdodD0iMjkiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE0IDMuNWEzLjUgMy41IDAgMSAxIDcgMHYyMmEzLjUgMy41IDAgMSAxLTcgMHYtMjJaIiBmaWxsPSIjZmZmIi8+CiAgPHJlY3Qgd2lkdGg9IjciIGhlaWdodD0iMjkiIHJ4PSIzLjUiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", qe = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjciIGhlaWdodD0iMjUiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHJlY3QgeD0iLjIxIiB3aWR0aD0iMjYiIGhlaWdodD0iMjUiIHJ4PSI1IiBmaWxsPSIjZmZmIi8+Cjwvc3ZnPgo=", et = Oe(
), He = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjMiIGhlaWdodD0iMzMiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTEuMSAxNi43MmMwIDMgLjk2IDUuOCAzLjYxIDcuOTVhOS45NiA5Ljk2IDAgMCAwIDYuNSAyLjE3bTAgMHY0LjM0aDQuMzQtOC42N200LjM0LTQuMzRjMi4zNSAwIDQuNDItLjQ4IDYuNS0yLjE3YTkuODcgOS44NyAwIDAgMCAzLjYxLTcuOTVNMTEuMjIgMS44MmMtMS40NSAwLTIuNS4zNy0zLjMuOTNhNS42IDUuNiAwIDAgMC0xLjg0IDIuNGMtLjkgMi4wNi0xLjEgNC43Ny0xLjEgNy4yNCAwIDIuNDYuMiA1LjE3IDEuMSA3LjI0YTUuNiA1LjYgMCAwIDAgMS44NCAyLjRjLjguNTUgMS44NS45MiAzLjMuOTIgMS40NCAwIDIuNS0uMzcgMy4yOS0uOTNhNS42IDUuNiAwIDAgMCAxLjg0LTIuNGMuOS0yLjA2IDEuMS00Ljc3IDEuMS03LjIzIDAtMi40Ny0uMi01LjE4LTEuMS03LjI0YTUuNiA1LjYgMCAwIDAtMS44NC0yLjQgNS41MiA1LjUyIDAgMCAwLTMuMy0uOTNaIiBzdHJva2U9IiMwMDAiIHN0cm9rZS1saW5lY2FwPSJyb3VuZCIgc3Ryb2tlLWxpbmVqb2luPSJyb3VuZCIvPgo8L3N2Zz4K", Ke = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjYiIGhlaWdodD0iMjQiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE4Ljc1IDYuMTZjNC4zMSAyLjYgNi40NiAzLjkgNi40NiA1Ljg0IDAgMS45NS0yLjE1IDMuMjQtNi40NiA1Ljg0bC00Ljg0IDIuOTJjLTQuMzEgMi42LTYuNDYgMy44OS04LjA4IDIuOTItMS42Mi0uOTgtMS42Mi0zLjU3LTEuNjItOC43NlY5LjA4YzAtNS4xOSAwLTcuNzggMS42Mi04Ljc2IDEuNjItLjk3IDMuNzcuMzMgOC4wOCAyLjkybDQuODQgMi45MloiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", $e = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjEiIGhlaWdodD0iMjkiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHBhdGggZD0iTTE0IDMuNWEzLjUgMy41IDAgMSAxIDcgMHYyMmEzLjUgMy41IDAgMSAxLTcgMHYtMjJaIiBmaWxsPSIjZmZmIi8+CiAgPHJlY3Qgd2lkdGg9IjciIGhlaWdodD0iMjkiIHJ4PSIzLjUiIGZpbGw9IiNmZmYiLz4KPC9zdmc+Cg==", et = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMjciIGhlaWdodD0iMjUiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CiAgPHJlY3QgeD0iLjIxIiB3aWR0aD0iMjYiIGhlaWdodD0iMjUiIHJ4PSI1IiBmaWxsPSIjZmZmIi8+Cjwvc3ZnPgo=", nt = Fe(
({

@@ -260,196 +266,193 @@ controls: {

duration: t,
audioSrc: i,
audioSrc: c,
currentAudioTime: o,
bufferFromRecordedBlob: d,
togglePauseResume: M,
startRecording: b,
bufferFromRecordedBlob: w,
togglePauseResume: L,
startRecording: A,
stopRecording: u,
saveAudioFile: z,
saveAudioFile: g,
recordingTime: S,
isPausedRecordedAudio: I,
isPausedRecording: N,
isProcessingRecordedAudio: f,
isCleared: a,
clearCanvas: p,
_handleTimeUpdate: h
isAvailableRecordedAudio: f,
isPausedRecordedAudio: b,
isPausedRecording: M,
isProcessingRecordedAudio: h,
isCleared: d,
clearCanvas: a,
_setIsProcessingRecordedAudio: P,
_handleTimeUpdate: O
},
width: R = "100%",
height: Z = 200,
speed: P = 3,
backgroundColor: T = "transparent",
mainBarColor: x = "#FFFFFF",
secondaryBarColor: L = "#5e5e5e",
barWidth: F = 2,
gap: O = 1,
rounded: J = 5,
isControlPanelShown: re = !0,
isDownloadAudioButtonShown: ne = !1,
animateCurrentPick: U = !0,
fullscreen: l = !0,
onlyRecording: A = !1,
isDefaultUIShown: Y = !1,
defaultMicrophoneIconColor: Q = x,
defaultAudioWaveIconColor: C = x,
canvasContainerClassName: _,
isProgressIndicatorShown: V = !A,
progressIndicatorClassName: v,
isProgressIndicatorTimeShown: ze = !0,
progressIndicatorTimeClassName: ie,
isProgressIndicatorOnHoverShown: q = !A,
progressIndicatorOnHoverClassName: he,
isProgressIndicatorTimeOnHoverShown: fe = !0,
progressIndicatorTimeOnHoverClassName: ge,
isAudioProcessingTextShown: X = !0,
audioProcessingTextClassName: Me,
controlButtonsClassName: ue
}, H) => {
const [ce, c] = m(0), [y, j] = m([]), [w, K] = m(0), [k, B] = m(0), [ee, oe] = m(0), [le, Le] = m(!1), [Se, He] = m(0), Ie = Math.trunc(P), ve = Math.trunc(O), G = Math.trunc(
Se < 768 && ve > 0 ? F + 1 : F
), g = E(null), we = E([]), me = E(Ie), $e = E(G), Re = E(G), ae = E(null), Ze = G + ve * G;
$(() => {
const D = () => {
if (!ae.current || !g.current)
width: H = "100%",
height: $ = 200,
speed: q = 3,
backgroundColor: I = "transparent",
mainBarColor: T = "#FFFFFF",
secondaryBarColor: _ = "#5e5e5e",
barWidth: J = 2,
gap: X = 1,
rounded: U = 5,
isControlPanelShown: ge = !0,
isDownloadAudioButtonShown: ie = !1,
animateCurrentPick: Y = !0,
fullscreen: l = !1,
onlyRecording: N = !1,
isDefaultUIShown: F = !0,
defaultMicrophoneIconColor: K = T,
defaultAudioWaveIconColor: C = T,
mainContainerClassName: j,
canvasContainerClassName: ee,
isProgressIndicatorShown: m = !N,
progressIndicatorClassName: he,
isProgressIndicatorTimeShown: ue = !0,
progressIndicatorTimeClassName: Me,
isProgressIndicatorOnHoverShown: ce = !N,
progressIndicatorOnHoverClassName: te,
isProgressIndicatorTimeOnHoverShown: le = !0,
progressIndicatorTimeOnHoverClassName: Ie,
isAudioProcessingTextShown: pe = !0,
audioProcessingTextClassName: re,
controlButtonsClassName: me
}, Q) => {
const [G, Le] = v(0), [i, D] = v([]), [z, R] = v(0), [B, oe] = v(0), [Z, ve] = v(0), [k, Ne] = v(!1), [Re, Ze] = v(0), ye = Re < 768, Se = Math.trunc(q), de = Math.trunc(X), W = Math.trunc(
ye && de > 0 ? J + 1 : J
), p = y(null), Ae = y([]), fe = y(Se), Pe = y(W), Oe = y(W), ae = y(null), Ue = W + de * W;
x(() => {
const E = () => {
if (!ae.current || !p.current)
return;
me.current = Ie;
const W = Math.trunc(
fe.current = Se;
const V = Math.trunc(
ae.current.clientHeight * window.devicePixelRatio / 2
) * 2;
K(ae.current.clientWidth), B(W), oe(
R(ae.current.clientWidth), oe(V), ve(
Math.round(
ae.current.clientWidth * window.devicePixelRatio
)
), He(window.innerWidth);
), Ze(window.innerWidth);
};
return D(), window.addEventListener("resize", D), () => {
window.removeEventListener("resize", D);
return E(), window.addEventListener("resize", E), () => {
window.removeEventListener("resize", E);
};
}, [R]), Ue(() => {
g.current && ((me.current >= Ie || !e.length) && (me.current = 0, Fe({
}, [H]), Ge(() => {
p.current && ((fe.current >= Se || !e.length) && (fe.current = 0, We({
audioData: e,
unit: Ze,
index: $e,
index2: Re,
canvas: g.current,
picks: we.current,
unit: Ue,
index: Pe,
index2: Oe,
canvas: p.current,
picks: Ae.current,
isRecordingInProgress: r,
isPausedRecording: N,
backgroundColor: T,
mainBarColor: x,
secondaryBarColor: L,
barWidth: G,
rounded: J,
animateCurrentPick: U,
isPausedRecording: M,
backgroundColor: I,
mainBarColor: T,
secondaryBarColor: _,
barWidth: W,
rounded: U,
animateCurrentPick: Y,
fullscreen: l
})), me.current += 1);
})), fe.current += 1);
}, [
g.current,
p.current,
e,
G,
W,
I,
T,
x,
L,
J,
_,
U,
l,
Y,
ee
]), $(() => (a || window.addEventListener("beforeunload", Ae), () => {
window.removeEventListener("beforeunload", Ae);
}), [a]), $(() => {
var D, W;
if (d)
return le ? (D = g.current) == null || D.addEventListener("mouseleave", ye) : (W = g.current) == null || W.addEventListener("mouseenter", Ne), () => {
var se, De;
le ? (se = g.current) == null || se.removeEventListener(
F,
Z
]), x(() => {
var E, V;
if (f)
return k ? (E = p.current) == null || E.addEventListener("mouseleave", De) : (V = p.current) == null || V.addEventListener("mouseenter", je), () => {
var se, Ee;
k ? (se = p.current) == null || se.removeEventListener(
"mouseleave",
ye
) : (De = g.current) == null || De.removeEventListener(
De
) : (Ee = p.current) == null || Ee.removeEventListener(
"mouseenter",
Ne
je
);
};
}, [le, d]), $(() => {
var W;
if (!d || !g.current || r)
}, [k, f]), x(() => {
var V;
if (!w || !p.current || r)
return;
if (A) {
p();
if (N) {
a();
return;
}
return (() => {
we.current = [], j(
Be(
d,
k,
ee,
G,
ve
)
);
})(), (W = g.current) == null || W.addEventListener(
Ae.current = [];
const E = Qe({
buffer: w,
height: B,
width: Z,
barWidth: W,
gap: de
});
return D(E), (V = p.current) == null || V.addEventListener(
"mousemove",
je
be
), () => {
var se;
(se = g.current) == null || se.removeEventListener(
(se = p.current) == null || se.removeEventListener(
"mousemove",
je
be
);
};
}, [
d,
w,
k,
O,
F
]), $(() => {
if (!(A || !y.length || !g.current)) {
if (a) {
j([]);
z,
B,
X,
J
]), x(() => {
if (!(N || !(i != null && i.length) || !p.current)) {
if (d) {
D([]);
return;
}
Ye({
barsData: y,
canvas: g.current,
barWidth: G,
gap: ve,
backgroundColor: T,
mainBarColor: x,
secondaryBarColor: L,
Be({
barsData: i,
canvas: p.current,
barWidth: W,
gap: de,
backgroundColor: I,
mainBarColor: T,
secondaryBarColor: _,
currentAudioTime: o,
rounded: J,
rounded: U,
duration: t
});
}), P(!1);
}
}, [
y,
i,
o,
a,
J,
d,
U,
I,
T,
x,
L
]), $(() => {
f && g.current && pe({
canvas: g.current,
backgroundColor: T
_
]), x(() => {
h && p.current && we({
canvas: p.current,
backgroundColor: I
});
}, [f]);
const Ae = (D) => {
D.preventDefault(), D.returnValue = "";
}, Ne = () => {
Le(!0);
}, ye = () => {
Le(!1);
}, je = (D) => {
c(D.offsetX);
}, Pe = (D) => {
H != null && H.current && g.current && (H.current.currentTime = t / w * (D.clientX - g.current.getBoundingClientRect().left));
}, [h]);
const je = () => {
Ne(!0);
}, De = () => {
Ne(!1);
}, be = (E) => {
Le(E.offsetX);
}, Ye = (E) => {
Q != null && Q.current && p.current && (Q.current.currentTime = t / z * (E.clientX - p.current.getBoundingClientRect().left));
};
return /* @__PURE__ */ te("div", { className: "voice-visualizer", children: [
/* @__PURE__ */ te(
return /* @__PURE__ */ ne("div", { className: `voice-visualizer ${j ?? ""}`, children: [
/* @__PURE__ */ ne(
"div",
{
className: `voice-visualizer__canvas-container ${_ ?? ""}`,
className: `voice-visualizer__canvas-container ${ee ?? ""}`,
ref: ae,
style: { width: _e(R) },
style: { width: Ce(H) },
children: [

@@ -459,9 +462,9 @@ /* @__PURE__ */ s(

{
ref: g,
width: ee,
height: k,
onClick: Pe,
ref: p,
width: Z,
height: B,
onClick: Ye,
style: {
height: _e(Z),
width: w
height: Ce($),
width: z
},

@@ -471,14 +474,14 @@ children: "Your browser does not support HTML5 Canvas."

),
Y && a && /* @__PURE__ */ te(Ee, { children: [
/* @__PURE__ */ s(be, { color: C }),
/* @__PURE__ */ s(be, { color: C, reflect: !0 }),
F && d && /* @__PURE__ */ ne(Te, { children: [
/* @__PURE__ */ s(xe, { color: C }),
/* @__PURE__ */ s(xe, { color: C, reflect: !0 }),
/* @__PURE__ */ s(
"button",
{
onClick: b,
onClick: A,
className: "voice-visualizer__canvas-microphone-btn",
children: /* @__PURE__ */ s(
Qe,
Xe,
{
color: Q,
color: K,
stroke: 0.5,

@@ -491,26 +494,25 @@ className: "voice-visualizer__canvas-microphone-icon"

] }),
X && f && /* @__PURE__ */ s(
pe && h && /* @__PURE__ */ s(
"p",
{
className: `voice-visualizer__canvas-audio-processing ${Me ?? ""}`,
style: { color: x },
className: `voice-visualizer__canvas-audio-processing ${re ?? ""}`,
style: { color: T },
children: "Processing Audio..."
}
),
le && d && q && /* @__PURE__ */ s(
k && f && !ye && ce && /* @__PURE__ */ s(
"div",
{
className: `voice-visualizer__progress-indicator-hovered ${he ?? ""}`,
className: `voice-visualizer__progress-indicator-hovered ${te ?? ""}`,
style: {
left: ce,
display: d && Se > 768 ? "block" : "none"
left: G
},
children: fe && /* @__PURE__ */ s(
children: le && /* @__PURE__ */ s(
"p",
{
className: `voice-visualizer__progress-indicator-hovered-time
${w - ce < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""}
${ge ?? ""}`,
children: Te(
t / w * ce
${z - G < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""}
${Ie ?? ""}`,
children: _e(
t / z * G
)

@@ -521,14 +523,14 @@ }

),
d && t && V ? /* @__PURE__ */ s(
m && f && t ? /* @__PURE__ */ s(
"div",
{
className: `voice-visualizer__progress-indicator ${v ?? ""}`,
className: `voice-visualizer__progress-indicator ${he ?? ""}`,
style: {
left: o / t * w
left: o / t * z
},
children: ze && /* @__PURE__ */ s(
children: ue && /* @__PURE__ */ s(
"p",
{
className: `voice-visualizer__progress-indicator-time ${w - o * w / t < 70 ? "voice-visualizer__progress-indicator-time-left" : ""} ${ie ?? ""}`,
children: Te(o)
className: `voice-visualizer__progress-indicator-time ${z - o * z / t < 70 ? "voice-visualizer__progress-indicator-time-left" : ""} ${Me ?? ""}`,
children: _e(o)
}

@@ -541,18 +543,18 @@ )

),
re && /* @__PURE__ */ te(Ee, { children: [
/* @__PURE__ */ te("div", { className: "voice-visualizer__audio-info-container", children: [
r && /* @__PURE__ */ s("p", { className: "voice-visualizer__audio-info-time", children: ke(S) }),
t ? /* @__PURE__ */ s("p", { children: Je(t) }) : null
ge && /* @__PURE__ */ ne(Te, { children: [
/* @__PURE__ */ ne("div", { className: "voice-visualizer__audio-info-container", children: [
r && /* @__PURE__ */ s("p", { className: "voice-visualizer__audio-info-time", children: Je(S) }),
t && !h ? /* @__PURE__ */ s("p", { children: qe(t) }) : null
] }),
/* @__PURE__ */ te("div", { className: "voice-visualizer__buttons-container", children: [
/* @__PURE__ */ ne("div", { className: "voice-visualizer__buttons-container", children: [
r && /* @__PURE__ */ s(
"button",
{
className: `voice-visualizer__btn-left ${N ? "voice-visualizer__btn-left-microphone" : ""}`,
onClick: M,
className: `voice-visualizer__btn-left ${M ? "voice-visualizer__btn-left-microphone" : ""}`,
onClick: L,
children: /* @__PURE__ */ s(
"img",
{
src: N ? xe : Ce,
alt: N ? "Play" : "Pause"
src: M ? He : $e,
alt: M ? "Play" : "Pause"
}

@@ -562,13 +564,13 @@ )

),
!a && /* @__PURE__ */ s(
!d && /* @__PURE__ */ s(
"button",
{
className: `voice-visualizer__btn-left ${r ? "voice-visualizer__visually-hidden" : ""}`,
onClick: M,
disabled: f,
onClick: L,
disabled: h,
children: /* @__PURE__ */ s(
"img",
{
src: I ? Ve : Ce,
alt: I ? "Play" : "Pause"
src: b ? Ke : $e,
alt: b ? "Play" : "Pause"
}

@@ -578,8 +580,8 @@ )

),
a && /* @__PURE__ */ s(
d && /* @__PURE__ */ s(
"button",
{
className: "voice-visualizer__btn-center",
onClick: b,
children: /* @__PURE__ */ s("img", { src: xe, alt: "Microphone" })
onClick: A,
children: /* @__PURE__ */ s("img", { src: He, alt: "Microphone" })
}

@@ -592,19 +594,20 @@ ),

onClick: u,
children: /* @__PURE__ */ s("img", { src: qe, alt: "Stop" })
children: /* @__PURE__ */ s("img", { src: et, alt: "Stop" })
}
),
!a && /* @__PURE__ */ s(
!d && /* @__PURE__ */ s(
"button",
{
onClick: p,
className: `voice-visualizer__btn ${ue ?? ""}`,
disabled: f,
onClick: a,
className: `voice-visualizer__btn ${me ?? ""}`,
disabled: h,
children: "Clear"
}
),
ne && n && /* @__PURE__ */ s(
ie && n && /* @__PURE__ */ s(
"button",
{
onClick: z,
className: `voice-visualizer__btn ${ue ?? ""}`,
onClick: g,
className: `voice-visualizer__btn ${me ?? ""}`,
disabled: h,
children: "Download Audio"

@@ -615,8 +618,8 @@ }

] }),
d && /* @__PURE__ */ s(
f && /* @__PURE__ */ s(
"audio",
{
ref: H,
src: i,
onTimeUpdate: h,
ref: Q,
src: c,
onTimeUpdate: O,
controls: !0,

@@ -629,96 +632,100 @@ style: { display: "none" }

);
function tt() {
const [e, r] = m(!1), [n, t] = m(!1), [i, o] = m(null), [d, M] = m(new Uint8Array(0)), [b, u] = m(!1), [z, S] = m(null), [I, N] = m(null), [f, a] = m(0), [p, h] = m(0), [R, Z] = m(0), [P, T] = m(""), [x, L] = m(!0), [F, O] = m(0), [J, re] = m(!0), [ne, U] = m(null), l = E(null), A = E(null), Y = E(null), Q = E(null), C = E(null), _ = E(null), V = E(null), v = E(null);
$(() => {
function it() {
const [e, r] = v(!1), [n, t] = v(!1), [c, o] = v(null), [w, L] = v(new Uint8Array(0)), [A, u] = v(!1), [g, S] = v(null), [f, b] = v(null), [M, h] = v(0), [d, a] = v(0), [P, O] = v(0), [H, $] = v(""), [q, I] = v(!0), [T, _] = v(0), [J, X] = v(!0), [U, ge] = v(!1), [ie, Y] = v(null), l = y(null), N = y(null), F = y(null), K = y(null), C = y(null), j = y(null), ee = y(null), m = y(null), he = !!(f && !A);
x(() => {
if (!e || n)
return;
const y = setInterval(() => {
const j = performance.now();
a((w) => w + (j - p)), h(j);
const D = setInterval(() => {
const z = performance.now();
h((R) => R + (z - d)), a(z);
}, 1e3);
return () => clearInterval(y);
}, [p, n, e]), $(() => {
if (!z || z.size === 0)
return () => clearInterval(D);
}, [d, n, e]), x(() => {
if (!g || g.size === 0)
return;
(async () => {
var y;
var D;
try {
U(null);
const j = new Blob([z], {
type: (y = l.current) == null ? void 0 : y.mimeType
}), w = URL.createObjectURL(j);
w && T(w);
const K = await z.arrayBuffer(), B = await new AudioContext().decodeAudioData(K);
N(B), Z(B.duration - 0.06), u(!1);
} catch (j) {
if (console.error("Error processing the audio blob:", j), j instanceof Error) {
U(j);
Y(null);
const z = new Blob([g], {
type: (D = l.current) == null ? void 0 : D.mimeType
}), R = URL.createObjectURL(z);
R && $(R);
const B = await g.arrayBuffer(), Z = await new AudioContext().decodeAudioData(B);
b(Z), O(Z.duration - 0.06);
} catch (z) {
if (console.error("Error processing the audio blob:", z), z instanceof Error) {
Y(z);
return;
}
U(new Error("Error processing the audio blob"));
Y(new Error("Error processing the audio blob"));
}
})();
}, [z]), $(() => {
if (ne) {
X();
}, [g]), x(() => {
if (ie) {
re();
return;
}
}, [ne]), $(() => () => {
V.current && cancelAnimationFrame(V.current), C.current && C.current.disconnect(), A.current && A.current.state !== "closed" && A.current.close(), _.current && cancelAnimationFrame(_.current), v != null && v.current && v.current.removeEventListener("ended", H), l.current && l.current.removeEventListener(
}, [ie]), x(() => () => {
ee.current && cancelAnimationFrame(ee.current), C.current && C.current.disconnect(), N.current && N.current.state !== "closed" && N.current.close(), j.current && cancelAnimationFrame(j.current), m != null && m.current && m.current.removeEventListener("ended", G), l.current && l.current.removeEventListener(
"dataavailable",
q
te
);
}, []);
const ze = () => {
navigator.mediaDevices.getUserMedia({ audio: !0 }).then((c) => {
X(), re(!1), h(performance.now()), r(!0), o(c), A.current = new window.AudioContext(), Y.current = A.current.createAnalyser(), Q.current = new Uint8Array(
Y.current.frequencyBinCount
), C.current = A.current.createMediaStreamSource(c), C.current.connect(Y.current), l.current = new MediaRecorder(c), l.current.addEventListener(
}, []), x(() => (!J && !U && window.addEventListener("beforeunload", ue), () => {
window.removeEventListener("beforeunload", ue);
}), [J, U]);
const ue = (i) => {
i.preventDefault(), i.returnValue = "";
}, Me = () => {
navigator.mediaDevices.getUserMedia({ audio: !0 }).then((i) => {
re(), X(!1), a(performance.now()), r(!0), o(i), N.current = new window.AudioContext(), F.current = N.current.createAnalyser(), K.current = new Uint8Array(
F.current.frequencyBinCount
), C.current = N.current.createMediaStreamSource(i), C.current.connect(F.current), l.current = new MediaRecorder(i), l.current.addEventListener(
"dataavailable",
q
), l.current.start(), ie();
}).catch((c) => {
if (console.error("Error starting audio recording:", c), c instanceof Error) {
U(c);
te
), l.current.start(), ce();
}).catch((i) => {
if (console.error("Error starting audio recording:", i), i instanceof Error) {
Y(i);
return;
}
U(new Error("Error starting audio recording"));
Y(new Error("Error starting audio recording"));
});
}, ie = () => {
Y.current.getByteTimeDomainData(Q.current), M(new Uint8Array(Q.current)), _.current = requestAnimationFrame(ie);
}, q = (c) => {
l.current && S(c.data);
}, he = () => {
v.current && (O(v.current.currentTime), V.current = requestAnimationFrame(he));
}, fe = () => {
e || ze();
}, ge = () => {
e && (u(!0), r(!1), a(0), t(!1), _.current && cancelAnimationFrame(_.current), C.current && C.current.disconnect(), A.current && A.current.state !== "closed" && A.current.close(), i == null || i.getTracks().forEach((c) => c.stop()), l.current && (l.current.stop(), l.current.removeEventListener(
}, ce = () => {
F.current.getByteTimeDomainData(K.current), L(new Uint8Array(K.current)), j.current = requestAnimationFrame(ce);
}, te = (i) => {
l.current && S(i.data);
}, le = () => {
m.current && (_(m.current.currentTime), ee.current = requestAnimationFrame(le));
}, Ie = () => {
e || Me();
}, pe = () => {
e && (u(!0), r(!1), h(0), t(!1), j.current && cancelAnimationFrame(j.current), C.current && C.current.disconnect(), N.current && N.current.state !== "closed" && N.current.close(), c == null || c.getTracks().forEach((i) => i.stop()), l.current && (l.current.stop(), l.current.removeEventListener(
"dataavailable",
q
te
)));
}, X = () => {
_.current && cancelAnimationFrame(_.current), v != null && v.current && v.current.removeEventListener("ended", H), l.current && (l.current.removeEventListener(
}, re = () => {
j.current && cancelAnimationFrame(j.current), m != null && m.current && m.current.removeEventListener("ended", G), l.current && (l.current.removeEventListener(
"dataavailable",
q
), l.current.stop(), l.current = null), i == null || i.getTracks().forEach((c) => c.stop()), l.current = null, A.current = null, Y.current = null, Q.current = null, C.current = null, _.current = null, V.current = null, o(null), r(!1), u(!1), S(null), N(null), a(0), h(0), Z(0), T(""), O(0), L(!0), t(!1), M(new Uint8Array(0)), U(null), re(!0);
}, Me = (c) => {
c instanceof Blob && (X(), re(!1), u(!0), r(!1), a(0), t(!1), S(c));
}, ue = () => {
var c, y, j, w, K, k, B, ee;
te
), l.current.stop(), l.current = null), c == null || c.getTracks().forEach((i) => i.stop()), l.current = null, N.current = null, F.current = null, K.current = null, C.current = null, j.current = null, ee.current = null, o(null), r(!1), u(!1), S(null), b(null), h(0), a(0), O(0), $(""), _(0), I(!0), t(!1), L(new Uint8Array(0)), Y(null), X(!0);
}, me = (i) => {
i instanceof Blob && (re(), ge(!0), X(!1), u(!0), r(!1), h(0), t(!1), S(i));
}, Q = () => {
var i, D, z, R, B, oe, Z, ve;
if (e) {
t((oe) => !oe), ((c = l.current) == null ? void 0 : c.state) === "recording" ? ((y = l.current) == null || y.pause(), a((oe) => oe + (performance.now() - p)), _.current && cancelAnimationFrame(_.current)) : ((j = l.current) == null || j.resume(), h(performance.now()), _.current = requestAnimationFrame(ie));
t((k) => !k), ((i = l.current) == null ? void 0 : i.state) === "recording" ? ((D = l.current) == null || D.pause(), h((k) => k + (performance.now() - d)), j.current && cancelAnimationFrame(j.current)) : ((z = l.current) == null || z.resume(), a(performance.now()), j.current = requestAnimationFrame(ce));
return;
}
v.current && I && ((w = v.current) != null && w.paused ? ((K = v.current) == null || K.addEventListener("ended", H), (k = v.current) == null || k.play(), L(!1)) : ((B = v.current) == null || B.removeEventListener("ended", H), (ee = v.current) == null || ee.pause(), L(!0)));
}, H = () => {
L(!0), v != null && v.current && (v.current.currentTime = 0, O(0));
}, ce = () => {
var y;
if (!P)
m.current && he && ((R = m.current) != null && R.paused ? ((B = m.current) == null || B.addEventListener("ended", G), (oe = m.current) == null || oe.play(), I(!1)) : ((Z = m.current) == null || Z.removeEventListener("ended", G), (ve = m.current) == null || ve.pause(), I(!0)));
}, G = () => {
I(!0), m != null && m.current && (m.current.currentTime = 0, _(0));
}, Le = () => {
var D;
if (!H)
return;
const c = document.createElement("a");
c.href = P, c.download = `recorded_audio${We(
(y = l.current) == null ? void 0 : y.mimeType
)}`, document.body.appendChild(c), c.click(), document.body.removeChild(c), URL.revokeObjectURL(P);
const i = document.createElement("a");
i.href = H, i.download = `recorded_audio${Ve(
(D = l.current) == null ? void 0 : D.mimeType
)}`, document.body.appendChild(i), i.click(), document.body.removeChild(i), URL.revokeObjectURL(H);
};

@@ -728,27 +735,30 @@ return {

isPausedRecording: n,
audioData: d,
recordingTime: f,
isProcessingRecordedAudio: b,
recordedBlob: z,
audioData: w,
recordingTime: M,
isProcessingRecordedAudio: A,
recordedBlob: g,
mediaRecorder: l.current,
duration: R,
currentAudioTime: F,
audioSrc: P,
isPausedRecordedAudio: x,
bufferFromRecordedBlob: I,
duration: P,
currentAudioTime: T,
audioSrc: H,
isPausedRecordedAudio: q,
bufferFromRecordedBlob: f,
isCleared: J,
setPreloadedAudioBlob: Me,
startRecording: fe,
togglePauseResume: ue,
stopRecording: ge,
saveAudioFile: ce,
clearCanvas: X,
error: ne,
_handleTimeUpdate: he,
audioRef: v
isAvailableRecordedAudio: he,
isPreloadedBlob: U,
setPreloadedAudioBlob: me,
startRecording: Ie,
togglePauseResume: Q,
stopRecording: pe,
saveAudioFile: Le,
clearCanvas: re,
error: ie,
_setIsProcessingRecordedAudio: u,
_handleTimeUpdate: le,
audioRef: m
};
}
export {
et as VoiceVisualizer,
tt as useVoiceVisualizer
nt as VoiceVisualizer,
it as useVoiceVisualizer
};

@@ -1,2 +0,2 @@

import { MutableRefObject } from "react";
import { Dispatch, MutableRefObject, SetStateAction } from "react";
export interface BarItem {

@@ -18,2 +18,4 @@ startY: number;

isCleared: boolean;
isAvailableRecordedAudio: boolean;
isPreloadedBlob: boolean;
setPreloadedAudioBlob: (blob: unknown) => void;

@@ -28,2 +30,3 @@ recordedBlob: Blob | null;

error: Error | null;
_setIsProcessingRecordedAudio: Dispatch<SetStateAction<boolean>>;
_handleTimeUpdate: () => void;

@@ -85,1 +88,8 @@ audioRef: MutableRefObject<HTMLAudioElement | null>;

}
export type GetBarsDataParams = {
buffer: AudioBuffer;
height: number;
width: number;
barWidth: number;
gap: number;
};
{
"name": "react-voice-visualizer",
"private": false,
"version": "1.0.14",
"version": "1.1.0",
"type": "module",

@@ -6,0 +6,0 @@ "author": "Yurii Zarytskyi",

@@ -98,2 +98,3 @@ # react-voice-visualizer [Demo App](https://react-voice-visualizer.vercel.app/)

setPreloadedAudioBlob,
isPreloadedBlob,
error,

@@ -119,3 +120,7 @@ audioRef

return (
<VoiceVisualizer controls={recorderControls} ref={audioRef}/>
<VoiceVisualizer
isControlPanelShown={false} // Set to 'false' in most cases, but should be determined based on the specific user's use case.
controls={recorderControls}
ref={audioRef}
/>
);

@@ -151,26 +156,29 @@ };

| Returns | Type | Description |
| :-------------------------------- |:----------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `isRecordingInProgress` | `boolean` | Indicates if audio recording is currently in progress. |
| `isPausedRecording` | `boolean` | Indicates if audio recording is currently paused. |
| `audioData` | `Uint8Array` | Audio data for real-time visualization. |
| `recordingTime` | `number` | Elapsed time during recording in seconds. |
| `mediaRecorder` | `MediaRecorder \| null` | MediaRecorder instance used for recording audio. |
| `duration` | `number` | Duration of the recorded audio in seconds. |
| `currentAudioTime` | `number` | Current playback time of the recorded audio. |
| `audioSrc` | `string` | Source URL of the recorded audio file for playback. |
| `isPausedRecordedAudio` | `boolean` | Indicates if recorded audio playback is paused. |
| `isProcessingRecordedAudio` | `boolean` | Indicates if the recorded audio is being processed. |
| `isCleared` | `boolean` | Indicates if the canvas has been cleared. |
| `recordedBlob` | `Blob \| null` | Recorded audio data in Blob format. |
| `bufferFromRecordedBlob` | `AudioBuffer \| null` | Audio buffer from the recorded Blob. |
| `setPreloadedAudioBlob` | `(audioBlob: Blob) => void` | This function allows you to load an existing audio blob for further processing, playback and visualization. The `audioBlob` parameter represents the recorded audio data stored in a Blob format. |
| `startRecording` | `() => void` | Function to start audio recording. |
| `togglePauseResume` | `() => void` | Function to toggle pause/resume during recording and playback of recorded audio. |
| `stopRecording` | `() => void` | Function to stop audio recording. |
| `saveAudioFile` | `() => void` | This function allows you to save the recorded audio as a `webm` file format. Please note that it supports saving audio only in the webm format. If you need to save the audio in a different format, you can use external libraries like FFmpeg to convert the Blob to your desired format. This flexibility allows you to tailor the output format according to your specific needs. |
| `clearCanvas` | `() => void` | Function to clear the visualization canvas. |
| `error` | `Error \| null` | Error object if any error occurred during recording or playback. |
| `_handleTimeUpdate` | `() => void` | Internal function to handle audio time updates during playback. |
| `audioRef` | `MutableRefObject`<br/>`<HTMLAudioElement \| null>` | Reference to the audio element used for playback. |
| Returns | Type | Description |
|:--------------------------------------------------------------|:----------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `isRecordingInProgress` | `boolean` | Indicates if audio recording is currently in progress. |
| `isPausedRecording` | `boolean` | Indicates if audio recording is currently paused. |
| `audioData` | `Uint8Array` | Audio data for real-time visualization. |
| `recordingTime` | `number` | Elapsed time during recording in seconds. |
| `mediaRecorder` | `MediaRecorder \| null` | MediaRecorder instance used for recording audio. |
| `duration` | `number` | Duration of the recorded audio in seconds. |
| `currentAudioTime` | `number` | Current playback time of the recorded audio. |
| `audioSrc` | `string` | Source URL of the recorded audio file for playback. |
| `isPausedRecordedAudio` | `boolean` | Indicates if recorded audio playback is paused. |
| `isProcessingRecordedAudio` | `boolean` | Indicates if the recorded audio is being processed. |
| `isCleared` | `boolean` | Indicates if the canvas has been cleared. |
| `isPreloadedBlob` | `boolean` | Indicates whether a blob of recorded audio data has been preloaded. |
| `isAvailableRecordedAudio`                                    | `boolean`                                            | Indicates whether recorded audio is available and not currently being processed. This return value can be used to check if it's an appropriate time to work with recorded audio data in your application.                                                                                                                                                                               |
| `recordedBlob` | `Blob \| null` | Recorded audio data in Blob format. |
| `bufferFromRecordedBlob` | `AudioBuffer \| null` | Audio buffer from the recorded Blob. |
| `setPreloadedAudioBlob` | `(audioBlob: Blob) => void` | This function allows you to load an existing audio blob for further processing, playback and visualization. The `audioBlob` parameter represents the recorded audio data stored in a Blob format. |
| `startRecording` | `() => void` | Function to start audio recording. |
| `togglePauseResume` | `() => void` | Function to toggle pause/resume during recording and playback of recorded audio. |
| `stopRecording` | `() => void` | Function to stop audio recording. |
| `saveAudioFile` | `() => void` | This function allows you to save the recorded audio as a `webm` file format. Please note that it supports saving audio only in the webm format. If you need to save the audio in a different format, you can use external libraries like FFmpeg to convert the Blob to your desired format. This flexibility allows you to tailor the output format according to your specific needs. |
| `clearCanvas` | `() => void` | Function to clear the visualization canvas. |
| `error` | `Error \| null` | Error object if any error occurred during recording or playback. |
| `_setIsProcessingRecordedAudio` | `Dispatch<SetStateAction<boolean>>` | (**Do not use!**) Internal function to set IsProcessingRecordedAudio state. |
| `_handleTimeUpdate` | `() => void` | (**Do not use!**) Internal function to handle audio time updates during playback. |
| `audioRef` | `MutableRefObject`<br/>`<HTMLAudioElement \| null>` | Reference to the audio element used for playback. |

@@ -187,3 +195,3 @@ #### Load and visualize any Audio

A component that visualizes the real-time audio audio wave during recording.
A component that visualizes the real-time audio wave during recording.

@@ -211,2 +219,3 @@ ### Props for AudioVisualizer Component

| **`isDefaultUIShown`** | Whether to show a default UI on Canvas before recording. If you want to create your own UI, set it to false. | `true` | `boolean` (Optional) |
| **`mainContainerClassName`** | The CSS class name for the main container. | - | `string` (Optional) |
| **`canvasContainerClassName`** | The CSS class name for the container of the visualization canvas. | - | `string` (Optional) |

@@ -213,0 +222,0 @@ | **`isProgressIndicatorShown`** | Whether to show the progress indicator after recording. | `true` | `boolean` (Optional) |

Sorry, the diff of this file is not supported yet