react-voice-visualizer - npm package version comparison

Comparing version 1.7.4 to 1.7.5

dist/react-voice-visualizer.js

@@ -35,7 +35,7 @@ (function(){"use strict";(e=>{try{if(typeof window>"u")return;var i=document.createElement("style");i.appendChild(document.createTextNode(e)),document.head.appendChild(i)}catch(o){console.error("vite-plugin-css-injected-by-js",o)}})(".voice-visualizer__buttons-container{display:flex;justify-content:center;align-items:center;column-gap:20px;row-gap:15px;flex-wrap:wrap;margin-bottom:40px}.voice-visualizer__btn-center{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#fff;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s}.voice-visualizer__btn-center:hover{background-color:#eaeaea}.voice-visualizer__btn-center>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause{background-color:#ff3030}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause:hover{background-color:#ff4f4f}.voice-visualizer__btn-center.voice-visualizer__btn-center-pause>img{height:50%;max-height:16px}.voice-visualizer__btn-center:hover{border:4px solid #9f9f9f}.voice-visualizer__btn-left{box-sizing:border-box;flex-shrink:0;width:60px;height:60px;padding:0;display:flex;justify-content:center;align-items:center;border-radius:50%;background-color:#ff3030;border:4px solid #c5c5c5;outline:none;cursor:pointer;transition:border-color .3s,background-color .3s,opacity .3s}.voice-visualizer__btn-left:hover{background-color:#ff4f4f}.voice-visualizer__btn-left:disabled{opacity:.6;background-color:#ff3030}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone{background-color:#fff}.voice-visualizer__btn-left.voice-visualizer__btn-left-microphone>img{width:auto;height:50%;max-height:30px}.voice-visualizer__btn-left>img{width:auto;height:50%;max-height:16px}.voice-visualizer__btn-left:hover{border:4px solid #9f9f9f}.voice-visualizer__btn{box-sizing:border-box;min-width:100px;min-height:60px;padding:5px 20px;border-radius:40px;font-size:15px;background-color:#f0f0f0;transition:background-color .3s,opacity .3s}.voice-visualizer__btn:disabled{opacity:.8;background-color:#f0f0f0}.voice-visualizer__btn:hover{background-color:#bebebe}.voice-visualizer__canvas-container{position:relative;width:fit-content;margin:0 auto;overflow:hidden}.voice-visualizer__canvas-container canvas{display:block}.voice-visualizer__canvas-microphone-btn{position:absolute;top:50%;left:50%;width:auto;max-width:12%;min-width:24px;height:50%;max-height:100px;background-color:transparent;border:none;outline:none;transform:translate(-50%,-50%)}.voice-visualizer__canvas-microphone-icon{width:100%;height:100%;will-change:transform;transition:transform .3s}.voice-visualizer__canvas-microphone-btn:hover .voice-visualizer__canvas-microphone-icon{transform:scale(1.03)}.voice-visualizer__canvas-audio-wave-icon{position:absolute;top:50%;left:50%;width:auto;max-width:40%;height:40%;max-height:100px;transform:translate(-118%,-50%) scale(-1)}.voice-visualizer__canvas-audio-wave-icon2{transform:translate(18%,-50%)}.voice-visualizer__canvas-audio-processing{position:absolute;top:50%;left:50%;margin:0;transform:translate(-50%,-50%)}.voice-visualizer__progress-indicator-hovered{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#85858599}.voice-visualizer__progress-indicator-hovered-time{position:absolute;top:3%;left:1px;width:fit-content;margin:0;padding:0 7px;opacity:.8;font-size:12px;border-radius:0 4px 4px 
0;background-color:#575757;text-align:left}.voice-visualizer__progress-indicator-hovered-time.voice-visualizer__progress-indicator-hovered-time-left{left:unset;right:1px;border-radius:4px 0 0 4px}.voice-visualizer__progress-indicator{position:absolute;top:0;pointer-events:none;height:100%;width:1px;background-color:#efefef}.voice-visualizer__progress-indicator-time{position:absolute;top:3%;left:1px;width:fit-content;box-sizing:border-box;min-width:37px;margin:0;padding:0 7px;font-size:12px;border-radius:0 4px 4px 0;text-align:left;color:#000;font-weight:500;background-color:#efefef}.voice-visualizer__progress-indicator-time.voice-visualizer__progress-indicator-time-left{left:unset;right:1px;border-radius:4px 0 0 4px}.voice-visualizer__audio-info-container{box-sizing:border-box;height:55px;display:flex;align-items:center;justify-content:center;gap:30px}.voice-visualizer__audio-info-time{margin:15px 0;min-width:38px;text-align:left}.voice-visualizer__visually-hidden{position:absolute;width:1px;height:1px;margin:-1px;padding:0;border:4px solid #c5c5c5;white-space:nowrap;clip-path:inset(100%);clip:rect(0 0 0 0);overflow:hidden}")})();

return;
const { context: z, height: A } = d, S = m / v;
const { context: f, height: A } = d, S = m / v;
e.forEach((o, g) => {
const b = g / e.length, h = S > b;
je({
context: z,
context: f,
color: h ? M : u,

@@ -80,3 +80,3 @@ rounded: I,

mainBarColor: d,
secondaryBarColor: z,
secondaryBarColor: f,
rounded: A,

@@ -95,5 +95,5 @@ animateCurrentPick: S,

r.current = 0;
const j = (h - $ / 258 * h) / h * 100, ee = (-h + $ / 258 * h * 2) / h * 100, F = n.current === v ? {
const j = (h - $ / 258 * h) / h * 100, K = (-h + $ / 258 * h * 2) / h * 100, F = n.current === v ? {
startY: j,
barHeight: ee
barHeight: K
} : null;

@@ -130,3 +130,3 @@ n.current >= t ? n.current = v : n.current += v, m.length > (o ? H : _) / v && m.pop(), m.unshift(F);

context: b,
color: z,
color: f,
rounded: A,

@@ -177,9 +177,9 @@ width: H,

for (let d = 0; d < u; d++) {
const z = [];
const f = [];
let A = 0;
for (let o = 0; o < M && d * M + o < e.length; o++) {
const g = e[d * M + o];
g > 0 && (z.push(g), A++);
g > 0 && (f.push(g), A++);
}
const S = z.reduce((o, g) => o + g, 0) / A;
const S = f.reduce((o, g) => o + g, 0) / A;
S > v && (v = S), I.push({ max: S });

@@ -189,4 +189,4 @@ }

const d = m * 0.95 / v;
I = I.map((z) => ({
max: z.max > 0.01 ? z.max * d : 1
I = I.map((f) => ({
max: f.max > 0.01 ? f.max * d : 1
}));

@@ -311,3 +311,3 @@ }

saveAudioFile: d,
isAvailableRecordedAudio: z,
isAvailableRecordedAudio: f,
isPausedRecordedAudio: A,

@@ -326,3 +326,3 @@ isPausedRecording: S,

width: j = "100%",
height: ee = 200,
height: K = 200,
speed: F = 3,

@@ -332,8 +332,8 @@ backgroundColor: O = "transparent",

secondaryBarColor: B = "#5e5e5e",
barWidth: te = 2,
barWidth: ee = 2,
gap: k = 1,
rounded: J = 5,
isControlPanelShown: ye = !0,
isDownloadAudioButtonShown: re = !1,
animateCurrentPick: ne = !0,
isDownloadAudioButtonShown: te = !1,
animateCurrentPick: re = !0,
fullscreen: Y = !1,

@@ -343,10 +343,10 @@ onlyRecording: G = !1,

defaultMicrophoneIconColor: De = D,
defaultAudioWaveIconColor: ge = D,
mainContainerClassName: le,
canvasContainerClassName: Q,
defaultAudioWaveIconColor: le = D,
mainContainerClassName: me,
canvasContainerClassName: ne,
isProgressIndicatorShown: p = !G,
progressIndicatorClassName: R,
isProgressIndicatorTimeShown: V = !0,
isProgressIndicatorTimeShown: Q = !0,
progressIndicatorTimeClassName: ie,
isProgressIndicatorOnHoverShown: q = !G,
isProgressIndicatorOnHoverShown: V = !G,
progressIndicatorOnHoverClassName: C,

@@ -359,6 +359,6 @@ isProgressIndicatorTimeOnHoverShown: x = !0,

}, Ee) => {
const [me, pe] = l(0), [T, Ce] = l(0), [X, ve] = l(0), [ce, _e] = l(0), [oe, K] = l(!1), [we, Le] = l(window.innerWidth), [se, Se] = l(!1), i = we < 768, f = Math.trunc(F), E = Math.trunc(k), w = Math.trunc(
i && E > 0 ? te + 1 : te
), Ne = w + E * w, L = N(null), He = N([]), Ae = N(f), Je = N(w), Qe = N(w), de = N(null), P = Ee, Ve = dt(we), {
result: fe,
const [ve, pe] = l(0), [T, Ce] = l(0), [q, de] = l(0), [ce, _e] = l(0), [oe, X] = l(!1), [we, Le] = l(window.innerWidth), [se, Se] = l(!1), i = we < 768, z = Math.trunc(F), E = Math.trunc(k), w = Math.trunc(
i && E > 0 ? ee + 1 : ee
), Ne = w + E * w, L = N(null), He = N([]), Ae = N(z), Je = N(w), Qe = N(w), fe = N(null), P = Ee, Ve = dt(we), {
result: ze,
setResult: qe,

@@ -374,3 +374,3 @@ run: Xe

const y = () => {
Ve.current !== window.innerWidth && (z ? (Le(window.innerWidth), U(!0), Se(!0), Ke()) : (Le(window.innerWidth), xe()));
Ve.current !== window.innerWidth && (f ? (Le(window.innerWidth), U(!0), Se(!0), Ke()) : (Le(window.innerWidth), xe()));
};

@@ -380,4 +380,4 @@ return window.addEventListener("resize", y), () => {

};
}, [j, z]), Be(() => {
L.current && ((Ae.current >= f || !e.length) && (Ae.current = 0, ot({
}, [j, f]), Be(() => {
L.current && ((Ae.current >= z || !e.length) && (Ae.current = 0, ot({
audioData: e,

@@ -396,3 +396,3 @@ unit: Ne,

rounded: J,
animateCurrentPick: ne,
animateCurrentPick: re,
fullscreen: Y

@@ -413,6 +413,6 @@ })), Ae.current += 1);

var y, W;
if (z)
if (f)
return oe ? (y = L.current) == null || y.addEventListener("mouseleave", Oe) : (W = L.current) == null || W.addEventListener("mouseenter", $e), () => {
var ze, Ue;
oe ? (ze = L.current) == null || ze.removeEventListener(
var ge, Ue;
oe ? (ge = L.current) == null || ge.removeEventListener(
"mouseleave",

@@ -425,3 +425,3 @@ Oe

};
}, [oe, z]), Z(() => {
}, [oe, f]), Z(() => {
var W;

@@ -438,3 +438,3 @@ if (!M || !L.current || t || se)

bufferData: y,
height: X,
height: q,
width: ce,

@@ -447,4 +447,4 @@ barWidth: w,

), () => {
var ze;
(ze = L.current) == null || ze.removeEventListener(
var ge;
(ge = L.current) == null || ge.removeEventListener(
"mousemove",

@@ -457,8 +457,8 @@ Re

T,
X,
q,
k,
te,
ee,
se
]), Z(() => {
if (!(G || !(fe != null && fe.length) || !L.current || o)) {
if (!(G || !(ze != null && ze.length) || !L.current || o)) {
if (g) {

@@ -469,3 +469,3 @@ qe([]);

it({
barsData: fe,
barsData: ze,
canvas: L.current,

@@ -483,3 +483,3 @@ barWidth: w,

}, [
fe,
ze,
c,

@@ -498,11 +498,11 @@ g,

function xe() {
if (!de.current || !L.current)
if (!fe.current || !L.current)
return;
Ae.current = f;
Ae.current = z;
const y = Math.trunc(
de.current.clientHeight * window.devicePixelRatio / 2
fe.current.clientHeight * window.devicePixelRatio / 2
) * 2;
Ce(de.current.clientWidth), ve(y), _e(
Ce(fe.current.clientWidth), de(y), _e(
Math.round(
de.current.clientWidth * window.devicePixelRatio
fe.current.clientWidth * window.devicePixelRatio
)

@@ -515,5 +515,5 @@ ), Se(!1);

const $e = () => {
K(!0);
X(!0);
}, Oe = () => {
K(!1);
X(!1);
}, Re = (y) => {

@@ -527,8 +527,8 @@ pe(y.offsetX);

}, Ze = c / r * T;
return /* @__PURE__ */ ae("div", { className: `voice-visualizer ${le ?? ""}`, children: [
return /* @__PURE__ */ ae("div", { className: `voice-visualizer ${me ?? ""}`, children: [
/* @__PURE__ */ ae(
"div",
{
className: `voice-visualizer__canvas-container ${Q ?? ""}`,
ref: de,
className: `voice-visualizer__canvas-container ${ne ?? ""}`,
ref: fe,
style: { width: Ye(j) },

@@ -541,6 +541,6 @@ children: [

width: ce,
height: X,
height: q,
onClick: tt,
style: {
height: Ye(ee),
height: Ye(K),
width: T

@@ -552,4 +552,4 @@ },

he && g && /* @__PURE__ */ ae(Fe, { children: [
/* @__PURE__ */ a(Ge, { color: ge }),
/* @__PURE__ */ a(Ge, { color: ge, reflect: !0 }),
/* @__PURE__ */ a(Ge, { color: le }),
/* @__PURE__ */ a(Ge, { color: le, reflect: !0 }),
/* @__PURE__ */ a(

@@ -579,3 +579,3 @@ "button",

),
oe && z && !o && !i && q && /* @__PURE__ */ a(
oe && f && !o && !i && V && /* @__PURE__ */ a(
"div",

@@ -585,3 +585,3 @@ {

style: {
left: me
left: ve
},

@@ -592,6 +592,6 @@ children: x && /* @__PURE__ */ a(

className: `voice-visualizer__progress-indicator-hovered-time
${T - me < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""}
${T - ve < 70 ? "voice-visualizer__progress-indicator-hovered-time-left" : ""}
${s ?? ""}`,
children: ke(
r / T * me
r / T * ve
)

@@ -602,3 +602,3 @@ }

),
p && z && !o && r ? /* @__PURE__ */ a(
p && f && !o && r ? /* @__PURE__ */ a(
"div",

@@ -610,3 +610,3 @@ {

},
children: V && /* @__PURE__ */ a(
children: Q && /* @__PURE__ */ a(
"p",

@@ -683,3 +683,3 @@ {

),
re && n && /* @__PURE__ */ a(
te && n && /* @__PURE__ */ a(
"button",

@@ -710,18 +710,18 @@ {

} = {}) {
const [d, z] = l(!1), [A, S] = l(!1), [o, g] = l(null), [b, h] = l(new Uint8Array(0)), [H, _] = l(!1), [ue, $] = l(null), [U, j] = l(null), [ee, F] = l(0), [O, D] = l(0), [B, te] = l(0), [k, J] = l(""), [ye, re] = l(!0), [ne, Y] = l(0), [G, he] = l(!0), [De, ge] = l(!1), [le, Q] = l(null), p = N(null), R = N(null), V = N(null), ie = N(null), q = N(null), C = N(null), x = N(null), s = N(null), Me = !!(U && !H), Te = ht(B), Ie = st(ee), Ee = ke(ne), me = De || H;
const [d, f] = l(!1), [A, S] = l(!1), [o, g] = l(null), [b, h] = l(new Uint8Array(0)), [H, _] = l(!1), [ue, $] = l(null), [U, j] = l(null), [K, F] = l(0), [O, D] = l(0), [B, ee] = l(0), [k, J] = l(""), [ye, te] = l(!0), [re, Y] = l(0), [G, he] = l(!0), [De, le] = l(!1), [me, ne] = l(null), p = N(null), R = N(null), Q = N(null), ie = N(null), V = N(null), C = N(null), x = N(null), s = N(null), Me = !!(U && !H), Te = ht(B), Ie = st(K), Ee = ke(re), ve = De || H;
Z(() => {
if (!d || A)
return;
const f = setInterval(() => {
const z = setInterval(() => {
const E = performance.now();
F((w) => w + (E - O)), D(E);
}, 1e3);
return () => clearInterval(f);
return () => clearInterval(z);
}, [O, A, d]), Z(() => {
if (le) {
K();
if (me) {
X();
return;
}
}, [le]), Z(() => () => {
K();
}, [me]), Z(() => () => {
X();
}, []), Z(() => (G || window.addEventListener("beforeunload", pe), () => {

@@ -737,50 +737,48 @@ window.removeEventListener("beforeunload", pe);

throw new Error("Error: The audio blob is empty");
const f = URL.createObjectURL(i);
J(f);
const z = URL.createObjectURL(i);
J(z);
const E = await i.arrayBuffer(), Ne = await new AudioContext().decodeAudioData(E);
j(Ne), te(Ne.duration - 0.06), Q(null);
} catch (f) {
console.error("Error processing the audio blob:", f), Q(
f instanceof Error ? f : new Error("Error processing the audio blob")
j(Ne), ee(Ne.duration - 0.06), ne(null);
} catch (z) {
console.error("Error processing the audio blob:", z), ne(
z instanceof Error ? z : new Error("Error processing the audio blob")
);
}
}, Ce = () => {
navigator.mediaDevices.getUserMedia({ audio: !0 }).then((i) => {
D(performance.now()), z(!0), g(i), R.current = new window.AudioContext(), V.current = R.current.createAnalyser(), ie.current = new Uint8Array(
V.current.frequencyBinCount
), q.current = R.current.createMediaStreamSource(i), q.current.connect(V.current), p.current = new MediaRecorder(i), p.current.addEventListener(
f(!0), navigator.mediaDevices.getUserMedia({ audio: !0 }).then((i) => {
D(performance.now()), g(i), R.current = new window.AudioContext(), Q.current = R.current.createAnalyser(), ie.current = new Uint8Array(
Q.current.frequencyBinCount
), V.current = R.current.createMediaStreamSource(i), V.current.connect(Q.current), p.current = new MediaRecorder(i), p.current.addEventListener(
"dataavailable",
ve
), p.current.start(), X();
de
), p.current.start(), q();
}).catch((i) => {
if (console.error("Error starting audio recording:", i), i instanceof Error) {
Q(i);
return;
}
Q(new Error("Error starting audio recording"));
f(!1), ne(
i instanceof Error ? i : new Error("Error starting audio recording")
);
});
}, X = () => {
V.current.getByteTimeDomainData(ie.current), h(new Uint8Array(ie.current)), C.current = requestAnimationFrame(X);
}, ve = (i) => {
s.current = new Audio(), $(i.data), T(i.data);
}, q = () => {
Q.current.getByteTimeDomainData(ie.current), h(new Uint8Array(ie.current)), C.current = requestAnimationFrame(q);
}, de = (i) => {
p.current && (p.current = null, s.current = new Audio(), $(i.data), T(i.data));
}, ce = () => {
s.current && (Y(s.current.currentTime), x.current = requestAnimationFrame(ce));
}, _e = () => {
K(), he(!1), !d && (e && e(), Ce());
d || (X(), Ce(), he(!1), e && e());
}, oe = () => {
d && (p.current && (p.current.stop(), p.current.removeEventListener(
d && (f(!1), p.current && (p.current.stop(), p.current.removeEventListener(
"dataavailable",
ve
), p.current = null), o == null || o.getTracks().forEach((i) => i.stop()), t && t(), C.current && cancelAnimationFrame(C.current), q.current && q.current.disconnect(), R.current && R.current.state !== "closed" && R.current.close(), _(!0), z(!1), F(0), S(!1));
}, K = () => {
C.current && cancelAnimationFrame(C.current), x.current && cancelAnimationFrame(x.current), p.current && (p.current.removeEventListener(
de
)), o == null || o.getTracks().forEach((i) => i.stop()), C.current && cancelAnimationFrame(C.current), V.current && V.current.disconnect(), R.current && R.current.state !== "closed" && R.current.close(), _(!0), F(0), S(!1), t && t());
}, X = () => {
C.current && (cancelAnimationFrame(C.current), C.current = null), x.current && (cancelAnimationFrame(x.current), x.current = null), p.current && (p.current.removeEventListener(
"dataavailable",
ve
), p.current.stop(), p.current = null), o == null || o.getTracks().forEach((i) => i.stop()), s != null && s.current && (s.current.removeEventListener("ended", se), s.current.pause(), s.current.src = "", s.current = null), p.current = null, R.current = null, V.current = null, ie.current = null, q.current = null, C.current = null, x.current = null, c && c(), g(null), z(!1), _(!1), $(null), j(null), F(0), D(0), te(0), J(""), Y(0), re(!0), S(!1), h(new Uint8Array(0)), Q(null), he(!0);
de
), p.current.stop(), p.current = null), o == null || o.getTracks().forEach((i) => i.stop()), s != null && s.current && (s.current.removeEventListener("ended", se), s.current.pause(), s.current.src = "", s.current = null), R.current = null, Q.current = null, ie.current = null, V.current = null, g(null), f(!1), _(!1), $(null), j(null), F(0), D(0), ee(0), J(""), Y(0), te(!0), S(!1), le(!1), h(new Uint8Array(0)), ne(null), he(!0), c && c();
}, we = () => {
if (s.current && s.current.paused) {
const i = s.current.play();
i !== void 0 && i.catch((f) => {
console.error(f), v && v(
f instanceof Error ? f : new Error("Error playing audio")
i !== void 0 && i.catch((z) => {
console.error(z), v && v(
z instanceof Error ? z : new Error("Error playing audio")
);

@@ -790,5 +788,5 @@ });

}, Le = () => {
var i, f, E;
var i, z, E;
if (d) {
S((w) => !w), ((i = p.current) == null ? void 0 : i.state) === "recording" ? (n && n(), (f = p.current) == null || f.pause(), F((w) => w + (performance.now() - O)), C.current && cancelAnimationFrame(C.current)) : (r && r(), (E = p.current) == null || E.resume(), D(performance.now()), C.current = requestAnimationFrame(X));
S((w) => !w), ((i = p.current) == null ? void 0 : i.state) === "recording" ? ((z = p.current) == null || z.pause(), F((w) => w + (performance.now() - O)), C.current && cancelAnimationFrame(C.current), n && n()) : (C.current = requestAnimationFrame(q), (E = p.current) == null || E.resume(), D(performance.now()), r && r());
return;

@@ -798,12 +796,12 @@ }

if (s.current.paused)
M && ne === 0 && M(), I && ne !== 0 && I(), requestAnimationFrame(ce), s.current.addEventListener("ended", se), we(), re(!1);
requestAnimationFrame(ce), we(), s.current.addEventListener("ended", se), te(!1), M && re === 0 && M(), I && re !== 0 && I();
else {
x.current && cancelAnimationFrame(x.current), m && m(), s.current.removeEventListener("ended", se), s.current.pause(), re(!0);
x.current && cancelAnimationFrame(x.current), s.current.removeEventListener("ended", se), s.current.pause(), te(!0);
const w = s.current.currentTime;
Y(w), s.current.currentTime = w;
Y(w), s.current.currentTime = w, m && m();
}
}, se = () => {
x.current && cancelAnimationFrame(x.current), re(!0), u && u(), s != null && s.current && (s.current.currentTime = 0, Y(0));
x.current && cancelAnimationFrame(x.current), te(!0), s != null && s.current && (s.current.currentTime = 0, Y(0), u && u());
}, Se = () => {
var f;
var z;
if (!k)

@@ -813,3 +811,3 @@ return;

i.href = k, i.download = `recorded_audio${ut(
(f = p.current) == null ? void 0 : f.mimeType
(z = p.current) == null ? void 0 : z.mimeType
)}`, document.body.appendChild(i), i.click(), document.body.removeChild(i), URL.revokeObjectURL(k);

@@ -822,8 +820,8 @@ };

audioData: b,
recordingTime: ee,
isProcessingRecordedAudio: me,
recordingTime: K,
isProcessingRecordedAudio: ve,
recordedBlob: ue,
mediaRecorder: p.current,
duration: B,
currentAudioTime: ne,
currentAudioTime: re,
audioSrc: k,

@@ -841,7 +839,7 @@ isPausedRecordedAudio: ye,

saveAudioFile: Se,
clearCanvas: K,
clearCanvas: X,
setCurrentAudioTime: Y,
error: le,
error: me,
_setIsProcessingAudioOnComplete: _,
_setIsProcessingOnResize: ge
_setIsProcessingOnResize: le
};

@@ -848,0 +846,0 @@ }
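
The hunks above come from the minified bundle, so most of the changed lines are single-letter identifier renames introduced by the minifier rather than changes to the public API. For orientation, the sketch below shows how that public surface is typically wired up in a React app. The option and return-value names (width, height, barWidth, gap, rounded, isControlPanelShown, isDownloadAudioButtonShown, recordedBlob, error, saveAudioFile) all appear in the bundle above; the export names useVoiceVisualizer and VoiceVisualizer and the controls prop are assumptions based on the package's documented usage and are not confirmed by this diff.

import { useEffect } from "react";
import { VoiceVisualizer, useVoiceVisualizer } from "react-voice-visualizer";

// Minimal usage sketch. Export names and the `controls` prop are assumed from
// the package's documented API; the option and return-value names used here
// (barWidth, gap, rounded, recordedBlob, error, saveAudioFile, ...) match
// identifiers visible in the bundle diff above.
function RecorderDemo() {
  const recorderControls = useVoiceVisualizer();
  const { recordedBlob, error, saveAudioFile } = recorderControls;

  // React to a finished recording.
  useEffect(() => {
    if (recordedBlob) console.log("Recorded blob:", recordedBlob);
  }, [recordedBlob]);

  // Surface recording/playback errors.
  useEffect(() => {
    if (error) console.error(error);
  }, [error]);

  return (
    <>
      <VoiceVisualizer
        controls={recorderControls}
        width="100%"
        height={200}
        barWidth={2}
        gap={1}
        rounded={5}
        isControlPanelShown
        isDownloadAudioButtonShown={false}
      />
      <button type="button" onClick={saveAudioFile}>
        Save recording
      </button>
    </>
  );
}

export default RecorderDemo;

Judging from the diff, the hook appears to own the MediaRecorder, AudioContext, and playback state (mediaRecorder, audioSrc, currentAudioTime, clearCanvas, and so on), while the component only renders the canvas, progress indicators, and control panel.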

package.json

{
"name": "react-voice-visualizer",
"private": false,
"version": "1.7.4",
"version": "1.7.5",
"type": "module",

@@ -6,0 +6,0 @@ "author": "Yurii Zarytskyi",
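
The only manifest change shown is the version bump from 1.7.4 to 1.7.5, so picking up this release should just be an install of the new version:

npm install react-voice-visualizer@1.7.5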
