New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@11labs/client

Package Overview
Dependencies
Maintainers
1
Versions
15
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@11labs/client - npm Package Compare versions

Comparing version 0.0.2 to 0.0.3-beta.1

21

dist/index.d.ts
import { Input } from "./utils/input";
import { Output } from "./utils/output";
import { SessionConfig } from "./utils/connection";
export type { SocketEvent } from "./utils/events";
import { ClientToolCallEvent } from "./utils/events";
export type { IncomingSocketEvent } from "./utils/events";
export type { SessionConfig } from "./utils/connection";

@@ -9,3 +10,6 @@ export type Role = "user" | "ai";

export type Status = "connecting" | "connected" | "disconnecting" | "disconnected";

/** Configuration for client-side tools the agent may invoke over the socket. */
export type ClientToolsConfig = {
  // Map of tool name -> handler. Handlers may be sync or async and may
  // return a string/number result (sent back to the agent) or nothing.
  clientTools: Record<string, (parameters: any) => Promise<string | number | void> | string | number | void>;
};

/**
 * Options accepted by Conversation.startSession.
 * (The rendered diff showed both the 0.0.2 and 0.0.3 declarations of
 * `Options`; the duplicate identifier is removed — this is the 0.0.3 form.)
 */
export type Options = SessionConfig & Callbacks & ClientToolsConfig;
export type Callbacks = {

@@ -15,3 +19,5 @@ onConnect: (props: {

}) => void;
onDebug: (props: any) => void;
onDisconnect: () => void;
onError: (message: string, context?: any) => void;
onMessage: (props: {

@@ -21,10 +27,9 @@ message: string;

}) => void;
onDebug: (props: any) => void;
onError: (message: string, context?: any) => void;
onModeChange: (prop: {
mode: Mode;
}) => void;
onStatusChange: (prop: {
status: Status;
}) => void;
onModeChange: (prop: {
mode: Mode;
}) => void;
onUnhandledClientToolCall?: (params: ClientToolCallEvent["client_tool_call"]) => void;
};

@@ -36,3 +41,3 @@ export declare class Conversation {

readonly output: Output;
/**
 * Opens a websocket session and resolves with a connected Conversation.
 * Callbacks and client tools are optional; no-op defaults are used for any
 * that are omitted. (The rendered diff kept both the 0.0.2 and 0.0.3
 * signatures; the duplicate is removed — this is the 0.0.3, backward-
 * compatible signature.)
 */
static startSession(options: SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig>): Promise<Conversation>;
private lastInterruptTimestamp;

@@ -39,0 +44,0 @@ private mode;

@@ -1,2 +0,2 @@

function t(){return t=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)({}).hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t},t.apply(null,arguments)}function e(t){const e=new Uint8Array(t);return window.btoa(String.fromCharCode(...e))}function n(t){const e=window.atob(t),n=e.length,s=new Uint8Array(n);for(let t=0;t<n;t++)s[t]=e.charCodeAt(t);return s.buffer}const s=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the 
Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),a=URL.createObjectURL(s);class i{static async create(t){let e=null,n=null;try{const s=navigator.mediaDevices.getSupportedConstraints().sampleRate;e=new window.AudioContext(s?{sampleRate:t}:{});const o=e.createAnalyser();s||await e.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js"),await e.audioWorklet.addModule(a),n=await navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}});const r=e.createMediaStreamSource(n),u=new AudioWorkletNode(e,"raw-audio-processor");return r.connect(o),o.connect(u),new i(e,o,u,n)}catch(t){var s,o;throw null==(s=n)||s.getTracks().forEach(t=>t.stop()),null==(o=e)||o.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=t,this.analyser=e,this.worklet=n,this.inputStream=s}async close(){this.inputStream.getTracks().forEach(t=>t.stop()),await this.context.close()}}const o=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n 
this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(o);class u{static async create(t){let e=null;try{e=new AudioContext({sampleRate:t});const n=e.createAnalyser(),s=e.createGain();s.connect(n),n.connect(e.destination),await e.audioWorklet.addModule(r);const a=new AudioWorkletNode(e,"audio-concat-processor");return a.connect(s),new u(e,n,s,a)}catch(t){var n;throw null==(n=e)||n.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=t,this.analyser=e,this.gain=n,this.worklet=s}async close(){await this.context.close()}}function c(t){return!!t.type}class l{static async create(t){let e=null;try{var n;const s=null!=(n=t.origin)?n:"wss://api.elevenlabs.io",a=t.signedUrl?t.signedUrl:s+"/v1/convai/conversation?agent_id="+t.agentId,i=["convai"];t.authorization&&i.push(`bearer.${t.authorization}`),e=new WebSocket(a,i);const o=await new Promise((t,n)=>{e.addEventListener("error",n),e.addEventListener("close",n),e.addEventListener("message",e=>{const n=JSON.parse(e.data);c(n)&&("conversation_initiation_metadata"===n.type?t(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation 
metadata."))},{once:!0})}),r=o.conversation_id,u=parseInt(o.agent_output_audio_format.replace("pcm_",""));return new l(e,r,u)}catch(t){var s;throw null==(s=e)||s.close(),t}}constructor(t,e,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=t,this.conversationId=e,this.sampleRate=n}close(){this.socket.close()}}const h={onConnect:()=>{},onDisconnect:()=>{},onError:()=>{},onDebug:()=>{},onMessage:()=>{},onStatusChange:()=>{},onModeChange:()=>{}};class p{static async startSession(e){const n=t({},h,e);n.onStatusChange({status:"connecting"});let s=null,a=null,o=null;try{return s=await i.create(16e3),a=await l.create(e),o=await u.create(a.sampleRate),new p(n,a,s,o)}catch(t){var r,c,d;throw n.onStatusChange({status:"disconnected"}),null==(r=a)||r.close(),await(null==(c=s)?void 0:c.close()),await(null==(d=o)?void 0:d.close()),t}}constructor(t,s,a,i){var o=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=async function(){"connected"===o.status&&(o.updateStatus("disconnecting"),o.connection.close(),await o.input.close(),await o.output.close(),o.updateStatus("disconnected"))},this.updateMode=t=>{t!==this.mode&&(this.mode=t,this.options.onModeChange({mode:t}))},this.updateStatus=t=>{t!==this.status&&(this.status=t,this.options.onStatusChange({status:t}))},this.onEvent=t=>{try{const 
e=JSON.parse(t.data);if(!c(e))return;switch(e.type){case"interruption":e.interruption_event&&(this.lastInterruptTimestamp=e.interruption_event.event_id),this.fadeOutAudio();break;case"agent_response":this.options.onMessage({source:"ai",message:e.agent_response_event.agent_response});break;case"user_transcript":this.options.onMessage({source:"user",message:e.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":this.options.onDebug({type:"tentative_agent_response",response:e.tentative_agent_response_internal_event.tentative_agent_response});break;case"audio":this.lastInterruptTimestamp<=e.audio_event.event_id&&(this.addAudioBase64Chunk(e.audio_event.audio_base_64),this.updateMode("speaking"));break;case"ping":this.connection.socket.send(JSON.stringify({type:"pong",event_id:e.ping_event.event_id}));break;default:this.options.onDebug(e)}}catch(e){return void this.onError("Failed to parse event data",{event:t})}},this.onInputWorkletMessage=t=>{const n=JSON.stringify({user_audio_chunk:e(t.data[0].buffer)});"connected"===this.status&&this.connection.socket.send(n)},this.onOutputWorkletMessage=({data:t})=>{"process"===t.type&&this.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=async function(t){o.output.gain.gain.value=o.volume,o.output.worklet.port.postMessage({type:"clearInterrupted"}),o.output.worklet.port.postMessage({type:"buffer",buffer:n(t)})},this.fadeOutAudio=async function(){o.updateMode("listening"),o.output.worklet.port.postMessage({type:"interrupt"}),o.output.gain.gain.exponentialRampToValueAtTime(1e-4,o.output.context.currentTime+2),setTimeout(()=>{o.output.gain.gain.value=o.volume,o.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},this.onError=(t,e)=>{console.error(t,e),this.options.onError(t,e)},this.calculateVolume=t=>{if(0===t.length)return 0;let e=0;for(let n=0;n<t.length;n++)e+=t[n]/255;return 
e/=t.length,e<0?0:e>1?1:e},this.getId=()=>this.connection.conversationId,this.setVolume=({volume:t})=>{this.volume=t},this.getInputByteFrequencyData=()=>(null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData),this.getOutputByteFrequencyData=()=>(null!=this.outputFrequencyData||(this.outputFrequencyData=new Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData),this.getInputVolume=()=>this.calculateVolume(this.getInputByteFrequencyData()),this.getOutputVolume=()=>this.calculateVolume(this.getOutputByteFrequencyData()),this.options=t,this.connection=s,this.input=a,this.output=i,this.options.onConnect({conversationId:s.conversationId}),this.connection.socket.addEventListener("message",t=>{this.onEvent(t)}),this.connection.socket.addEventListener("error",t=>{this.updateStatus("disconnected"),this.onError("Socket error",t)}),this.connection.socket.addEventListener("close",()=>{this.updateStatus("disconnected"),this.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}}export{p as Conversation};
function t(){return t=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var s in n)({}).hasOwnProperty.call(n,s)&&(t[s]=n[s])}return t},t.apply(null,arguments)}function e(t){const e=new Uint8Array(t);return window.btoa(String.fromCharCode(...e))}function n(t){const e=window.atob(t),n=e.length,s=new Uint8Array(n);for(let t=0;t<n;t++)s[t]=e.charCodeAt(t);return s.buffer}const s=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the 
Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(s);class a{static async create(t){let e=null,n=null;try{const s=navigator.mediaDevices.getSupportedConstraints().sampleRate;e=new window.AudioContext(s?{sampleRate:t}:{});const i=e.createAnalyser();s||await e.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js"),await e.audioWorklet.addModule(o),n=await navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}});const r=e.createMediaStreamSource(n),l=new AudioWorkletNode(e,"raw-audio-processor");return r.connect(i),i.connect(l),new a(e,i,l,n)}catch(t){var s,i;throw null==(s=n)||s.getTracks().forEach(t=>t.stop()),null==(i=e)||i.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=t,this.analyser=e,this.worklet=n,this.inputStream=s}async close(){this.inputStream.getTracks().forEach(t=>t.stop()),await this.context.close()}}const i=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n 
this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(i);class l{static async create(t){let e=null;try{e=new AudioContext({sampleRate:t});const n=e.createAnalyser(),s=e.createGain();s.connect(n),n.connect(e.destination),await e.audioWorklet.addModule(r);const o=new AudioWorkletNode(e,"audio-concat-processor");return o.connect(s),new l(e,n,s,o)}catch(t){var n;throw null==(n=e)||n.close(),t}}constructor(t,e,n,s){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=t,this.analyser=e,this.gain=n,this.worklet=s}async close(){await this.context.close()}}function c(t){return!!t.type}class u{static async create(t){let e=null;try{var n;const s=null!=(n=t.origin)?n:"wss://api.elevenlabs.io",o=t.signedUrl?t.signedUrl:s+"/v1/convai/conversation?agent_id="+t.agentId,a=["convai"];t.authorization&&a.push(`bearer.${t.authorization}`),e=new WebSocket(o,a);const i=await new Promise((n,s)=>{e.addEventListener("open",()=>{if(t.overrides){var n,s,o,a,i;const r={type:"conversation_initiation_client_data",conversation_initiation_client_data:{custom_llm_extra_body:t.overrides.customLlmExtraBody,agent:{prompt:null==(n=t.overrides.agent)?void 0:n.prompt,first_message:null==(s=t.overrides.agent)?void 
0:s.firstMessage,language:null==(o=t.overrides.agent)?void 0:o.language},tts:{voice_id:null==(a=t.overrides.tts)?void 0:a.voiceId}}};null==(i=e)||i.send(JSON.stringify(r))}},{once:!0}),e.addEventListener("error",s),e.addEventListener("close",s),e.addEventListener("message",t=>{const e=JSON.parse(t.data);c(e)&&("conversation_initiation_metadata"===e.type?n(e.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})}),r=i.conversation_id,l=parseInt(i.agent_output_audio_format.replace("pcm_",""));return new u(e,r,l)}catch(t){var s;throw null==(s=e)||s.close(),t}}constructor(t,e,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=t,this.conversationId=e,this.sampleRate=n}close(){this.socket.close()}sendMessage(t){this.socket.send(JSON.stringify(t))}}const h={clientTools:{}},d={onConnect:()=>{},onDebug:()=>{},onDisconnect:()=>{},onError:()=>{},onMessage:()=>{},onModeChange:()=>{},onStatusChange:()=>{}};class p{static async startSession(e){const n=t({},h,d,e);n.onStatusChange({status:"connecting"});let s=null,o=null,i=null;try{return s=await a.create(16e3),o=await u.create(e),i=await l.create(o.sampleRate),new p(n,o,s,i)}catch(t){var r,c,f;throw n.onStatusChange({status:"disconnected"}),null==(r=o)||r.close(),await(null==(c=s)?void 0:c.close()),await(null==(f=i)?void 0:f.close()),t}}constructor(t,s,o,a){var i=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=async function(){"connected"===i.status&&(i.updateStatus("disconnecting"),i.connection.close(),await i.input.close(),await 
i.output.close(),i.updateStatus("disconnected"))},this.updateMode=t=>{t!==this.mode&&(this.mode=t,this.options.onModeChange({mode:t}))},this.updateStatus=t=>{t!==this.status&&(this.status=t,this.options.onStatusChange({status:t}))},this.onEvent=async function(t){try{const e=JSON.parse(t.data);if(!c(e))return;switch(e.type){case"interruption":e.interruption_event&&(i.lastInterruptTimestamp=e.interruption_event.event_id),i.fadeOutAudio();break;case"agent_response":i.options.onMessage({source:"ai",message:e.agent_response_event.agent_response});break;case"user_transcript":i.options.onMessage({source:"user",message:e.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":i.options.onDebug({type:"tentative_agent_response",response:e.tentative_agent_response_internal_event.tentative_agent_response});break;case"client_tool_call":if(i.options.clientTools.hasOwnProperty(e.client_tool_call.tool_name)){try{const t=await i.options.clientTools[e.client_tool_call.tool_name](e.client_tool_call.parameters);i.connection.sendMessage({tool_call_id:e.client_tool_call.tool_call_id,response:t,is_error:!1})}catch(t){i.onError("Client tool execution failed with following error: "+(null==t?void 0:t.message),{clientToolName:e.client_tool_call.tool_name}),i.connection.sendMessage({tool_call_id:e.client_tool_call.tool_call_id,response:"Client tool execution failed: "+(null==t?void 0:t.message),is_error:!0})}break}if(i.options.onUnhandledClientToolCall){i.options.onUnhandledClientToolCall(e.client_tool_call);break}i.onError(`Client tool with name ${e.client_tool_call.tool_name} is not defined on client`,{clientToolName:e.client_tool_call.tool_name}),i.connection.sendMessage({tool_call_id:e.client_tool_call.tool_call_id,response:`Client tool with name ${e.client_tool_call.tool_name} is not defined on 
client`,is_error:!0});break;case"audio":i.lastInterruptTimestamp<=e.audio_event.event_id&&(i.addAudioBase64Chunk(e.audio_event.audio_base_64),i.updateMode("speaking"));break;case"ping":i.connection.sendMessage({type:"pong",event_id:e.ping_event.event_id});break;default:i.options.onDebug(e)}}catch(e){return void i.onError("Failed to parse event data",{event:t})}},this.onInputWorkletMessage=t=>{"connected"===this.status&&this.connection.sendMessage({user_audio_chunk:e(t.data[0].buffer)})},this.onOutputWorkletMessage=({data:t})=>{"process"===t.type&&this.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=async function(t){i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"}),i.output.worklet.port.postMessage({type:"buffer",buffer:n(t)})},this.fadeOutAudio=async function(){i.updateMode("listening"),i.output.worklet.port.postMessage({type:"interrupt"}),i.output.gain.gain.exponentialRampToValueAtTime(1e-4,i.output.context.currentTime+2),setTimeout(()=>{i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},this.onError=(t,e)=>{console.error(t,e),this.options.onError(t,e)},this.calculateVolume=t=>{if(0===t.length)return 0;let e=0;for(let n=0;n<t.length;n++)e+=t[n]/255;return e/=t.length,e<0?0:e>1?1:e},this.getId=()=>this.connection.conversationId,this.setVolume=({volume:t})=>{this.volume=t},this.getInputByteFrequencyData=()=>(null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData),this.getOutputByteFrequencyData=()=>(null!=this.outputFrequencyData||(this.outputFrequencyData=new 
Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData),this.getInputVolume=()=>this.calculateVolume(this.getInputByteFrequencyData()),this.getOutputVolume=()=>this.calculateVolume(this.getOutputByteFrequencyData()),this.options=t,this.connection=s,this.input=o,this.output=a,this.options.onConnect({conversationId:s.conversationId}),this.connection.socket.addEventListener("message",t=>{this.onEvent(t)}),this.connection.socket.addEventListener("error",t=>{this.updateStatus("disconnected"),this.onError("Socket error",t)}),this.connection.socket.addEventListener("close",()=>{this.updateStatus("disconnected"),this.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}}export{p as Conversation};
//# sourceMappingURL=lib.modern.js.map

@@ -1,2 +0,2 @@

function e(){return e=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)({}).hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},e.apply(null,arguments)}function t(e){for(var t=window.atob(e),n=t.length,r=new Uint8Array(n),o=0;o<n;o++)r[o]=t.charCodeAt(o);return r.buffer}var n=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(n),o=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=r}return e.create=function(t){try{var n=null,o=null;return Promise.resolve(function(s,i){try{var a=function(){function s(){return Promise.resolve(n.audioWorklet.addModule(r)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}})).then(function(t){var r=n.createMediaStreamSource(o=t),s=new AudioWorkletNode(n,"raw-audio-processor");return r.connect(a),a.connect(s),new e(n,a,s,o)})})}var i=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(i?{sampleRate:t}:{})).createAnalyser(),u=function(){if(!i)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(s):s()}()}catch(e){return i(e)}return a&&a.then?a.then(void 0,i):a}(0,function(e){var t,r;throw null==(t=o)||t.getTracks().forEach(function(e){return e.stop()}),null==(r=n)||r.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),s=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n 
this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),i=URL.createObjectURL(s),a=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=r}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(a=(n=new AudioContext({sampleRate:t})).createAnalyser(),(u=n.createGain()).connect(a),a.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(i)).then(function(){var t=new AudioWorkletNode(n,"audio-concat-processor");return t.connect(u),new e(n,a,u,t)}))}catch(e){return o(e)}var a,u;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return 
Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function u(e){return!!e.type}var c=/*#__PURE__*/function(){function e(e,t,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=e,this.conversationId=t,this.sampleRate=n}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var s=(a=null!=(i=t.origin)?i:"wss://api.elevenlabs.io",c=t.signedUrl?t.signedUrl:a+"/v1/convai/conversation?agent_id="+t.agentId,l=["convai"],t.authorization&&l.push("bearer."+t.authorization),n=new WebSocket(c,l),Promise.resolve(new Promise(function(e,t){n.addEventListener("error",t),n.addEventListener("close",t),n.addEventListener("message",function(t){var n=JSON.parse(t.data);u(n)&&("conversation_initiation_metadata"===n.type?e(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(t){var r=t.conversation_id,o=parseInt(t.agent_output_audio_format.replace("pcm_",""));return new e(n,r,o)}))}catch(e){return o(e)}var i,a,c,l;return s&&s.then?s.then(void 0,o):s}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){this.socket.close()},e}(),l={onConnect:function(){},onDisconnect:function(){},onError:function(){},onDebug:function(){},onMessage:function(){},onStatusChange:function(){},onModeChange:function(){}},h=/*#__PURE__*/function(){function n(e,n,r,o){var s=this,i=this,a=this,c=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=function(){try{return"connected"!==i.status?Promise.resolve():(i.updateStatus("disconnecting"),i.connection.close(),Promise.resolve(i.input.close()).then(function(){return 
Promise.resolve(i.output.close()).then(function(){i.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==s.mode&&(s.mode=e,s.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==s.status&&(s.status=e,s.options.onStatusChange({status:e}))},this.onEvent=function(e){try{var t=JSON.parse(e.data);if(!u(t))return;switch(t.type){case"interruption":t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio();break;case"agent_response":s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response});break;case"user_transcript":s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response});break;case"audio":s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.updateMode("speaking"));break;case"ping":s.connection.socket.send(JSON.stringify({type:"pong",event_id:t.ping_event.event_id}));break;default:s.options.onDebug(t)}}catch(t){return void s.onError("Failed to parse event data",{event:e})}},this.onInputWorkletMessage=function(e){var t,n,r=JSON.stringify({user_audio_chunk:(t=e.data[0].buffer,n=new Uint8Array(t),window.btoa(String.fromCharCode.apply(String,n)))});"connected"===s.status&&s.connection.socket.send(r)},this.onOutputWorkletMessage=function(e){var t=e.data;"process"===t.type&&s.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return a.output.gain.gain.value=a.volume,a.output.worklet.port.postMessage({type:"clearInterrupted"}),a.output.worklet.port.postMessage({type:"buffer",buffer:t(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return 
c.updateMode("listening"),c.output.worklet.port.postMessage({type:"interrupt"}),c.output.gain.gain.exponentialRampToValueAtTime(1e-4,c.output.context.currentTime+2),setTimeout(function(){c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return Promise.reject(e)}},this.onError=function(e,t){console.error(e,t),s.options.onError(e,t)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var t=0,n=0;n<e.length;n++)t+=e[n]/255;return(t/=e.length)<0?0:t>1?1:t},this.getId=function(){return s.connection.conversationId},this.setVolume=function(e){s.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=s.inputFrequencyData||(s.inputFrequencyData=new Uint8Array(s.input.analyser.frequencyBinCount)),s.input.analyser.getByteFrequencyData(s.inputFrequencyData),s.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=s.outputFrequencyData||(s.outputFrequencyData=new Uint8Array(s.output.analyser.frequencyBinCount)),s.output.analyser.getByteFrequencyData(s.outputFrequencyData),s.outputFrequencyData},this.getInputVolume=function(){return s.calculateVolume(s.getInputByteFrequencyData())},this.getOutputVolume=function(){return s.calculateVolume(s.getOutputByteFrequencyData())},this.options=e,this.connection=n,this.input=r,this.output=o,this.options.onConnect({conversationId:n.conversationId}),this.connection.socket.addEventListener("message",function(e){s.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){s.updateStatus("disconnected"),s.onError("Socket error",e)}),this.connection.socket.addEventListener("close",function(){s.updateStatus("disconnected"),s.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return n.startSession=function(t){try{var r=e({},l,t);r.onStatusChange({status:"connecting"});var 
s=null,i=null,u=null;return Promise.resolve(function(e,l){try{var h=Promise.resolve(o.create(16e3)).then(function(e){return s=e,Promise.resolve(c.create(t)).then(function(e){return i=e,Promise.resolve(a.create(i.sampleRate)).then(function(e){return new n(r,i,s,u=e)})})})}catch(e){return l(e)}return h&&h.then?h.then(void 0,l):h}(0,function(e){var t,n;return r.onStatusChange({status:"disconnected"}),null==(t=i)||t.close(),Promise.resolve(null==(n=s)?void 0:n.close()).then(function(){var t;return Promise.resolve(null==(t=u)?void 0:t.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},n}();export{h as Conversation};
function t(){return t=Object.assign?Object.assign.bind():function(t){for(var e=1;e<arguments.length;e++){var n=arguments[e];for(var o in n)({}).hasOwnProperty.call(n,o)&&(t[o]=n[o])}return t},t.apply(null,arguments)}function e(t){for(var e=window.atob(t),n=e.length,o=new Uint8Array(n),r=0;r<n;r++)o[r]=e.charCodeAt(r);return o.buffer}var n=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? 
sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(n),r=/*#__PURE__*/function(){function t(t,e,n,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=t,this.analyser=e,this.worklet=n,this.inputStream=o}return t.create=function(e){try{var n=null,r=null;return Promise.resolve(function(i,s){try{var a=function(){function i(){return Promise.resolve(n.audioWorklet.addModule(o)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:e},echoCancellation:{ideal:!0}}})).then(function(e){var o=n.createMediaStreamSource(r=e),i=new AudioWorkletNode(n,"raw-audio-processor");return o.connect(a),a.connect(i),new t(n,a,i,r)})})}var s=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(s?{sampleRate:e}:{})).createAnalyser(),u=function(){if(!s)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(i):i()}()}catch(t){return s(t)}return a&&a.then?a.then(void 0,s):a}(0,function(t){var e,o;throw null==(e=r)||e.getTracks().forEach(function(t){return t.stop()}),null==(o=n)||o.close(),t}))}catch(t){return Promise.reject(t)}},t.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(t){return t.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(t){return Promise.reject(t)}},t}(),i=new Blob(['\n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n 
this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),s=URL.createObjectURL(i),a=/*#__PURE__*/function(){function t(t,e,n,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=t,this.analyser=e,this.gain=n,this.worklet=o}return t.create=function(e){try{var n=null;return Promise.resolve(function(o,r){try{var i=(a=(n=new AudioContext({sampleRate:e})).createAnalyser(),(u=n.createGain()).connect(a),a.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(s)).then(function(){var e=new AudioWorkletNode(n,"audio-concat-processor");return e.connect(u),new t(n,a,u,e)}))}catch(t){return r(t)}var a,u;return i&&i.then?i.then(void 0,r):i}(0,function(t){var e;throw null==(e=n)||e.close(),t}))}catch(t){return Promise.reject(t)}},t.prototype.close=function(){try{return 
Promise.resolve(this.context.close()).then(function(){})}catch(t){return Promise.reject(t)}},t}();function u(t){return!!t.type}var c=/*#__PURE__*/function(){function t(t,e,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=t,this.conversationId=e,this.sampleRate=n}t.create=function(e){try{var n=null;return Promise.resolve(function(o,r){try{var i=(a=null!=(s=e.origin)?s:"wss://api.elevenlabs.io",c=e.signedUrl?e.signedUrl:a+"/v1/convai/conversation?agent_id="+e.agentId,l=["convai"],e.authorization&&l.push("bearer."+e.authorization),n=new WebSocket(c,l),Promise.resolve(new Promise(function(t,o){n.addEventListener("open",function(){if(e.overrides){var t,o,r,i,s,a={type:"conversation_initiation_client_data",conversation_initiation_client_data:{custom_llm_extra_body:e.overrides.customLlmExtraBody,agent:{prompt:null==(t=e.overrides.agent)?void 0:t.prompt,first_message:null==(o=e.overrides.agent)?void 0:o.firstMessage,language:null==(r=e.overrides.agent)?void 0:r.language},tts:{voice_id:null==(i=e.overrides.tts)?void 0:i.voiceId}}};null==(s=n)||s.send(JSON.stringify(a))}},{once:!0}),n.addEventListener("error",o),n.addEventListener("close",o),n.addEventListener("message",function(e){var n=JSON.parse(e.data);u(n)&&("conversation_initiation_metadata"===n.type?t(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(e){var o=e.conversation_id,r=parseInt(e.agent_output_audio_format.replace("pcm_",""));return new t(n,o,r)}))}catch(t){return r(t)}var s,a,c,l;return i&&i.then?i.then(void 0,r):i}(0,function(t){var e;throw null==(e=n)||e.close(),t}))}catch(t){return Promise.reject(t)}};var e=t.prototype;return e.close=function(){this.socket.close()},e.sendMessage=function(t){this.socket.send(JSON.stringify(t))},t}();function l(t,e){try{var n=t()}catch(t){return e(t)}return n&&n.then?n.then(void 0,e):n}function h(t,e,n){if(!t.s){if(n instanceof d){if(!n.s)return 
void(n.o=h.bind(null,t,e));1&e&&(e=n.s),n=n.v}if(n&&n.then)return void n.then(h.bind(null,t,e),h.bind(null,t,2));t.s=e,t.v=n;var o=t.o;o&&o(t)}}var f={clientTools:{}},d=/*#__PURE__*/function(){function t(){}return t.prototype.then=function(e,n){var o=new t,r=this.s;if(r){var i=1&r?e:n;if(i){try{h(o,1,i(this.v))}catch(t){h(o,2,t)}return o}return this}return this.o=function(t){try{var r=t.v;1&t.s?h(o,1,e?e(r):r):n?h(o,1,n(r)):h(o,2,r)}catch(t){h(o,2,t)}},o},t}(),p={onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onModeChange:function(){},onStatusChange:function(){}},v=/*#__PURE__*/function(){function n(t,n,o,r){var i=this,s=this,a=this,c=this,f=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=function(){try{return"connected"!==s.status?Promise.resolve():(s.updateStatus("disconnecting"),s.connection.close(),Promise.resolve(s.input.close()).then(function(){return Promise.resolve(s.output.close()).then(function(){s.updateStatus("disconnected")})}))}catch(t){return Promise.reject(t)}},this.updateMode=function(t){t!==i.mode&&(i.mode=t,i.options.onModeChange({mode:t}))},this.updateStatus=function(t){t!==i.status&&(i.status=t,i.options.onStatusChange({status:t}))},this.onEvent=function(t){try{return Promise.resolve(l(function(){var e,n=JSON.parse(t.data);if(u(n)){var o=function(t,e){var n,o=-1;t:{for(var r=0;r<e.length;r++){var i=e[r][0];if(i){var s=i();if(s&&s.then)break t;if(s===t){o=r;break}}else o=r}if(-1!==o){do{for(var a=e[o][1];!a;)o++,a=e[o][1];var u=a();if(u&&u.then){n=!0;break t}var c=e[o][2];o++}while(c&&!c());return u}}var l=new d,f=h.bind(null,l,2);return(n?u.then(p):s.then(function n(s){for(;;){if(s===t){o=r;break}if(++r===e.length){if(-1!==o)break;return void 
h(l,1,u)}if(i=e[r][0]){if((s=i())&&s.then)return void s.then(n).then(void 0,f)}else o=r}do{for(var a=e[o][1];!a;)o++,a=e[o][1];var u=a();if(u&&u.then)return void u.then(p).then(void 0,f);var c=e[o][2];o++}while(c&&!c());h(l,1,u)})).then(void 0,f),l;function p(t){for(;;){var n=e[o][2];if(!n||n())break;o++;for(var r=e[o][1];!r;)o++,r=e[o][1];if((t=r())&&t.then)return void t.then(p).then(void 0,f)}h(l,1,t)}}(n.type,[[function(){return"interruption"},function(){return n.interruption_event&&(a.lastInterruptTimestamp=n.interruption_event.event_id),a.fadeOutAudio(),void(e=1)}],[function(){return"agent_response"},function(){return a.options.onMessage({source:"ai",message:n.agent_response_event.agent_response}),void(e=1)}],[function(){return"user_transcript"},function(){return a.options.onMessage({source:"user",message:n.user_transcription_event.user_transcript}),void(e=1)}],[function(){return"internal_tentative_agent_response"},function(){return a.options.onDebug({type:"tentative_agent_response",response:n.tentative_agent_response_internal_event.tentative_agent_response}),void(e=1)}],[function(){return"client_tool_call"},function(){var t=function(){if(a.options.onUnhandledClientToolCall)return a.options.onUnhandledClientToolCall(n.client_tool_call),void(e=1);a.onError("Client tool with name "+n.client_tool_call.tool_name+" is not defined on client",{clientToolName:n.client_tool_call.tool_name}),a.connection.sendMessage({tool_call_id:n.client_tool_call.tool_call_id,response:"Client tool with name "+n.client_tool_call.tool_name+" is not defined on client",is_error:!0}),e=1},o=function(){if(a.options.clientTools.hasOwnProperty(n.client_tool_call.tool_name)){var t=function(){e=1},o=l(function(){return Promise.resolve(a.options.clientTools[n.client_tool_call.tool_name](n.client_tool_call.parameters)).then(function(t){a.connection.sendMessage({tool_call_id:n.client_tool_call.tool_call_id,response:t,is_error:!1})})},function(t){a.onError("Client tool execution failed with 
following error: "+(null==t?void 0:t.message),{clientToolName:n.client_tool_call.tool_name}),a.connection.sendMessage({tool_call_id:n.client_tool_call.tool_call_id,response:"Client tool execution failed: "+(null==t?void 0:t.message),is_error:!0})});return o&&o.then?o.then(t):t()}}();return o&&o.then?o.then(t):t()},function(){return e||e}],[function(){return"audio"},function(){return a.lastInterruptTimestamp<=n.audio_event.event_id&&(a.addAudioBase64Chunk(n.audio_event.audio_base_64),a.updateMode("speaking")),void(e=1)}],[function(){return"ping"},function(){return a.connection.sendMessage({type:"pong",event_id:n.ping_event.event_id}),void(e=1)}],[void 0,function(){return a.options.onDebug(n),void(e=1)}]]);return o&&o.then?o.then(function(){}):void 0}},function(){a.onError("Failed to parse event data",{event:t})}))}catch(t){return Promise.reject(t)}},this.onInputWorkletMessage=function(t){var e,n;"connected"===i.status&&i.connection.sendMessage({user_audio_chunk:(e=t.data[0].buffer,n=new Uint8Array(e),window.btoa(String.fromCharCode.apply(String,n)))})},this.onOutputWorkletMessage=function(t){var e=t.data;"process"===e.type&&i.updateMode(e.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(t){try{return c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"}),c.output.worklet.port.postMessage({type:"buffer",buffer:e(t)}),Promise.resolve()}catch(t){return Promise.reject(t)}},this.fadeOutAudio=function(){try{return f.updateMode("listening"),f.output.worklet.port.postMessage({type:"interrupt"}),f.output.gain.gain.exponentialRampToValueAtTime(1e-4,f.output.context.currentTime+2),setTimeout(function(){f.output.gain.gain.value=f.volume,f.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(t){return Promise.reject(t)}},this.onError=function(t,e){console.error(t,e),i.options.onError(t,e)},this.calculateVolume=function(t){if(0===t.length)return 0;for(var 
e=0,n=0;n<t.length;n++)e+=t[n]/255;return(e/=t.length)<0?0:e>1?1:e},this.getId=function(){return i.connection.conversationId},this.setVolume=function(t){i.volume=t.volume},this.getInputByteFrequencyData=function(){return null!=i.inputFrequencyData||(i.inputFrequencyData=new Uint8Array(i.input.analyser.frequencyBinCount)),i.input.analyser.getByteFrequencyData(i.inputFrequencyData),i.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=i.outputFrequencyData||(i.outputFrequencyData=new Uint8Array(i.output.analyser.frequencyBinCount)),i.output.analyser.getByteFrequencyData(i.outputFrequencyData),i.outputFrequencyData},this.getInputVolume=function(){return i.calculateVolume(i.getInputByteFrequencyData())},this.getOutputVolume=function(){return i.calculateVolume(i.getOutputByteFrequencyData())},this.options=t,this.connection=n,this.input=o,this.output=r,this.options.onConnect({conversationId:n.conversationId}),this.connection.socket.addEventListener("message",function(t){i.onEvent(t)}),this.connection.socket.addEventListener("error",function(t){i.updateStatus("disconnected"),i.onError("Socket error",t)}),this.connection.socket.addEventListener("close",function(){i.updateStatus("disconnected"),i.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return n.startSession=function(e){try{var o=t({},f,p,e);o.onStatusChange({status:"connecting"});var i=null,s=null,u=null;return Promise.resolve(l(function(){return Promise.resolve(r.create(16e3)).then(function(t){return i=t,Promise.resolve(c.create(e)).then(function(t){return s=t,Promise.resolve(a.create(s.sampleRate)).then(function(t){return new n(o,s,i,u=t)})})})},function(t){var e,n;return o.onStatusChange({status:"disconnected"}),null==(e=s)||e.close(),Promise.resolve(null==(n=i)?void 0:n.close()).then(function(){var e;return Promise.resolve(null==(e=u)?void 
0:e.close()).then(function(){throw t})})}))}catch(t){return Promise.reject(t)}},n}();export{v as Conversation};
//# sourceMappingURL=lib.module.js.map

@@ -1,2 +0,2 @@

!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e||self).client={})}(this,function(e){function t(){return t=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)({}).hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},t.apply(null,arguments)}function n(e){for(var t=window.atob(e),n=t.length,r=new Uint8Array(n),o=0;o<n;o++)r[o]=t.charCodeAt(o);return r.buffer}var r=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, 
float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(r),i=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=r}return e.create=function(t){try{var n=null,r=null;return Promise.resolve(function(i,s){try{var a=function(){function i(){return Promise.resolve(n.audioWorklet.addModule(o)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}})).then(function(t){var o=n.createMediaStreamSource(r=t),i=new AudioWorkletNode(n,"raw-audio-processor");return o.connect(a),a.connect(i),new e(n,a,i,r)})})}var s=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(s?{sampleRate:t}:{})).createAnalyser(),u=function(){if(!s)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(i):i()}()}catch(e){return s(e)}return a&&a.then?a.then(void 0,s):a}(0,function(e){var t,o;throw null==(t=r)||t.getTracks().forEach(function(e){return e.stop()}),null==(o=n)||o.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),s=new Blob(['\n class AudioConcatProcessor extends 
AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),a=URL.createObjectURL(s),u=/*#__PURE__*/function(){function e(e,t,n,r){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=r}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var i=(s=(n=new AudioContext({sampleRate:t})).createAnalyser(),(u=n.createGain()).connect(s),s.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(a)).then(function(){var t=new AudioWorkletNode(n,"audio-concat-processor");return t.connect(u),new e(n,s,u,t)}))}catch(e){return o(e)}var s,u;return i&&i.then?i.then(void 0,o):i}(0,function(e){var t;throw 
null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function c(e){return!!e.type}var l=/*#__PURE__*/function(){function e(e,t,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=e,this.conversationId=t,this.sampleRate=n}return e.create=function(t){try{var n=null;return Promise.resolve(function(r,o){try{var i=(a=null!=(s=t.origin)?s:"wss://api.elevenlabs.io",u=t.signedUrl?t.signedUrl:a+"/v1/convai/conversation?agent_id="+t.agentId,l=["convai"],t.authorization&&l.push("bearer."+t.authorization),n=new WebSocket(u,l),Promise.resolve(new Promise(function(e,t){n.addEventListener("error",t),n.addEventListener("close",t),n.addEventListener("message",function(t){var n=JSON.parse(t.data);c(n)&&("conversation_initiation_metadata"===n.type?e(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(t){var r=t.conversation_id,o=parseInt(t.agent_output_audio_format.replace("pcm_",""));return new e(n,r,o)}))}catch(e){return o(e)}var s,a,u,l;return i&&i.then?i.then(void 0,o):i}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){this.socket.close()},e}(),h={onConnect:function(){},onDisconnect:function(){},onError:function(){},onDebug:function(){},onMessage:function(){},onStatusChange:function(){},onModeChange:function(){}};e.Conversation=/*#__PURE__*/function(){function e(e,t,r,o){var i=this,s=this,a=this,u=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 
0,this.volume=1,this.endSession=function(){try{return"connected"!==s.status?Promise.resolve():(s.updateStatus("disconnecting"),s.connection.close(),Promise.resolve(s.input.close()).then(function(){return Promise.resolve(s.output.close()).then(function(){s.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==i.mode&&(i.mode=e,i.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==i.status&&(i.status=e,i.options.onStatusChange({status:e}))},this.onEvent=function(e){try{var t=JSON.parse(e.data);if(!c(t))return;switch(t.type){case"interruption":t.interruption_event&&(i.lastInterruptTimestamp=t.interruption_event.event_id),i.fadeOutAudio();break;case"agent_response":i.options.onMessage({source:"ai",message:t.agent_response_event.agent_response});break;case"user_transcript":i.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":i.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response});break;case"audio":i.lastInterruptTimestamp<=t.audio_event.event_id&&(i.addAudioBase64Chunk(t.audio_event.audio_base_64),i.updateMode("speaking"));break;case"ping":i.connection.socket.send(JSON.stringify({type:"pong",event_id:t.ping_event.event_id}));break;default:i.options.onDebug(t)}}catch(t){return void i.onError("Failed to parse event data",{event:e})}},this.onInputWorkletMessage=function(e){var t,n,r=JSON.stringify({user_audio_chunk:(t=e.data[0].buffer,n=new Uint8Array(t),window.btoa(String.fromCharCode.apply(String,n)))});"connected"===i.status&&i.connection.socket.send(r)},this.onOutputWorkletMessage=function(e){var t=e.data;"process"===t.type&&i.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return 
a.output.gain.gain.value=a.volume,a.output.worklet.port.postMessage({type:"clearInterrupted"}),a.output.worklet.port.postMessage({type:"buffer",buffer:n(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return u.updateMode("listening"),u.output.worklet.port.postMessage({type:"interrupt"}),u.output.gain.gain.exponentialRampToValueAtTime(1e-4,u.output.context.currentTime+2),setTimeout(function(){u.output.gain.gain.value=u.volume,u.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return Promise.reject(e)}},this.onError=function(e,t){console.error(e,t),i.options.onError(e,t)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var t=0,n=0;n<e.length;n++)t+=e[n]/255;return(t/=e.length)<0?0:t>1?1:t},this.getId=function(){return i.connection.conversationId},this.setVolume=function(e){i.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=i.inputFrequencyData||(i.inputFrequencyData=new Uint8Array(i.input.analyser.frequencyBinCount)),i.input.analyser.getByteFrequencyData(i.inputFrequencyData),i.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=i.outputFrequencyData||(i.outputFrequencyData=new Uint8Array(i.output.analyser.frequencyBinCount)),i.output.analyser.getByteFrequencyData(i.outputFrequencyData),i.outputFrequencyData},this.getInputVolume=function(){return i.calculateVolume(i.getInputByteFrequencyData())},this.getOutputVolume=function(){return i.calculateVolume(i.getOutputByteFrequencyData())},this.options=e,this.connection=t,this.input=r,this.output=o,this.options.onConnect({conversationId:t.conversationId}),this.connection.socket.addEventListener("message",function(e){i.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){i.updateStatus("disconnected"),i.onError("Socket 
error",e)}),this.connection.socket.addEventListener("close",function(){i.updateStatus("disconnected"),i.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return e.startSession=function(n){try{var r=t({},h,n);r.onStatusChange({status:"connecting"});var o=null,s=null,a=null;return Promise.resolve(function(t,c){try{var h=Promise.resolve(i.create(16e3)).then(function(t){return o=t,Promise.resolve(l.create(n)).then(function(t){return s=t,Promise.resolve(u.create(s.sampleRate)).then(function(t){return new e(r,s,o,a=t)})})})}catch(e){return c(e)}return h&&h.then?h.then(void 0,c):h}(0,function(e){var t,n;return r.onStatusChange({status:"disconnected"}),null==(t=s)||t.close(),Promise.resolve(null==(n=o)?void 0:n.close()).then(function(){var t;return Promise.resolve(null==(t=a)?void 0:t.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},e}()});
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e||self).client={})}(this,function(e){function t(){return t=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var o in n)({}).hasOwnProperty.call(n,o)&&(e[o]=n[o])}return e},t.apply(null,arguments)}function n(e){for(var t=window.atob(e),n=t.length,o=new Uint8Array(n),r=0;r<n;r++)o[r]=t.charCodeAt(r);return o.buffer}var o=new Blob(['\n const TARGET_SAMPLE_RATE = 16000;\n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = TARGET_SAMPLE_RATE / 4; // Define the threshold for buffer size to be ~0.25s\n\n if (globalThis.LibSampleRate && sampleRate !== TARGET_SAMPLE_RATE) {\n globalThis.LibSampleRate.create(1, sampleRate, TARGET_SAMPLE_RATE).then(resampler => {\n this.resampler = resampler;\n });\n }\n }\n process(inputs, outputs) {\n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let pcm16Array = new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, 
float32Array[i]));\n \n // Scale the sample to the range [-32768, 32767] and store it in the Int16Array\n pcm16Array[i] = sample < 0 ? sample * 32768 : sample * 32767;\n }\n \n // Send the buffered data to the main script\n this.port.postMessage([pcm16Array, maxVolume]);\n \n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(o),i=/*#__PURE__*/function(){function e(e,t,n,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=o}return e.create=function(t){try{var n=null,o=null;return Promise.resolve(function(i,s){try{var a=function(){function i(){return Promise.resolve(n.audioWorklet.addModule(r)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0}}})).then(function(t){var r=n.createMediaStreamSource(o=t),i=new AudioWorkletNode(n,"raw-audio-processor");return r.connect(a),a.connect(i),new e(n,a,i,o)})})}var s=navigator.mediaDevices.getSupportedConstraints().sampleRate,a=(n=new window.AudioContext(s?{sampleRate:t}:{})).createAnalyser(),u=function(){if(!s)return Promise.resolve(n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return u&&u.then?u.then(i):i()}()}catch(e){return s(e)}return a&&a.then?a.then(void 0,s):a}(0,function(e){var t,r;throw null==(t=o)||t.getTracks().forEach(function(e){return e.stop()}),null==(r=n)||r.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),s=new Blob(['\n class AudioConcatProcessor extends 
AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n\n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(new Int16Array(data.buffer));\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n output[i] = this.currentBuffer[this.cursor] / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),a=URL.createObjectURL(s),u=/*#__PURE__*/function(){function e(e,t,n,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=o}return e.create=function(t){try{var n=null;return Promise.resolve(function(o,r){try{var i=(s=(n=new AudioContext({sampleRate:t})).createAnalyser(),(u=n.createGain()).connect(s),s.connect(n.destination),Promise.resolve(n.audioWorklet.addModule(a)).then(function(){var t=new AudioWorkletNode(n,"audio-concat-processor");return t.connect(u),new e(n,s,u,t)}))}catch(e){return r(e)}var s,u;return i&&i.then?i.then(void 0,r):i}(0,function(e){var t;throw 
null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function c(e){return!!e.type}var l=/*#__PURE__*/function(){function e(e,t,n){this.socket=void 0,this.conversationId=void 0,this.sampleRate=void 0,this.socket=e,this.conversationId=t,this.sampleRate=n}e.create=function(t){try{var n=null;return Promise.resolve(function(o,r){try{var i=(a=null!=(s=t.origin)?s:"wss://api.elevenlabs.io",u=t.signedUrl?t.signedUrl:a+"/v1/convai/conversation?agent_id="+t.agentId,l=["convai"],t.authorization&&l.push("bearer."+t.authorization),n=new WebSocket(u,l),Promise.resolve(new Promise(function(e,o){n.addEventListener("open",function(){if(t.overrides){var e,o,r,i,s,a={type:"conversation_initiation_client_data",conversation_initiation_client_data:{custom_llm_extra_body:t.overrides.customLlmExtraBody,agent:{prompt:null==(e=t.overrides.agent)?void 0:e.prompt,first_message:null==(o=t.overrides.agent)?void 0:o.firstMessage,language:null==(r=t.overrides.agent)?void 0:r.language},tts:{voice_id:null==(i=t.overrides.tts)?void 0:i.voiceId}}};null==(s=n)||s.send(JSON.stringify(a))}},{once:!0}),n.addEventListener("error",o),n.addEventListener("close",o),n.addEventListener("message",function(t){var n=JSON.parse(t.data);c(n)&&("conversation_initiation_metadata"===n.type?e(n.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(t){var o=t.conversation_id,r=parseInt(t.agent_output_audio_format.replace("pcm_",""));return new e(n,o,r)}))}catch(e){return r(e)}var s,a,u,l;return i&&i.then?i.then(void 0,r):i}(0,function(e){var t;throw null==(t=n)||t.close(),e}))}catch(e){return Promise.reject(e)}};var t=e.prototype;return t.close=function(){this.socket.close()},t.sendMessage=function(e){this.socket.send(JSON.stringify(e))},e}();function h(e,t){try{var n=e()}catch(e){return 
t(e)}return n&&n.then?n.then(void 0,t):n}function f(e,t,n){if(!e.s){if(n instanceof p){if(!n.s)return void(n.o=f.bind(null,e,t));1&t&&(t=n.s),n=n.v}if(n&&n.then)return void n.then(f.bind(null,e,t),f.bind(null,e,2));e.s=t,e.v=n;var o=e.o;o&&o(e)}}var d={clientTools:{}},p=/*#__PURE__*/function(){function e(){}return e.prototype.then=function(t,n){var o=new e,r=this.s;if(r){var i=1&r?t:n;if(i){try{f(o,1,i(this.v))}catch(e){f(o,2,e)}return o}return this}return this.o=function(e){try{var r=e.v;1&e.s?f(o,1,t?t(r):r):n?f(o,1,n(r)):f(o,2,r)}catch(e){f(o,2,e)}},o},e}(),v={onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onModeChange:function(){},onStatusChange:function(){}};e.Conversation=/*#__PURE__*/function(){function e(e,t,o,r){var i=this,s=this,a=this,u=this,l=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.endSession=function(){try{return"connected"!==s.status?Promise.resolve():(s.updateStatus("disconnecting"),s.connection.close(),Promise.resolve(s.input.close()).then(function(){return Promise.resolve(s.output.close()).then(function(){s.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==i.mode&&(i.mode=e,i.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==i.status&&(i.status=e,i.options.onStatusChange({status:e}))},this.onEvent=function(e){try{return Promise.resolve(h(function(){var t,n=JSON.parse(e.data);if(c(n)){var o=function(e,t){var n,o=-1;e:{for(var r=0;r<t.length;r++){var i=t[r][0];if(i){var s=i();if(s&&s.then)break e;if(s===e){o=r;break}}else o=r}if(-1!==o){do{for(var a=t[o][1];!a;)o++,a=t[o][1];var u=a();if(u&&u.then){n=!0;break e}var c=t[o][2];o++}while(c&&!c());return u}}var l=new 
p,h=f.bind(null,l,2);return(n?u.then(d):s.then(function n(s){for(;;){if(s===e){o=r;break}if(++r===t.length){if(-1!==o)break;return void f(l,1,u)}if(i=t[r][0]){if((s=i())&&s.then)return void s.then(n).then(void 0,h)}else o=r}do{for(var a=t[o][1];!a;)o++,a=t[o][1];var u=a();if(u&&u.then)return void u.then(d).then(void 0,h);var c=t[o][2];o++}while(c&&!c());f(l,1,u)})).then(void 0,h),l;function d(e){for(;;){var n=t[o][2];if(!n||n())break;o++;for(var r=t[o][1];!r;)o++,r=t[o][1];if((e=r())&&e.then)return void e.then(d).then(void 0,h)}f(l,1,e)}}(n.type,[[function(){return"interruption"},function(){return n.interruption_event&&(a.lastInterruptTimestamp=n.interruption_event.event_id),a.fadeOutAudio(),void(t=1)}],[function(){return"agent_response"},function(){return a.options.onMessage({source:"ai",message:n.agent_response_event.agent_response}),void(t=1)}],[function(){return"user_transcript"},function(){return a.options.onMessage({source:"user",message:n.user_transcription_event.user_transcript}),void(t=1)}],[function(){return"internal_tentative_agent_response"},function(){return a.options.onDebug({type:"tentative_agent_response",response:n.tentative_agent_response_internal_event.tentative_agent_response}),void(t=1)}],[function(){return"client_tool_call"},function(){var e=function(){if(a.options.onUnhandledClientToolCall)return a.options.onUnhandledClientToolCall(n.client_tool_call),void(t=1);a.onError("Client tool with name "+n.client_tool_call.tool_name+" is not defined on client",{clientToolName:n.client_tool_call.tool_name}),a.connection.sendMessage({tool_call_id:n.client_tool_call.tool_call_id,response:"Client tool with name "+n.client_tool_call.tool_name+" is not defined on client",is_error:!0}),t=1},o=function(){if(a.options.clientTools.hasOwnProperty(n.client_tool_call.tool_name)){var e=function(){t=1},o=h(function(){return 
Promise.resolve(a.options.clientTools[n.client_tool_call.tool_name](n.client_tool_call.parameters)).then(function(e){a.connection.sendMessage({tool_call_id:n.client_tool_call.tool_call_id,response:e,is_error:!1})})},function(e){a.onError("Client tool execution failed with following error: "+(null==e?void 0:e.message),{clientToolName:n.client_tool_call.tool_name}),a.connection.sendMessage({tool_call_id:n.client_tool_call.tool_call_id,response:"Client tool execution failed: "+(null==e?void 0:e.message),is_error:!0})});return o&&o.then?o.then(e):e()}}();return o&&o.then?o.then(e):e()},function(){return t||t}],[function(){return"audio"},function(){return a.lastInterruptTimestamp<=n.audio_event.event_id&&(a.addAudioBase64Chunk(n.audio_event.audio_base_64),a.updateMode("speaking")),void(t=1)}],[function(){return"ping"},function(){return a.connection.sendMessage({type:"pong",event_id:n.ping_event.event_id}),void(t=1)}],[void 0,function(){return a.options.onDebug(n),void(t=1)}]]);return o&&o.then?o.then(function(){}):void 0}},function(){a.onError("Failed to parse event data",{event:e})}))}catch(e){return Promise.reject(e)}},this.onInputWorkletMessage=function(e){var t,n;"connected"===i.status&&i.connection.sendMessage({user_audio_chunk:(t=e.data[0].buffer,n=new Uint8Array(t),window.btoa(String.fromCharCode.apply(String,n)))})},this.onOutputWorkletMessage=function(e){var t=e.data;"process"===t.type&&i.updateMode(t.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return u.output.gain.gain.value=u.volume,u.output.worklet.port.postMessage({type:"clearInterrupted"}),u.output.worklet.port.postMessage({type:"buffer",buffer:n(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return 
l.updateMode("listening"),l.output.worklet.port.postMessage({type:"interrupt"}),l.output.gain.gain.exponentialRampToValueAtTime(1e-4,l.output.context.currentTime+2),setTimeout(function(){l.output.gain.gain.value=l.volume,l.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return Promise.reject(e)}},this.onError=function(e,t){console.error(e,t),i.options.onError(e,t)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var t=0,n=0;n<e.length;n++)t+=e[n]/255;return(t/=e.length)<0?0:t>1?1:t},this.getId=function(){return i.connection.conversationId},this.setVolume=function(e){i.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=i.inputFrequencyData||(i.inputFrequencyData=new Uint8Array(i.input.analyser.frequencyBinCount)),i.input.analyser.getByteFrequencyData(i.inputFrequencyData),i.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=i.outputFrequencyData||(i.outputFrequencyData=new Uint8Array(i.output.analyser.frequencyBinCount)),i.output.analyser.getByteFrequencyData(i.outputFrequencyData),i.outputFrequencyData},this.getInputVolume=function(){return i.calculateVolume(i.getInputByteFrequencyData())},this.getOutputVolume=function(){return i.calculateVolume(i.getOutputByteFrequencyData())},this.options=e,this.connection=t,this.input=o,this.output=r,this.options.onConnect({conversationId:t.conversationId}),this.connection.socket.addEventListener("message",function(e){i.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){i.updateStatus("disconnected"),i.onError("Socket error",e)}),this.connection.socket.addEventListener("close",function(){i.updateStatus("disconnected"),i.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return e.startSession=function(n){try{var o=t({},d,v,n);o.onStatusChange({status:"connecting"});var 
r=null,s=null,a=null;return Promise.resolve(h(function(){return Promise.resolve(i.create(16e3)).then(function(t){return r=t,Promise.resolve(l.create(n)).then(function(t){return s=t,Promise.resolve(u.create(s.sampleRate)).then(function(t){return new e(o,s,r,a=t)})})})},function(e){var t,n;return o.onStatusChange({status:"disconnected"}),null==(t=s)||t.close(),Promise.resolve(null==(n=r)?void 0:n.close()).then(function(){var t;return Promise.resolve(null==(t=a)?void 0:t.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},e}()});
//# sourceMappingURL=lib.umd.js.map

@@ -0,4 +1,18 @@

import { OutgoingSocketEvent } from "./events";
export type SessionConfig = {
origin?: string;
authorization?: string;
overrides?: {
customLlmExtraBody?: any;
agent?: {
prompt?: {
prompt?: string;
};
firstMessage?: string;
language?: "en";
};
tts?: {
voiceId?: string;
};
};
} & ({

@@ -18,2 +32,3 @@ signedUrl: string;

close(): void;
sendMessage(message: OutgoingSocketEvent): void;
}

@@ -13,3 +13,3 @@ export type UserTranscriptionEvent = {

};
export type AudioEvent = {
export type AgentAudioEvent = {
type: "audio";

@@ -47,3 +47,41 @@ audio_event: {

};
export type SocketEvent = UserTranscriptionEvent | AgentResponseEvent | AudioEvent | InterruptionEvent | InternalTentativeAgentResponseEvent | ConfigEvent | PingEvent;
export declare function isValidSocketEvent(event: any): event is SocketEvent;
/** Incoming request from the agent asking the client to execute a locally-registered tool. */
export type ClientToolCallEvent = {
type: "client_tool_call";
client_tool_call: {
tool_name: string;
tool_call_id: string;
parameters: any;
expects_response: boolean;
};
};
/** Union of every event the server may send over the conversation socket. */
export type IncomingSocketEvent = UserTranscriptionEvent | AgentResponseEvent | AgentAudioEvent | InterruptionEvent | InternalTentativeAgentResponseEvent | ConfigEvent | PingEvent | ClientToolCallEvent;
/** Reply to a server "ping"; echoes the ping's event_id so the server can correlate it. */
export type PongEvent = {
type: "pong";
event_id: number;
};
/** A chunk of captured microphone audio, base64-encoded PCM16. */
export type UserAudioEvent = {
user_audio_chunk: string;
};
/** Result of a client tool invocation, correlated to the originating ClientToolCallEvent via tool_call_id. */
export type ClientToolResultEvent = {
tool_call_id: string;
response: any;
is_error: boolean;
};
/** Conversation-initiation overrides sent when the socket opens (snake_case wire form of SessionConfig.overrides). */
export type ClientOverridesEvent = {
type: "conversation_initiation_client_data";
conversation_initiation_client_data: {
custom_llm_extra_body?: any;
agent?: {
prompt?: {
prompt?: string;
};
first_message?: string;
language?: "en";
};
tts?: {
voice_id?: string;
};
};
};
/** Union of every event the client may send to the server. */
export type OutgoingSocketEvent = PongEvent | UserAudioEvent | ClientOverridesEvent | ClientToolResultEvent;
/** Type guard for parsed socket payloads.
 * NOTE(review): the runtime implementation only checks `!!event.type` — it does not
 * validate the specific event shape, so narrowing to IncomingSocketEvent is optimistic. */
export declare function isValidSocketEvent(event: any): event is IncomingSocketEvent;
{
"name": "@11labs/client",
"version": "0.0.2",
"version": "0.0.3-beta.1",
"description": "ElevenLabs JavaScript Client Library",

@@ -5,0 +5,0 @@ "main": "./dist/lib.umd.js",

@@ -111,2 +111,23 @@ # ElevenLabs JavaScript Client Library

#### Client Tools
Client tools are a way to enable the agent to invoke client-side functionality. This can be used to trigger actions in the client, such as opening a modal or making an API call on behalf of the user.
The client tools definition is an object of functions, and it needs to match your configuration within the [ElevenLabs UI](https://elevenlabs.io/app/conversational-ai), where you can name and describe different tools, as well as set up the parameters passed by the agent.
```ts
const conversation = await Conversation.startSession({
clientTools: {
displayMessage: async (parameters: {text: string}) => {
alert(parameters.text);
return "Message displayed";
}
}
});
```
If the function returns a value, it will be passed back to the agent as a response.
Note that the tool needs to be explicitly configured as blocking the conversation in the ElevenLabs UI for the agent to await and react to the response; otherwise, the agent assumes success and continues the conversation.
#### Return value

@@ -113,0 +134,0 @@

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc