@11labs/client
Comparing version 0.0.5-beta.1 to 0.0.5-beta.2
@@ -1,2 +0,2 @@
-import { Input } from "./utils/input";
+import { Input, InputConfig } from "./utils/input";
 import { Output } from "./utils/output";
@@ -10,3 +10,3 @@ import { SessionConfig } from "./utils/connection";
 export type Status = "connecting" | "connected" | "disconnecting" | "disconnected";
-export type Options = SessionConfig & Callbacks & ClientToolsConfig;
+export type Options = SessionConfig & Callbacks & ClientToolsConfig & InputConfig;
 export type ClientToolsConfig = {
@@ -42,3 +42,3 @@ clientTools: Record<string, (parameters: any) => Promise<string | number | void> | string | number | void>;
 readonly output: Output;
-static startSession(options: SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig>): Promise<Conversation>;
+static startSession(options: SessionConfig & Partial<Callbacks> & Partial<ClientToolsConfig> & Partial<InputConfig>): Promise<Conversation>;
 private lastInterruptTimestamp;
@@ -45,0 +45,0 @@ private mode;
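The type-level change in this release is the new InputConfig member of Options and of the startSession options. Judging from the new bundle below, InputConfig carries a preferHeadphonesForIosDevices flag that is forwarded to Input.create. A minimal usage sketch, assuming only the API visible in this diff (the agent id and callbacks are placeholders):

import { Conversation } from "@11labs/client";

async function demo() {
  // preferHeadphonesForIosDevices is the InputConfig field added in 0.0.5-beta.2;
  // every other value here is a placeholder for illustration.
  const conversation = await Conversation.startSession({
    agentId: "<your-agent-id>",
    preferHeadphonesForIosDevices: true,
    onConnect: ({ conversationId }) => console.log("connected:", conversationId),
    onDisconnect: () => console.log("disconnected"),
  });

  // ...interact with the agent, then tear the session down.
  await conversation.endSession();
}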
@@ -1,2 +0,2 @@
function e(){return e=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var a in n)({}).hasOwnProperty.call(n,a)&&(e[a]=n[a])}return e},e.apply(null,arguments)}function t(e){const t=new Uint8Array(e);return window.btoa(String.fromCharCode(...t))}function n(e){const t=window.atob(e),n=t.length,a=new Uint8Array(n);for(let e=0;e<n;e++)a[e]=t.charCodeAt(e);return a.buffer}const a=new Blob(['\n const BIAS = 0x84;\n const CLIP = 32635;\n const encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n ];\n \n function encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n }\n \n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n \n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n } \n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let encodedArray = this.format === "ulaw"\n ? new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? 
sample * 32768 : sample * 32767;\n if (this.format === "ulaw") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),s=URL.createObjectURL(a);class o{static async create({sampleRate:e,format:t}){let n=null,a=null;try{const i=navigator.mediaDevices.getSupportedConstraints().sampleRate;n=new window.AudioContext(i?{sampleRate:e}:{});const r=n.createAnalyser();i||await n.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js"),await n.audioWorklet.addModule(s),a=await navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:e},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}}});const l=n.createMediaStreamSource(a),c=new AudioWorkletNode(n,"raw-audio-processor");return c.port.postMessage({type:"setFormat",format:t,sampleRate:e}),l.connect(r),r.connect(c),new o(n,r,c,a)}catch(e){var i,r;throw null==(i=a)||i.getTracks().forEach(e=>e.stop()),null==(r=n)||r.close(),e}}constructor(e,t,n,a){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=a}async close(){this.inputStream.getTracks().forEach(e=>e.stop()),await this.context.close()}}const i=new Blob(['\n const decodeTable = [0,132,396,924,1980,4092,8316,16764];\n \n export function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n }\n \n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.format = data.format;\n break;\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === "ulaw"\n ? 
new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === "ulaw") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(i);class l{static async create({sampleRate:e,format:t}){let n=null;try{n=new AudioContext({sampleRate:e});const a=n.createAnalyser(),s=n.createGain();s.connect(a),a.connect(n.destination),await n.audioWorklet.addModule(r);const o=new AudioWorkletNode(n,"audio-concat-processor");return o.port.postMessage({type:"setFormat",format:t}),o.connect(s),new l(n,a,s,o)}catch(e){var a;throw null==(a=n)||a.close(),e}}constructor(e,t,n,a){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=a}async close(){await this.context.close()}}function c(e){return!!e.type}class u{static async create(e){let t=null;try{var n;const a=null!=(n=e.origin)?n:"wss://api.elevenlabs.io",s=e.signedUrl?e.signedUrl:a+"/v1/convai/conversation?agent_id="+e.agentId,o=["convai"];e.authorization&&o.push(`bearer.${e.authorization}`),t=new WebSocket(s,o);const i=await new Promise((n,a)=>{t.addEventListener("open",()=>{var n;const a={type:"conversation_initiation_client_data"};var s,o,i,r;e.overrides&&(a.conversation_config_override={agent:{prompt:null==(s=e.overrides.agent)?void 0:s.prompt,first_message:null==(o=e.overrides.agent)?void 0:o.firstMessage,language:null==(i=e.overrides.agent)?void 0:i.language},tts:{voice_id:null==(r=e.overrides.tts)?void 0:r.voiceId}}),e.customLlmExtraBody&&(a.custom_llm_extra_body=e.customLlmExtraBody),null==(n=t)||n.send(JSON.stringify(a))},{once:!0}),t.addEventListener("error",a),t.addEventListener("close",a),t.addEventListener("message",e=>{const t=JSON.parse(e.data);c(t)&&("conversation_initiation_metadata"===t.type?n(t.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})}),{conversation_id:r,agent_output_audio_format:l,user_input_audio_format:p}=i,h=d(null!=p?p:"pcm_16000"),m=d(l);return new u(t,r,h,m)}catch(e){var a;throw null==(a=t)||a.close(),e}}constructor(e,t,n,a){this.socket=void 0,this.conversationId=void 0,this.inputFormat=void 0,this.outputFormat=void 0,this.socket=e,this.conversationId=t,this.inputFormat=n,this.outputFormat=a}close(){this.socket.close()}sendMessage(e){this.socket.send(JSON.stringify(e))}}function d(e){const[t,n]=e.split("_");if(!["pcm","ulaw"].includes(t))throw new Error(`Invalid format: ${e}`);const a=parseInt(n);if(isNaN(a))throw new Error(`Invalid sample rate: ${n}`);return{format:t,sampleRate:a}}const 
p={clientTools:{}},h={onConnect:()=>{},onDebug:()=>{},onDisconnect:()=>{},onError:()=>{},onMessage:()=>{},onModeChange:()=>{},onStatusChange:()=>{},onCanSendFeedbackChange:()=>{}};class m{static async startSession(t){const n=e({},p,h,t);n.onStatusChange({status:"connecting"}),n.onCanSendFeedbackChange({canSendFeedback:!1});let a=null,s=null,i=null;try{return s=await u.create(t),[a,i]=await Promise.all([o.create(s.inputFormat),l.create(s.outputFormat)]),new m(n,s,a,i)}catch(e){var r,c,d;throw n.onStatusChange({status:"disconnected"}),null==(r=s)||r.close(),await(null==(c=a)?void 0:c.close()),await(null==(d=i)?void 0:d.close()),e}}constructor(e,a,s,o){var i=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.currentEventId=1,this.lastFeedbackEventId=1,this.canSendFeedback=!1,this.endSession=async function(){"connected"===i.status&&(i.updateStatus("disconnecting"),i.connection.close(),await i.input.close(),await i.output.close(),i.updateStatus("disconnected"))},this.updateMode=e=>{e!==this.mode&&(this.mode=e,this.options.onModeChange({mode:e}))},this.updateStatus=e=>{e!==this.status&&(this.status=e,this.options.onStatusChange({status:e}))},this.updateCanSendFeedback=()=>{const e=this.currentEventId!==this.lastFeedbackEventId;this.canSendFeedback!==e&&(this.canSendFeedback=e,this.options.onCanSendFeedbackChange({canSendFeedback:e}))},this.onEvent=async function(e){try{const n=JSON.parse(e.data);if(!c(n))return;switch(n.type){case"interruption":n.interruption_event&&(i.lastInterruptTimestamp=n.interruption_event.event_id),i.fadeOutAudio();break;case"agent_response":i.options.onMessage({source:"ai",message:n.agent_response_event.agent_response});break;case"user_transcript":i.options.onMessage({source:"user",message:n.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":i.options.onDebug({type:"tentative_agent_response",response:n.tentative_agent_response_internal_event.tentative_agent_response});break;case"client_tool_call":if(i.options.clientTools.hasOwnProperty(n.client_tool_call.tool_name)){try{var t;const e=null!=(t=await i.options.clientTools[n.client_tool_call.tool_name](n.client_tool_call.parameters))?t:"Client tool execution successful.";i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:e,is_error:!1})}catch(e){i.onError("Client tool execution failed with following error: "+(null==e?void 0:e.message),{clientToolName:n.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==e?void 0:e.message),is_error:!0})}break}if(i.options.onUnhandledClientToolCall){i.options.onUnhandledClientToolCall(n.client_tool_call);break}i.onError(`Client tool with name ${n.client_tool_call.tool_name} is not defined on client`,{clientToolName:n.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:`Client tool with name ${n.client_tool_call.tool_name} is not defined on 
client`,is_error:!0});break;case"audio":i.lastInterruptTimestamp<=n.audio_event.event_id&&(i.addAudioBase64Chunk(n.audio_event.audio_base_64),i.currentEventId=n.audio_event.event_id,i.updateCanSendFeedback(),i.updateMode("speaking"));break;case"ping":i.connection.sendMessage({type:"pong",event_id:n.ping_event.event_id});break;default:i.options.onDebug(n)}}catch(t){return void i.onError("Failed to parse event data",{event:e})}},this.onInputWorkletMessage=e=>{"connected"===this.status&&this.connection.sendMessage({user_audio_chunk:t(e.data[0].buffer)})},this.onOutputWorkletMessage=({data:e})=>{"process"===e.type&&this.updateMode(e.finished?"listening":"speaking")},this.addAudioBase64Chunk=async function(e){i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"}),i.output.worklet.port.postMessage({type:"buffer",buffer:n(e)})},this.fadeOutAudio=async function(){i.updateMode("listening"),i.output.worklet.port.postMessage({type:"interrupt"}),i.output.gain.gain.exponentialRampToValueAtTime(1e-4,i.output.context.currentTime+2),setTimeout(()=>{i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},this.onError=(e,t)=>{console.error(e,t),this.options.onError(e,t)},this.calculateVolume=e=>{if(0===e.length)return 0;let t=0;for(let n=0;n<e.length;n++)t+=e[n]/255;return t/=e.length,t<0?0:t>1?1:t},this.getId=()=>this.connection.conversationId,this.setVolume=({volume:e})=>{this.volume=e},this.getInputByteFrequencyData=()=>(null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData),this.getOutputByteFrequencyData=()=>(null!=this.outputFrequencyData||(this.outputFrequencyData=new Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData),this.getInputVolume=()=>this.calculateVolume(this.getInputByteFrequencyData()),this.getOutputVolume=()=>this.calculateVolume(this.getOutputByteFrequencyData()),this.sendFeedback=e=>{this.canSendFeedback?(this.connection.sendMessage({type:"feedback",score:e?"like":"dislike",event_id:this.currentEventId}),this.lastFeedbackEventId=this.currentEventId,this.updateCanSendFeedback()):console.warn(0===this.lastFeedbackEventId?"Cannot send feedback: the conversation has not started yet.":"Cannot send feedback: feedback has already been sent for the current response.")},this.options=e,this.connection=a,this.input=s,this.output=o,this.options.onConnect({conversationId:a.conversationId}),this.connection.socket.addEventListener("message",e=>{this.onEvent(e)}),this.connection.socket.addEventListener("error",e=>{this.updateStatus("disconnected"),this.onError("Socket error",e)}),this.connection.socket.addEventListener("close",()=>{this.updateStatus("disconnected"),this.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}}function f(e,t,n="https://api.elevenlabs.io"){return fetch(`${n}/v1/convai/conversations/${e}/feedback`,{method:"POST",body:JSON.stringify({feedback:t?"like":"dislike"}),headers:{"Content-Type":"application/json"}})}export{m as Conversation,f as postOverallFeedback}; | ||
function e(){return e=Object.assign?Object.assign.bind():function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var a in n)({}).hasOwnProperty.call(n,a)&&(e[a]=n[a])}return e},e.apply(null,arguments)}function t(e){const t=new Uint8Array(e);return window.btoa(String.fromCharCode(...t))}function n(e){const t=window.atob(e),n=t.length,a=new Uint8Array(n);for(let e=0;e<n;e++)a[e]=t.charCodeAt(e);return a.buffer}const a=new Blob(['\n const BIAS = 0x84;\n const CLIP = 32635;\n const encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n ];\n \n function encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n }\n \n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n \n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n } \n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let encodedArray = this.format === "ulaw"\n ? new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? 
sample * 32768 : sample * 32767;\n if (this.format === "ulaw") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),s=URL.createObjectURL(a);class o{static async create({sampleRate:e,format:t,preferHeadphonesForIosDevices:n}){let a=null,i=null;try{const r={sampleRate:{ideal:e},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}},l=await navigator.mediaDevices.getUserMedia({audio:!0});if(null==l||l.getTracks().forEach(e=>e.stop()),(["iPad Simulator","iPhone Simulator","iPod Simulator","iPad","iPhone","iPod"].includes(navigator.platform)||navigator.userAgent.includes("Mac")&&"ontouchend"in document)&&n){const e=(await window.navigator.mediaDevices.enumerateDevices()).find(e=>"audioinput"===e.kind&&["airpod","headphone","earphone"].find(t=>e.label.toLowerCase().includes(t)));e&&(r.deviceId={ideal:e.deviceId})}const c=navigator.mediaDevices.getSupportedConstraints().sampleRate;a=new window.AudioContext(c?{sampleRate:e}:{});const u=a.createAnalyser();c||await a.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js"),await a.audioWorklet.addModule(s),i=await navigator.mediaDevices.getUserMedia({audio:r});const d=a.createMediaStreamSource(i),p=new AudioWorkletNode(a,"raw-audio-processor");return p.port.postMessage({type:"setFormat",format:t,sampleRate:e}),d.connect(u),u.connect(p),new o(a,u,p,i)}catch(e){var r,l;throw null==(r=i)||r.getTracks().forEach(e=>e.stop()),null==(l=a)||l.close(),e}}constructor(e,t,n,a){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=t,this.worklet=n,this.inputStream=a}async close(){this.inputStream.getTracks().forEach(e=>e.stop()),await this.context.close()}}const i=new Blob(['\n const decodeTable = [0,132,396,924,1980,4092,8316,16764];\n \n export function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n }\n \n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.format = data.format;\n break;\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === "ulaw"\n ? 
new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === "ulaw") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(i);class l{static async create({sampleRate:e,format:t}){let n=null;try{n=new AudioContext({sampleRate:e});const a=n.createAnalyser(),s=n.createGain();s.connect(a),a.connect(n.destination),await n.audioWorklet.addModule(r);const o=new AudioWorkletNode(n,"audio-concat-processor");return o.port.postMessage({type:"setFormat",format:t}),o.connect(s),new l(n,a,s,o)}catch(e){var a;throw null==(a=n)||a.close(),e}}constructor(e,t,n,a){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=t,this.gain=n,this.worklet=a}async close(){await this.context.close()}}function c(e){return!!e.type}class u{static async create(e){let t=null;try{var n;const a=null!=(n=e.origin)?n:"wss://api.elevenlabs.io",s=e.signedUrl?e.signedUrl:a+"/v1/convai/conversation?agent_id="+e.agentId,o=["convai"];e.authorization&&o.push(`bearer.${e.authorization}`),t=new WebSocket(s,o);const i=await new Promise((n,a)=>{t.addEventListener("open",()=>{var n;const a={type:"conversation_initiation_client_data"};var s,o,i,r;e.overrides&&(a.conversation_config_override={agent:{prompt:null==(s=e.overrides.agent)?void 0:s.prompt,first_message:null==(o=e.overrides.agent)?void 0:o.firstMessage,language:null==(i=e.overrides.agent)?void 0:i.language},tts:{voice_id:null==(r=e.overrides.tts)?void 0:r.voiceId}}),e.customLlmExtraBody&&(a.custom_llm_extra_body=e.customLlmExtraBody),null==(n=t)||n.send(JSON.stringify(a))},{once:!0}),t.addEventListener("error",a),t.addEventListener("close",a),t.addEventListener("message",e=>{const t=JSON.parse(e.data);c(t)&&("conversation_initiation_metadata"===t.type?n(t.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})}),{conversation_id:r,agent_output_audio_format:l,user_input_audio_format:p}=i,h=d(null!=p?p:"pcm_16000"),m=d(l);return new u(t,r,h,m)}catch(e){var a;throw null==(a=t)||a.close(),e}}constructor(e,t,n,a){this.socket=void 0,this.conversationId=void 0,this.inputFormat=void 0,this.outputFormat=void 0,this.socket=e,this.conversationId=t,this.inputFormat=n,this.outputFormat=a}close(){this.socket.close()}sendMessage(e){this.socket.send(JSON.stringify(e))}}function d(e){const[t,n]=e.split("_");if(!["pcm","ulaw"].includes(t))throw new Error(`Invalid format: ${e}`);const a=parseInt(n);if(isNaN(a))throw new Error(`Invalid sample rate: ${n}`);return{format:t,sampleRate:a}}const 
p={clientTools:{}},h={onConnect:()=>{},onDebug:()=>{},onDisconnect:()=>{},onError:()=>{},onMessage:()=>{},onModeChange:()=>{},onStatusChange:()=>{},onCanSendFeedbackChange:()=>{}};class m{static async startSession(t){const n=e({},p,h,t);n.onStatusChange({status:"connecting"}),n.onCanSendFeedbackChange({canSendFeedback:!1});let a=null,s=null,i=null;try{return s=await u.create(t),[a,i]=await Promise.all([o.create(e({},s.inputFormat,{preferHeadphonesForIosDevices:t.preferHeadphonesForIosDevices})),l.create(s.outputFormat)]),new m(n,s,a,i)}catch(e){var r,c,d;throw n.onStatusChange({status:"disconnected"}),null==(r=s)||r.close(),await(null==(c=a)?void 0:c.close()),await(null==(d=i)?void 0:d.close()),e}}constructor(e,a,s,o){var i=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.currentEventId=1,this.lastFeedbackEventId=1,this.canSendFeedback=!1,this.endSession=async function(){"connected"===i.status&&(i.updateStatus("disconnecting"),i.connection.close(),await i.input.close(),await i.output.close(),i.updateStatus("disconnected"))},this.updateMode=e=>{e!==this.mode&&(this.mode=e,this.options.onModeChange({mode:e}))},this.updateStatus=e=>{e!==this.status&&(this.status=e,this.options.onStatusChange({status:e}))},this.updateCanSendFeedback=()=>{const e=this.currentEventId!==this.lastFeedbackEventId;this.canSendFeedback!==e&&(this.canSendFeedback=e,this.options.onCanSendFeedbackChange({canSendFeedback:e}))},this.onEvent=async function(e){try{const n=JSON.parse(e.data);if(!c(n))return;switch(n.type){case"interruption":n.interruption_event&&(i.lastInterruptTimestamp=n.interruption_event.event_id),i.fadeOutAudio();break;case"agent_response":i.options.onMessage({source:"ai",message:n.agent_response_event.agent_response});break;case"user_transcript":i.options.onMessage({source:"user",message:n.user_transcription_event.user_transcript});break;case"internal_tentative_agent_response":i.options.onDebug({type:"tentative_agent_response",response:n.tentative_agent_response_internal_event.tentative_agent_response});break;case"client_tool_call":if(i.options.clientTools.hasOwnProperty(n.client_tool_call.tool_name)){try{var t;const e=null!=(t=await i.options.clientTools[n.client_tool_call.tool_name](n.client_tool_call.parameters))?t:"Client tool execution successful.";i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:e,is_error:!1})}catch(e){i.onError("Client tool execution failed with following error: "+(null==e?void 0:e.message),{clientToolName:n.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==e?void 0:e.message),is_error:!0})}break}if(i.options.onUnhandledClientToolCall){i.options.onUnhandledClientToolCall(n.client_tool_call);break}i.onError(`Client tool with name ${n.client_tool_call.tool_name} is not defined on client`,{clientToolName:n.client_tool_call.tool_name}),i.connection.sendMessage({type:"client_tool_result",tool_call_id:n.client_tool_call.tool_call_id,result:`Client tool with name ${n.client_tool_call.tool_name} is not defined on 
client`,is_error:!0});break;case"audio":i.lastInterruptTimestamp<=n.audio_event.event_id&&(i.addAudioBase64Chunk(n.audio_event.audio_base_64),i.currentEventId=n.audio_event.event_id,i.updateCanSendFeedback(),i.updateMode("speaking"));break;case"ping":i.connection.sendMessage({type:"pong",event_id:n.ping_event.event_id});break;default:i.options.onDebug(n)}}catch(t){return void i.onError("Failed to parse event data",{event:e})}},this.onInputWorkletMessage=e=>{"connected"===this.status&&this.connection.sendMessage({user_audio_chunk:t(e.data[0].buffer)})},this.onOutputWorkletMessage=({data:e})=>{"process"===e.type&&this.updateMode(e.finished?"listening":"speaking")},this.addAudioBase64Chunk=async function(e){i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"}),i.output.worklet.port.postMessage({type:"buffer",buffer:n(e)})},this.fadeOutAudio=async function(){i.updateMode("listening"),i.output.worklet.port.postMessage({type:"interrupt"}),i.output.gain.gain.exponentialRampToValueAtTime(1e-4,i.output.context.currentTime+2),setTimeout(()=>{i.output.gain.gain.value=i.volume,i.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3)},this.onError=(e,t)=>{console.error(e,t),this.options.onError(e,t)},this.calculateVolume=e=>{if(0===e.length)return 0;let t=0;for(let n=0;n<e.length;n++)t+=e[n]/255;return t/=e.length,t<0?0:t>1?1:t},this.getId=()=>this.connection.conversationId,this.setVolume=({volume:e})=>{this.volume=e},this.getInputByteFrequencyData=()=>(null!=this.inputFrequencyData||(this.inputFrequencyData=new Uint8Array(this.input.analyser.frequencyBinCount)),this.input.analyser.getByteFrequencyData(this.inputFrequencyData),this.inputFrequencyData),this.getOutputByteFrequencyData=()=>(null!=this.outputFrequencyData||(this.outputFrequencyData=new Uint8Array(this.output.analyser.frequencyBinCount)),this.output.analyser.getByteFrequencyData(this.outputFrequencyData),this.outputFrequencyData),this.getInputVolume=()=>this.calculateVolume(this.getInputByteFrequencyData()),this.getOutputVolume=()=>this.calculateVolume(this.getOutputByteFrequencyData()),this.sendFeedback=e=>{this.canSendFeedback?(this.connection.sendMessage({type:"feedback",score:e?"like":"dislike",event_id:this.currentEventId}),this.lastFeedbackEventId=this.currentEventId,this.updateCanSendFeedback()):console.warn(0===this.lastFeedbackEventId?"Cannot send feedback: the conversation has not started yet.":"Cannot send feedback: feedback has already been sent for the current response.")},this.options=e,this.connection=a,this.input=s,this.output=o,this.options.onConnect({conversationId:a.conversationId}),this.connection.socket.addEventListener("message",e=>{this.onEvent(e)}),this.connection.socket.addEventListener("error",e=>{this.updateStatus("disconnected"),this.onError("Socket error",e)}),this.connection.socket.addEventListener("close",()=>{this.updateStatus("disconnected"),this.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}}function f(e,t,n="https://api.elevenlabs.io"){return fetch(`${n}/v1/convai/conversations/${e}/feedback`,{method:"POST",body:JSON.stringify({feedback:t?"like":"dislike"}),headers:{"Content-Type":"application/json"}})}export{m as Conversation,f as postOverallFeedback}; | ||
//# sourceMappingURL=lib.modern.js.map
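Functionally, the difference between the two lib.modern.js builds above is confined to Input.create: it now accepts the preferHeadphonesForIosDevices flag, opens and immediately stops a throwaway microphone stream so that device labels become readable, and on iOS-like devices prefers an input whose label looks like a headphone. A rough, hand-readable sketch of that step, with the helper name and signature invented here for illustration (the shipped code is the minified version above):

// Sketch of the device-selection logic added in 0.0.5-beta.2; names are illustrative.
async function preferIosHeadphones(
  preferHeadphonesForIosDevices: boolean | undefined,
  constraints: MediaTrackConstraints
): Promise<MediaTrackConstraints> {
  const isIosLike =
    ["iPad Simulator", "iPhone Simulator", "iPod Simulator", "iPad", "iPhone", "iPod"]
      .includes(navigator.platform) ||
    (navigator.userAgent.includes("Mac") && "ontouchend" in document);

  if (isIosLike && preferHeadphonesForIosDevices) {
    // Labels are only populated after a successful getUserMedia call, which is
    // why the new bundle requests (and stops) a throwaway audio stream before
    // this enumeration.
    const devices = await navigator.mediaDevices.enumerateDevices();
    const headphones = devices.find(
      (d) =>
        d.kind === "audioinput" &&
        ["airpod", "headphone", "earphone"].some((k) => d.label.toLowerCase().includes(k))
    );
    if (headphones) {
      constraints.deviceId = { ideal: headphones.deviceId };
    }
  }
  return constraints;
}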
@@ -1,2 +0,2 @@
function e(){return e=Object.assign?Object.assign.bind():function(e){for(var n=1;n<arguments.length;n++){var t=arguments[n];for(var o in t)({}).hasOwnProperty.call(t,o)&&(e[o]=t[o])}return e},e.apply(null,arguments)}function n(e){for(var n=window.atob(e),t=n.length,o=new Uint8Array(t),r=0;r<t;r++)o[r]=n.charCodeAt(r);return o.buffer}var t=new Blob(['\n const BIAS = 0x84;\n const CLIP = 32635;\n const encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n ];\n \n function encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n }\n \n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n \n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n } \n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let encodedArray = this.format === "ulaw"\n ? new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? 
sample * 32768 : sample * 32767;\n if (this.format === "ulaw") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(t),r=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=n,this.worklet=t,this.inputStream=o}return e.create=function(n){var t=n.sampleRate,r=n.format;try{var a=null,i=null;return Promise.resolve(function(n,s){try{var u=function(){function n(){return Promise.resolve(a.audioWorklet.addModule(o)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}}})).then(function(n){var o=a.createMediaStreamSource(i=n),s=new AudioWorkletNode(a,"raw-audio-processor");return s.port.postMessage({type:"setFormat",format:r,sampleRate:t}),o.connect(u),u.connect(s),new e(a,u,s,i)})})}var s=navigator.mediaDevices.getSupportedConstraints().sampleRate,u=(a=new window.AudioContext(s?{sampleRate:t}:{})).createAnalyser(),c=function(){if(!s)return Promise.resolve(a.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return c&&c.then?c.then(n):n()}()}catch(e){return s(e)}return u&&u.then?u.then(void 0,s):u}(0,function(e){var n,t;throw null==(n=i)||n.getTracks().forEach(function(e){return e.stop()}),null==(t=a)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),a=new Blob(['\n const decodeTable = [0,132,396,924,1980,4092,8316,16764];\n \n export function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n }\n \n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.format = data.format;\n break;\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === "ulaw"\n ? 
new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === "ulaw") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),i=URL.createObjectURL(a),s=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=n,this.gain=t,this.worklet=o}return e.create=function(n){var t=n.sampleRate,o=n.format;try{var r=null;return Promise.resolve(function(n,a){try{var s=(u=(r=new AudioContext({sampleRate:t})).createAnalyser(),(c=r.createGain()).connect(u),u.connect(r.destination),Promise.resolve(r.audioWorklet.addModule(i)).then(function(){var n=new AudioWorkletNode(r,"audio-concat-processor");return n.port.postMessage({type:"setFormat",format:o}),n.connect(c),new e(r,u,c,n)}))}catch(e){return a(e)}var u,c;return s&&s.then?s.then(void 0,a):s}(0,function(e){var n;throw null==(n=r)||n.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function u(e){return!!e.type}var c=/*#__PURE__*/function(){function e(e,n,t,o){this.socket=void 0,this.conversationId=void 0,this.inputFormat=void 0,this.outputFormat=void 0,this.socket=e,this.conversationId=n,this.inputFormat=t,this.outputFormat=o}e.create=function(n){try{var t=null;return Promise.resolve(function(o,r){try{var a=(s=null!=(i=n.origin)?i:"wss://api.elevenlabs.io",c=n.signedUrl?n.signedUrl:s+"/v1/convai/conversation?agent_id="+n.agentId,d=["convai"],n.authorization&&d.push("bearer."+n.authorization),t=new WebSocket(c,d),Promise.resolve(new Promise(function(e,o){t.addEventListener("open",function(){var e,o,r,a,i,s={type:"conversation_initiation_client_data"};n.overrides&&(s.conversation_config_override={agent:{prompt:null==(o=n.overrides.agent)?void 0:o.prompt,first_message:null==(r=n.overrides.agent)?void 0:r.firstMessage,language:null==(a=n.overrides.agent)?void 0:a.language},tts:{voice_id:null==(i=n.overrides.tts)?void 0:i.voiceId}}),n.customLlmExtraBody&&(s.custom_llm_extra_body=n.customLlmExtraBody),null==(e=t)||e.send(JSON.stringify(s))},{once:!0}),t.addEventListener("error",o),t.addEventListener("close",o),t.addEventListener("message",function(n){var t=JSON.parse(n.data);u(t)&&("conversation_initiation_metadata"===t.type?e(t.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(n){var 
o=n.conversation_id,r=n.agent_output_audio_format,a=n.user_input_audio_format,i=l(null!=a?a:"pcm_16000"),s=l(r);return new e(t,o,i,s)}))}catch(e){return r(e)}var i,s,c,d;return a&&a.then?a.then(void 0,r):a}(0,function(e){var n;throw null==(n=t)||n.close(),e}))}catch(e){return Promise.reject(e)}};var n=e.prototype;return n.close=function(){this.socket.close()},n.sendMessage=function(e){this.socket.send(JSON.stringify(e))},e}();function l(e){var n=e.split("_"),t=n[0],o=n[1];if(!["pcm","ulaw"].includes(t))throw new Error("Invalid format: "+e);var r=parseInt(o);if(isNaN(r))throw new Error("Invalid sample rate: "+o);return{format:t,sampleRate:r}}function d(e,n){try{var t=e()}catch(e){return n(e)}return t&&t.then?t.then(void 0,n):t}var f={clientTools:{}};function p(e,n,t){if(!e.s){if(t instanceof v){if(!t.s)return void(t.o=p.bind(null,e,n));1&n&&(n=t.s),t=t.v}if(t&&t.then)return void t.then(p.bind(null,e,n),p.bind(null,e,2));e.s=n,e.v=t;var o=e.o;o&&o(e)}}var h={onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onModeChange:function(){},onStatusChange:function(){},onCanSendFeedbackChange:function(){}},v=/*#__PURE__*/function(){function e(){}return e.prototype.then=function(n,t){var o=new e,r=this.s;if(r){var a=1&r?n:t;if(a){try{p(o,1,a(this.v))}catch(e){p(o,2,e)}return o}return this}return this.o=function(e){try{var r=e.v;1&e.s?p(o,1,n?n(r):r):t?p(o,1,t(r)):p(o,2,r)}catch(e){p(o,2,e)}},o},e}(),m=/*#__PURE__*/function(){function t(e,t,o,r){var a=this,i=this,s=this,c=this,l=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.currentEventId=1,this.lastFeedbackEventId=1,this.canSendFeedback=!1,this.endSession=function(){try{return"connected"!==i.status?Promise.resolve():(i.updateStatus("disconnecting"),i.connection.close(),Promise.resolve(i.input.close()).then(function(){return Promise.resolve(i.output.close()).then(function(){i.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==a.mode&&(a.mode=e,a.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==a.status&&(a.status=e,a.options.onStatusChange({status:e}))},this.updateCanSendFeedback=function(){var e=a.currentEventId!==a.lastFeedbackEventId;a.canSendFeedback!==e&&(a.canSendFeedback=e,a.options.onCanSendFeedbackChange({canSendFeedback:e}))},this.onEvent=function(e){try{return Promise.resolve(d(function(){var n,t=JSON.parse(e.data);if(u(t)){var o=function(e,n){var t,o=-1;e:{for(var r=0;r<n.length;r++){var a=n[r][0];if(a){var i=a();if(i&&i.then)break e;if(i===e){o=r;break}}else o=r}if(-1!==o){do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then){t=!0;break e}var c=n[o][2];o++}while(c&&!c());return u}}var l=new v,d=p.bind(null,l,2);return(t?u.then(f):i.then(function t(i){for(;;){if(i===e){o=r;break}if(++r===n.length){if(-1!==o)break;return void p(l,1,u)}if(a=n[r][0]){if((i=a())&&i.then)return void i.then(t).then(void 0,d)}else o=r}do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then)return void u.then(f).then(void 0,d);var c=n[o][2];o++}while(c&&!c());p(l,1,u)})).then(void 0,d),l;function f(e){for(;;){var t=n[o][2];if(!t||t())break;o++;for(var r=n[o][1];!r;)o++,r=n[o][1];if((e=r())&&e.then)return void e.then(f).then(void 0,d)}p(l,1,e)}}(t.type,[[function(){return"interruption"},function(){return 
t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio(),void(n=1)}],[function(){return"agent_response"},function(){return s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response}),void(n=1)}],[function(){return"user_transcript"},function(){return s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript}),void(n=1)}],[function(){return"internal_tentative_agent_response"},function(){return s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response}),void(n=1)}],[function(){return"client_tool_call"},function(){var e=function(){if(s.options.onUnhandledClientToolCall)return s.options.onUnhandledClientToolCall(t.client_tool_call),void(n=1);s.onError("Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",is_error:!0}),n=1},o=function(){if(s.options.clientTools.hasOwnProperty(t.client_tool_call.tool_name)){var e=function(){n=1},o=d(function(){return Promise.resolve(s.options.clientTools[t.client_tool_call.tool_name](t.client_tool_call.parameters)).then(function(e){s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:e,is_error:!1})})},function(e){s.onError("Client tool execution failed with following error: "+(null==e?void 0:e.message),{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==e?void 0:e.message),is_error:!0})});return o&&o.then?o.then(e):e()}}();return o&&o.then?o.then(e):e()},function(){return n||n}],[function(){return"audio"},function(){return s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.currentEventId=t.audio_event.event_id,s.updateCanSendFeedback(),s.updateMode("speaking")),void(n=1)}],[function(){return"ping"},function(){return s.connection.sendMessage({type:"pong",event_id:t.ping_event.event_id}),void(n=1)}],[void 0,function(){return s.options.onDebug(t),void(n=1)}]]);return o&&o.then?o.then(function(){}):void 0}},function(){s.onError("Failed to parse event data",{event:e})}))}catch(e){return Promise.reject(e)}},this.onInputWorkletMessage=function(e){var n,t;"connected"===a.status&&a.connection.sendMessage({user_audio_chunk:(n=e.data[0].buffer,t=new Uint8Array(n),window.btoa(String.fromCharCode.apply(String,t)))})},this.onOutputWorkletMessage=function(e){var n=e.data;"process"===n.type&&a.updateMode(n.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"}),c.output.worklet.port.postMessage({type:"buffer",buffer:n(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return l.updateMode("listening"),l.output.worklet.port.postMessage({type:"interrupt"}),l.output.gain.gain.exponentialRampToValueAtTime(1e-4,l.output.context.currentTime+2),setTimeout(function(){l.output.gain.gain.value=l.volume,l.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return 
Promise.reject(e)}},this.onError=function(e,n){console.error(e,n),a.options.onError(e,n)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var n=0,t=0;t<e.length;t++)n+=e[t]/255;return(n/=e.length)<0?0:n>1?1:n},this.getId=function(){return a.connection.conversationId},this.setVolume=function(e){a.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=a.inputFrequencyData||(a.inputFrequencyData=new Uint8Array(a.input.analyser.frequencyBinCount)),a.input.analyser.getByteFrequencyData(a.inputFrequencyData),a.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=a.outputFrequencyData||(a.outputFrequencyData=new Uint8Array(a.output.analyser.frequencyBinCount)),a.output.analyser.getByteFrequencyData(a.outputFrequencyData),a.outputFrequencyData},this.getInputVolume=function(){return a.calculateVolume(a.getInputByteFrequencyData())},this.getOutputVolume=function(){return a.calculateVolume(a.getOutputByteFrequencyData())},this.sendFeedback=function(e){a.canSendFeedback?(a.connection.sendMessage({type:"feedback",score:e?"like":"dislike",event_id:a.currentEventId}),a.lastFeedbackEventId=a.currentEventId,a.updateCanSendFeedback()):console.warn(0===a.lastFeedbackEventId?"Cannot send feedback: the conversation has not started yet.":"Cannot send feedback: feedback has already been sent for the current response.")},this.options=e,this.connection=t,this.input=o,this.output=r,this.options.onConnect({conversationId:t.conversationId}),this.connection.socket.addEventListener("message",function(e){a.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){a.updateStatus("disconnected"),a.onError("Socket error",e)}),this.connection.socket.addEventListener("close",function(){a.updateStatus("disconnected"),a.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return t.startSession=function(n){try{var o=e({},f,h,n);o.onStatusChange({status:"connecting"}),o.onCanSendFeedbackChange({canSendFeedback:!1});var a=null,i=null,u=null;return Promise.resolve(d(function(){return Promise.resolve(c.create(n)).then(function(e){return i=e,Promise.resolve(Promise.all([r.create(i.inputFormat),s.create(i.outputFormat)])).then(function(e){return new t(o,i,a=e[0],u=e[1])})})},function(e){var n,t;return o.onStatusChange({status:"disconnected"}),null==(n=i)||n.close(),Promise.resolve(null==(t=a)?void 0:t.close()).then(function(){var n;return Promise.resolve(null==(n=u)?void 0:n.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},t}();function g(e,n,t){return void 0===t&&(t="https://api.elevenlabs.io"),fetch(t+"/v1/convai/conversations/"+e+"/feedback",{method:"POST",body:JSON.stringify({feedback:n?"like":"dislike"}),headers:{"Content-Type":"application/json"}})}export{m as Conversation,g as postOverallFeedback}; | ||
function e(){return e=Object.assign?Object.assign.bind():function(e){for(var n=1;n<arguments.length;n++){var t=arguments[n];for(var o in t)({}).hasOwnProperty.call(t,o)&&(e[o]=t[o])}return e},e.apply(null,arguments)}function n(e){for(var n=window.atob(e),t=n.length,o=new Uint8Array(t),r=0;r<t;r++)o[r]=n.charCodeAt(r);return o.buffer}var t=new Blob(['\n const BIAS = 0x84;\n const CLIP = 32635;\n const encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n ];\n \n function encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n }\n \n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n \n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n } \n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let encodedArray = this.format === "ulaw"\n ? new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? 
sample * 32768 : sample * 32767;\n if (this.format === "ulaw") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),o=URL.createObjectURL(t),r=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=n,this.worklet=t,this.inputStream=o}return e.create=function(n){var t=n.sampleRate,r=n.format,i=n.preferHeadphonesForIosDevices;try{var a=null,s=null;return Promise.resolve(function(n,u){try{var c=(l={sampleRate:{ideal:t},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}},Promise.resolve(navigator.mediaDevices.getUserMedia({audio:!0})).then(function(n){function u(){function n(){return Promise.resolve(a.audioWorklet.addModule(o)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:l})).then(function(n){var o=a.createMediaStreamSource(s=n),i=new AudioWorkletNode(a,"raw-audio-processor");return i.port.postMessage({type:"setFormat",format:r,sampleRate:t}),o.connect(u),u.connect(i),new e(a,u,i,s)})})}var i=navigator.mediaDevices.getSupportedConstraints().sampleRate,u=(a=new window.AudioContext(i?{sampleRate:t}:{})).createAnalyser(),c=function(){if(!i)return Promise.resolve(a.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return c&&c.then?c.then(n):n()}null==n||n.getTracks().forEach(function(e){return e.stop()});var c=function(){if((["iPad Simulator","iPhone Simulator","iPod Simulator","iPad","iPhone","iPod"].includes(navigator.platform)||navigator.userAgent.includes("Mac")&&"ontouchend"in document)&&i)return Promise.resolve(window.navigator.mediaDevices.enumerateDevices()).then(function(e){var n=e.find(function(e){return"audioinput"===e.kind&&["airpod","headphone","earphone"].find(function(n){return e.label.toLowerCase().includes(n)})});n&&(l.deviceId={ideal:n.deviceId})})}();return c&&c.then?c.then(u):u()}))}catch(e){return u(e)}var l;return c&&c.then?c.then(void 0,u):c}(0,function(e){var n,t;throw null==(n=s)||n.getTracks().forEach(function(e){return e.stop()}),null==(t=a)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),i=new Blob(['\n const decodeTable = [0,132,396,924,1980,4092,8316,16764];\n \n export function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n }\n \n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.format = data.format;\n break;\n case 
"buffer":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === "ulaw"\n ? new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === "ulaw") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),a=URL.createObjectURL(i),s=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=n,this.gain=t,this.worklet=o}return e.create=function(n){var t=n.sampleRate,o=n.format;try{var r=null;return Promise.resolve(function(n,i){try{var s=(u=(r=new AudioContext({sampleRate:t})).createAnalyser(),(c=r.createGain()).connect(u),u.connect(r.destination),Promise.resolve(r.audioWorklet.addModule(a)).then(function(){var n=new AudioWorkletNode(r,"audio-concat-processor");return n.port.postMessage({type:"setFormat",format:o}),n.connect(c),new e(r,u,c,n)}))}catch(e){return i(e)}var u,c;return s&&s.then?s.then(void 0,i):s}(0,function(e){var n;throw null==(n=r)||n.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function u(e){return!!e.type}var c=/*#__PURE__*/function(){function e(e,n,t,o){this.socket=void 0,this.conversationId=void 0,this.inputFormat=void 0,this.outputFormat=void 0,this.socket=e,this.conversationId=n,this.inputFormat=t,this.outputFormat=o}e.create=function(n){try{var t=null;return Promise.resolve(function(o,r){try{var i=(s=null!=(a=n.origin)?a:"wss://api.elevenlabs.io",c=n.signedUrl?n.signedUrl:s+"/v1/convai/conversation?agent_id="+n.agentId,d=["convai"],n.authorization&&d.push("bearer."+n.authorization),t=new WebSocket(c,d),Promise.resolve(new Promise(function(e,o){t.addEventListener("open",function(){var e,o,r,i,a,s={type:"conversation_initiation_client_data"};n.overrides&&(s.conversation_config_override={agent:{prompt:null==(o=n.overrides.agent)?void 0:o.prompt,first_message:null==(r=n.overrides.agent)?void 0:r.firstMessage,language:null==(i=n.overrides.agent)?void 0:i.language},tts:{voice_id:null==(a=n.overrides.tts)?void 0:a.voiceId}}),n.customLlmExtraBody&&(s.custom_llm_extra_body=n.customLlmExtraBody),null==(e=t)||e.send(JSON.stringify(s))},{once:!0}),t.addEventListener("error",o),t.addEventListener("close",o),t.addEventListener("message",function(n){var t=JSON.parse(n.data);u(t)&&("conversation_initiation_metadata"===t.type?e(t.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(n){var 
o=n.conversation_id,r=n.agent_output_audio_format,i=n.user_input_audio_format,a=l(null!=i?i:"pcm_16000"),s=l(r);return new e(t,o,a,s)}))}catch(e){return r(e)}var a,s,c,d;return i&&i.then?i.then(void 0,r):i}(0,function(e){var n;throw null==(n=t)||n.close(),e}))}catch(e){return Promise.reject(e)}};var n=e.prototype;return n.close=function(){this.socket.close()},n.sendMessage=function(e){this.socket.send(JSON.stringify(e))},e}();function l(e){var n=e.split("_"),t=n[0],o=n[1];if(!["pcm","ulaw"].includes(t))throw new Error("Invalid format: "+e);var r=parseInt(o);if(isNaN(r))throw new Error("Invalid sample rate: "+o);return{format:t,sampleRate:r}}function d(e,n){try{var t=e()}catch(e){return n(e)}return t&&t.then?t.then(void 0,n):t}var f={clientTools:{}};function h(e,n,t){if(!e.s){if(t instanceof v){if(!t.s)return void(t.o=h.bind(null,e,n));1&n&&(n=t.s),t=t.v}if(t&&t.then)return void t.then(h.bind(null,e,n),h.bind(null,e,2));e.s=n,e.v=t;var o=e.o;o&&o(e)}}var p={onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onModeChange:function(){},onStatusChange:function(){},onCanSendFeedbackChange:function(){}},v=/*#__PURE__*/function(){function e(){}return e.prototype.then=function(n,t){var o=new e,r=this.s;if(r){var i=1&r?n:t;if(i){try{h(o,1,i(this.v))}catch(e){h(o,2,e)}return o}return this}return this.o=function(e){try{var r=e.v;1&e.s?h(o,1,n?n(r):r):t?h(o,1,t(r)):h(o,2,r)}catch(e){h(o,2,e)}},o},e}(),m=/*#__PURE__*/function(){function t(e,t,o,r){var i=this,a=this,s=this,c=this,l=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.currentEventId=1,this.lastFeedbackEventId=1,this.canSendFeedback=!1,this.endSession=function(){try{return"connected"!==a.status?Promise.resolve():(a.updateStatus("disconnecting"),a.connection.close(),Promise.resolve(a.input.close()).then(function(){return Promise.resolve(a.output.close()).then(function(){a.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==i.mode&&(i.mode=e,i.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==i.status&&(i.status=e,i.options.onStatusChange({status:e}))},this.updateCanSendFeedback=function(){var e=i.currentEventId!==i.lastFeedbackEventId;i.canSendFeedback!==e&&(i.canSendFeedback=e,i.options.onCanSendFeedbackChange({canSendFeedback:e}))},this.onEvent=function(e){try{return Promise.resolve(d(function(){var n,t=JSON.parse(e.data);if(u(t)){var o=function(e,n){var t,o=-1;e:{for(var r=0;r<n.length;r++){var i=n[r][0];if(i){var a=i();if(a&&a.then)break e;if(a===e){o=r;break}}else o=r}if(-1!==o){do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then){t=!0;break e}var c=n[o][2];o++}while(c&&!c());return u}}var l=new v,d=h.bind(null,l,2);return(t?u.then(f):a.then(function t(a){for(;;){if(a===e){o=r;break}if(++r===n.length){if(-1!==o)break;return void h(l,1,u)}if(i=n[r][0]){if((a=i())&&a.then)return void a.then(t).then(void 0,d)}else o=r}do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then)return void u.then(f).then(void 0,d);var c=n[o][2];o++}while(c&&!c());h(l,1,u)})).then(void 0,d),l;function f(e){for(;;){var t=n[o][2];if(!t||t())break;o++;for(var r=n[o][1];!r;)o++,r=n[o][1];if((e=r())&&e.then)return void e.then(f).then(void 0,d)}h(l,1,e)}}(t.type,[[function(){return"interruption"},function(){return 
t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio(),void(n=1)}],[function(){return"agent_response"},function(){return s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response}),void(n=1)}],[function(){return"user_transcript"},function(){return s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript}),void(n=1)}],[function(){return"internal_tentative_agent_response"},function(){return s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response}),void(n=1)}],[function(){return"client_tool_call"},function(){var e=function(){if(s.options.onUnhandledClientToolCall)return s.options.onUnhandledClientToolCall(t.client_tool_call),void(n=1);s.onError("Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",is_error:!0}),n=1},o=function(){if(s.options.clientTools.hasOwnProperty(t.client_tool_call.tool_name)){var e=function(){n=1},o=d(function(){return Promise.resolve(s.options.clientTools[t.client_tool_call.tool_name](t.client_tool_call.parameters)).then(function(e){s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:e,is_error:!1})})},function(e){s.onError("Client tool execution failed with following error: "+(null==e?void 0:e.message),{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==e?void 0:e.message),is_error:!0})});return o&&o.then?o.then(e):e()}}();return o&&o.then?o.then(e):e()},function(){return n||n}],[function(){return"audio"},function(){return s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.currentEventId=t.audio_event.event_id,s.updateCanSendFeedback(),s.updateMode("speaking")),void(n=1)}],[function(){return"ping"},function(){return s.connection.sendMessage({type:"pong",event_id:t.ping_event.event_id}),void(n=1)}],[void 0,function(){return s.options.onDebug(t),void(n=1)}]]);return o&&o.then?o.then(function(){}):void 0}},function(){s.onError("Failed to parse event data",{event:e})}))}catch(e){return Promise.reject(e)}},this.onInputWorkletMessage=function(e){var n,t;"connected"===i.status&&i.connection.sendMessage({user_audio_chunk:(n=e.data[0].buffer,t=new Uint8Array(n),window.btoa(String.fromCharCode.apply(String,t)))})},this.onOutputWorkletMessage=function(e){var n=e.data;"process"===n.type&&i.updateMode(n.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return c.output.gain.gain.value=c.volume,c.output.worklet.port.postMessage({type:"clearInterrupted"}),c.output.worklet.port.postMessage({type:"buffer",buffer:n(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return l.updateMode("listening"),l.output.worklet.port.postMessage({type:"interrupt"}),l.output.gain.gain.exponentialRampToValueAtTime(1e-4,l.output.context.currentTime+2),setTimeout(function(){l.output.gain.gain.value=l.volume,l.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return 
Promise.reject(e)}},this.onError=function(e,n){console.error(e,n),i.options.onError(e,n)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var n=0,t=0;t<e.length;t++)n+=e[t]/255;return(n/=e.length)<0?0:n>1?1:n},this.getId=function(){return i.connection.conversationId},this.setVolume=function(e){i.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=i.inputFrequencyData||(i.inputFrequencyData=new Uint8Array(i.input.analyser.frequencyBinCount)),i.input.analyser.getByteFrequencyData(i.inputFrequencyData),i.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=i.outputFrequencyData||(i.outputFrequencyData=new Uint8Array(i.output.analyser.frequencyBinCount)),i.output.analyser.getByteFrequencyData(i.outputFrequencyData),i.outputFrequencyData},this.getInputVolume=function(){return i.calculateVolume(i.getInputByteFrequencyData())},this.getOutputVolume=function(){return i.calculateVolume(i.getOutputByteFrequencyData())},this.sendFeedback=function(e){i.canSendFeedback?(i.connection.sendMessage({type:"feedback",score:e?"like":"dislike",event_id:i.currentEventId}),i.lastFeedbackEventId=i.currentEventId,i.updateCanSendFeedback()):console.warn(0===i.lastFeedbackEventId?"Cannot send feedback: the conversation has not started yet.":"Cannot send feedback: feedback has already been sent for the current response.")},this.options=e,this.connection=t,this.input=o,this.output=r,this.options.onConnect({conversationId:t.conversationId}),this.connection.socket.addEventListener("message",function(e){i.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){i.updateStatus("disconnected"),i.onError("Socket error",e)}),this.connection.socket.addEventListener("close",function(){i.updateStatus("disconnected"),i.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return t.startSession=function(n){try{var o=e({},f,p,n);o.onStatusChange({status:"connecting"}),o.onCanSendFeedbackChange({canSendFeedback:!1});var i=null,a=null,u=null;return Promise.resolve(d(function(){return Promise.resolve(c.create(n)).then(function(c){return a=c,Promise.resolve(Promise.all([r.create(e({},a.inputFormat,{preferHeadphonesForIosDevices:n.preferHeadphonesForIosDevices})),s.create(a.outputFormat)])).then(function(e){return new t(o,a,i=e[0],u=e[1])})})},function(e){var n,t;return o.onStatusChange({status:"disconnected"}),null==(n=a)||n.close(),Promise.resolve(null==(t=i)?void 0:t.close()).then(function(){var n;return Promise.resolve(null==(n=u)?void 0:n.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},t}();function g(e,n,t){return void 0===t&&(t="https://api.elevenlabs.io"),fetch(t+"/v1/convai/conversations/"+e+"/feedback",{method:"POST",body:JSON.stringify({feedback:n?"like":"dislike"}),headers:{"Content-Type":"application/json"}})}export{m as Conversation,g as postOverallFeedback}; | ||
//# sourceMappingURL=lib.module.js.map |
@@ -1,2 +0,2 @@ | ||
!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n((e||self).client={})}(this,function(e){function n(){return n=Object.assign?Object.assign.bind():function(e){for(var n=1;n<arguments.length;n++){var t=arguments[n];for(var o in t)({}).hasOwnProperty.call(t,o)&&(e[o]=t[o])}return e},n.apply(null,arguments)}function t(e){for(var n=window.atob(e),t=n.length,o=new Uint8Array(t),r=0;r<t;r++)o[r]=n.charCodeAt(r);return o.buffer}var o=new Blob(['\n const BIAS = 0x84;\n const CLIP = 32635;\n const encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n ];\n \n function encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n }\n \n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n \n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n } \n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let encodedArray = this.format === "ulaw"\n ? new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? 
sample * 32768 : sample * 32767;\n if (this.format === "ulaw") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(o),i=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=n,this.worklet=t,this.inputStream=o}return e.create=function(n){var t=n.sampleRate,o=n.format;try{var i=null,a=null;return Promise.resolve(function(n,s){try{var u=function(){function n(){return Promise.resolve(i.audioWorklet.addModule(r)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:{sampleRate:{ideal:t},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}}})).then(function(n){var r=i.createMediaStreamSource(a=n),s=new AudioWorkletNode(i,"raw-audio-processor");return s.port.postMessage({type:"setFormat",format:o,sampleRate:t}),r.connect(u),u.connect(s),new e(i,u,s,a)})})}var s=navigator.mediaDevices.getSupportedConstraints().sampleRate,u=(i=new window.AudioContext(s?{sampleRate:t}:{})).createAnalyser(),c=function(){if(!s)return Promise.resolve(i.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return c&&c.then?c.then(n):n()}()}catch(e){return s(e)}return u&&u.then?u.then(void 0,s):u}(0,function(e){var n,t;throw null==(n=a)||n.getTracks().forEach(function(e){return e.stop()}),null==(t=i)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),a=new Blob(['\n const decodeTable = [0,132,396,924,1980,4092,8316,16764];\n \n export function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n }\n \n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.format = data.format;\n break;\n case "buffer":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === "ulaw"\n ? 
new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === "ulaw") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),s=URL.createObjectURL(a),u=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=n,this.gain=t,this.worklet=o}return e.create=function(n){var t=n.sampleRate,o=n.format;try{var r=null;return Promise.resolve(function(n,i){try{var a=(u=(r=new AudioContext({sampleRate:t})).createAnalyser(),(c=r.createGain()).connect(u),u.connect(r.destination),Promise.resolve(r.audioWorklet.addModule(s)).then(function(){var n=new AudioWorkletNode(r,"audio-concat-processor");return n.port.postMessage({type:"setFormat",format:o}),n.connect(c),new e(r,u,c,n)}))}catch(e){return i(e)}var u,c;return a&&a.then?a.then(void 0,i):a}(0,function(e){var n;throw null==(n=r)||n.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function c(e){return!!e.type}var l=/*#__PURE__*/function(){function e(e,n,t,o){this.socket=void 0,this.conversationId=void 0,this.inputFormat=void 0,this.outputFormat=void 0,this.socket=e,this.conversationId=n,this.inputFormat=t,this.outputFormat=o}e.create=function(n){try{var t=null;return Promise.resolve(function(o,r){try{var i=(s=null!=(a=n.origin)?a:"wss://api.elevenlabs.io",u=n.signedUrl?n.signedUrl:s+"/v1/convai/conversation?agent_id="+n.agentId,l=["convai"],n.authorization&&l.push("bearer."+n.authorization),t=new WebSocket(u,l),Promise.resolve(new Promise(function(e,o){t.addEventListener("open",function(){var e,o,r,i,a,s={type:"conversation_initiation_client_data"};n.overrides&&(s.conversation_config_override={agent:{prompt:null==(o=n.overrides.agent)?void 0:o.prompt,first_message:null==(r=n.overrides.agent)?void 0:r.firstMessage,language:null==(i=n.overrides.agent)?void 0:i.language},tts:{voice_id:null==(a=n.overrides.tts)?void 0:a.voiceId}}),n.customLlmExtraBody&&(s.custom_llm_extra_body=n.customLlmExtraBody),null==(e=t)||e.send(JSON.stringify(s))},{once:!0}),t.addEventListener("error",o),t.addEventListener("close",o),t.addEventListener("message",function(n){var t=JSON.parse(n.data);c(t)&&("conversation_initiation_metadata"===t.type?e(t.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(n){var 
o=n.conversation_id,r=n.agent_output_audio_format,i=n.user_input_audio_format,a=d(null!=i?i:"pcm_16000"),s=d(r);return new e(t,o,a,s)}))}catch(e){return r(e)}var a,s,u,l;return i&&i.then?i.then(void 0,r):i}(0,function(e){var n;throw null==(n=t)||n.close(),e}))}catch(e){return Promise.reject(e)}};var n=e.prototype;return n.close=function(){this.socket.close()},n.sendMessage=function(e){this.socket.send(JSON.stringify(e))},e}();function d(e){var n=e.split("_"),t=n[0],o=n[1];if(!["pcm","ulaw"].includes(t))throw new Error("Invalid format: "+e);var r=parseInt(o);if(isNaN(r))throw new Error("Invalid sample rate: "+o);return{format:t,sampleRate:r}}function f(e,n){try{var t=e()}catch(e){return n(e)}return t&&t.then?t.then(void 0,n):t}var p={clientTools:{}};function h(e,n,t){if(!e.s){if(t instanceof m){if(!t.s)return void(t.o=h.bind(null,e,n));1&n&&(n=t.s),t=t.v}if(t&&t.then)return void t.then(h.bind(null,e,n),h.bind(null,e,2));e.s=n,e.v=t;var o=e.o;o&&o(e)}}var v={onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onModeChange:function(){},onStatusChange:function(){},onCanSendFeedbackChange:function(){}},m=/*#__PURE__*/function(){function e(){}return e.prototype.then=function(n,t){var o=new e,r=this.s;if(r){var i=1&r?n:t;if(i){try{h(o,1,i(this.v))}catch(e){h(o,2,e)}return o}return this}return this.o=function(e){try{var r=e.v;1&e.s?h(o,1,n?n(r):r):t?h(o,1,t(r)):h(o,2,r)}catch(e){h(o,2,e)}},o},e}();e.Conversation=/*#__PURE__*/function(){function e(e,n,o,r){var i=this,a=this,s=this,u=this,l=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.currentEventId=1,this.lastFeedbackEventId=1,this.canSendFeedback=!1,this.endSession=function(){try{return"connected"!==a.status?Promise.resolve():(a.updateStatus("disconnecting"),a.connection.close(),Promise.resolve(a.input.close()).then(function(){return Promise.resolve(a.output.close()).then(function(){a.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==i.mode&&(i.mode=e,i.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==i.status&&(i.status=e,i.options.onStatusChange({status:e}))},this.updateCanSendFeedback=function(){var e=i.currentEventId!==i.lastFeedbackEventId;i.canSendFeedback!==e&&(i.canSendFeedback=e,i.options.onCanSendFeedbackChange({canSendFeedback:e}))},this.onEvent=function(e){try{return Promise.resolve(f(function(){var n,t=JSON.parse(e.data);if(c(t)){var o=function(e,n){var t,o=-1;e:{for(var r=0;r<n.length;r++){var i=n[r][0];if(i){var a=i();if(a&&a.then)break e;if(a===e){o=r;break}}else o=r}if(-1!==o){do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then){t=!0;break e}var c=n[o][2];o++}while(c&&!c());return u}}var l=new m,d=h.bind(null,l,2);return(t?u.then(f):a.then(function t(a){for(;;){if(a===e){o=r;break}if(++r===n.length){if(-1!==o)break;return void h(l,1,u)}if(i=n[r][0]){if((a=i())&&a.then)return void a.then(t).then(void 0,d)}else o=r}do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then)return void u.then(f).then(void 0,d);var c=n[o][2];o++}while(c&&!c());h(l,1,u)})).then(void 0,d),l;function f(e){for(;;){var t=n[o][2];if(!t||t())break;o++;for(var r=n[o][1];!r;)o++,r=n[o][1];if((e=r())&&e.then)return void e.then(f).then(void 0,d)}h(l,1,e)}}(t.type,[[function(){return"interruption"},function(){return 
t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio(),void(n=1)}],[function(){return"agent_response"},function(){return s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response}),void(n=1)}],[function(){return"user_transcript"},function(){return s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript}),void(n=1)}],[function(){return"internal_tentative_agent_response"},function(){return s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response}),void(n=1)}],[function(){return"client_tool_call"},function(){var e=function(){if(s.options.onUnhandledClientToolCall)return s.options.onUnhandledClientToolCall(t.client_tool_call),void(n=1);s.onError("Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",is_error:!0}),n=1},o=function(){if(s.options.clientTools.hasOwnProperty(t.client_tool_call.tool_name)){var e=function(){n=1},o=f(function(){return Promise.resolve(s.options.clientTools[t.client_tool_call.tool_name](t.client_tool_call.parameters)).then(function(e){s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:e,is_error:!1})})},function(e){s.onError("Client tool execution failed with following error: "+(null==e?void 0:e.message),{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==e?void 0:e.message),is_error:!0})});return o&&o.then?o.then(e):e()}}();return o&&o.then?o.then(e):e()},function(){return n||n}],[function(){return"audio"},function(){return s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.currentEventId=t.audio_event.event_id,s.updateCanSendFeedback(),s.updateMode("speaking")),void(n=1)}],[function(){return"ping"},function(){return s.connection.sendMessage({type:"pong",event_id:t.ping_event.event_id}),void(n=1)}],[void 0,function(){return s.options.onDebug(t),void(n=1)}]]);return o&&o.then?o.then(function(){}):void 0}},function(){s.onError("Failed to parse event data",{event:e})}))}catch(e){return Promise.reject(e)}},this.onInputWorkletMessage=function(e){var n,t;"connected"===i.status&&i.connection.sendMessage({user_audio_chunk:(n=e.data[0].buffer,t=new Uint8Array(n),window.btoa(String.fromCharCode.apply(String,t)))})},this.onOutputWorkletMessage=function(e){var n=e.data;"process"===n.type&&i.updateMode(n.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return u.output.gain.gain.value=u.volume,u.output.worklet.port.postMessage({type:"clearInterrupted"}),u.output.worklet.port.postMessage({type:"buffer",buffer:t(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return l.updateMode("listening"),l.output.worklet.port.postMessage({type:"interrupt"}),l.output.gain.gain.exponentialRampToValueAtTime(1e-4,l.output.context.currentTime+2),setTimeout(function(){l.output.gain.gain.value=l.volume,l.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return 
Promise.reject(e)}},this.onError=function(e,n){console.error(e,n),i.options.onError(e,n)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var n=0,t=0;t<e.length;t++)n+=e[t]/255;return(n/=e.length)<0?0:n>1?1:n},this.getId=function(){return i.connection.conversationId},this.setVolume=function(e){i.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=i.inputFrequencyData||(i.inputFrequencyData=new Uint8Array(i.input.analyser.frequencyBinCount)),i.input.analyser.getByteFrequencyData(i.inputFrequencyData),i.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=i.outputFrequencyData||(i.outputFrequencyData=new Uint8Array(i.output.analyser.frequencyBinCount)),i.output.analyser.getByteFrequencyData(i.outputFrequencyData),i.outputFrequencyData},this.getInputVolume=function(){return i.calculateVolume(i.getInputByteFrequencyData())},this.getOutputVolume=function(){return i.calculateVolume(i.getOutputByteFrequencyData())},this.sendFeedback=function(e){i.canSendFeedback?(i.connection.sendMessage({type:"feedback",score:e?"like":"dislike",event_id:i.currentEventId}),i.lastFeedbackEventId=i.currentEventId,i.updateCanSendFeedback()):console.warn(0===i.lastFeedbackEventId?"Cannot send feedback: the conversation has not started yet.":"Cannot send feedback: feedback has already been sent for the current response.")},this.options=e,this.connection=n,this.input=o,this.output=r,this.options.onConnect({conversationId:n.conversationId}),this.connection.socket.addEventListener("message",function(e){i.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){i.updateStatus("disconnected"),i.onError("Socket error",e)}),this.connection.socket.addEventListener("close",function(){i.updateStatus("disconnected"),i.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return e.startSession=function(t){try{var o=n({},p,v,t);o.onStatusChange({status:"connecting"}),o.onCanSendFeedbackChange({canSendFeedback:!1});var r=null,a=null,s=null;return Promise.resolve(f(function(){return Promise.resolve(l.create(t)).then(function(n){return a=n,Promise.resolve(Promise.all([i.create(a.inputFormat),u.create(a.outputFormat)])).then(function(n){return new e(o,a,r=n[0],s=n[1])})})},function(e){var n,t;return o.onStatusChange({status:"disconnected"}),null==(n=a)||n.close(),Promise.resolve(null==(t=r)?void 0:t.close()).then(function(){var n;return Promise.resolve(null==(n=s)?void 0:n.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},e}(),e.postOverallFeedback=function(e,n,t){return void 0===t&&(t="https://api.elevenlabs.io"),fetch(t+"/v1/convai/conversations/"+e+"/feedback",{method:"POST",body:JSON.stringify({feedback:n?"like":"dislike"}),headers:{"Content-Type":"application/json"}})}}); | ||
!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n((e||self).client={})}(this,function(e){function n(){return n=Object.assign?Object.assign.bind():function(e){for(var n=1;n<arguments.length;n++){var t=arguments[n];for(var o in t)({}).hasOwnProperty.call(t,o)&&(e[o]=t[o])}return e},n.apply(null,arguments)}function t(e){for(var n=window.atob(e),t=n.length,o=new Uint8Array(t),r=0;r<t;r++)o[r]=n.charCodeAt(r);return o.buffer}var o=new Blob(['\n const BIAS = 0x84;\n const CLIP = 32635;\n const encodeTable = [\n 0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,\n 4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,\n 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7\n ];\n \n function encodeSample(sample) {\n let sign;\n let exponent;\n let mantissa;\n let muLawSample;\n sign = (sample >> 8) & 0x80;\n if (sign !== 0) sample = -sample;\n sample = sample + BIAS;\n if (sample > CLIP) sample = CLIP;\n exponent = encodeTable[(sample>>7) & 0xFF];\n mantissa = (sample >> (exponent+3)) & 0x0F;\n muLawSample = ~(sign | (exponent << 4) | mantissa);\n \n return muLawSample;\n }\n \n class RawAudioProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n \n this.port.onmessage = ({ data }) => {\n this.buffer = []; // Initialize an empty buffer\n this.bufferSize = data.sampleRate / 4;\n \n if (globalThis.LibSampleRate && sampleRate !== data.sampleRate) {\n globalThis.LibSampleRate.create(1, sampleRate, data.sampleRate).then(resampler => {\n this.resampler = resampler;\n });\n } \n };\n }\n process(inputs) {\n if (!this.buffer) {\n return true;\n }\n \n const input = inputs[0]; // Get the first input node\n if (input.length > 0) {\n let channelData = input[0]; // Get the first channel\'s data\n\n // Resample the audio if necessary\n if (this.resampler) {\n channelData = this.resampler.full(channelData);\n }\n\n // Add channel data to the buffer\n this.buffer.push(...channelData);\n // Get max volume \n let sum = 0.0;\n for (let i = 0; i < channelData.length; i++) {\n sum += channelData[i] * channelData[i];\n }\n const maxVolume = Math.sqrt(sum / channelData.length);\n // Check if buffer size has reached or exceeded the threshold\n if (this.buffer.length >= this.bufferSize) {\n const float32Array = new Float32Array(this.buffer)\n let encodedArray = this.format === "ulaw"\n ? new Uint8Array(float32Array.length)\n : new Int16Array(float32Array.length);\n\n // Iterate through the Float32Array and convert each sample to PCM16\n for (let i = 0; i < float32Array.length; i++) {\n // Clamp the value to the range [-1, 1]\n let sample = Math.max(-1, Math.min(1, float32Array[i]));\n\n // Scale the sample to the range [-32768, 32767]\n let value = sample < 0 ? 
sample * 32768 : sample * 32767;\n if (this.format === "ulaw") {\n value = encodeSample(Math.round(value));\n }\n\n encodedArray[i] = value;\n }\n\n // Send the buffered data to the main script\n this.port.postMessage([encodedArray, maxVolume]);\n\n // Clear the buffer after sending\n this.buffer = [];\n }\n }\n return true; // Continue processing\n }\n }\n registerProcessor("raw-audio-processor", RawAudioProcessor);\n '],{type:"application/javascript"}),r=URL.createObjectURL(o),i=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.worklet=void 0,this.inputStream=void 0,this.context=e,this.analyser=n,this.worklet=t,this.inputStream=o}return e.create=function(n){var t=n.sampleRate,o=n.format,i=n.preferHeadphonesForIosDevices;try{var a=null,s=null;return Promise.resolve(function(n,u){try{var c=(l={sampleRate:{ideal:t},echoCancellation:{ideal:!0},noiseSuppression:{ideal:!0}},Promise.resolve(navigator.mediaDevices.getUserMedia({audio:!0})).then(function(n){function u(){function n(){return Promise.resolve(a.audioWorklet.addModule(r)).then(function(){return Promise.resolve(navigator.mediaDevices.getUserMedia({audio:l})).then(function(n){var r=a.createMediaStreamSource(s=n),i=new AudioWorkletNode(a,"raw-audio-processor");return i.port.postMessage({type:"setFormat",format:o,sampleRate:t}),r.connect(u),u.connect(i),new e(a,u,i,s)})})}var i=navigator.mediaDevices.getSupportedConstraints().sampleRate,u=(a=new window.AudioContext(i?{sampleRate:t}:{})).createAnalyser(),c=function(){if(!i)return Promise.resolve(a.audioWorklet.addModule("https://cdn.jsdelivr.net/npm/@alexanderolsen/libsamplerate-js@2.1.2/dist/libsamplerate.worklet.js")).then(function(){})}();return c&&c.then?c.then(n):n()}null==n||n.getTracks().forEach(function(e){return e.stop()});var c=function(){if((["iPad Simulator","iPhone Simulator","iPod Simulator","iPad","iPhone","iPod"].includes(navigator.platform)||navigator.userAgent.includes("Mac")&&"ontouchend"in document)&&i)return Promise.resolve(window.navigator.mediaDevices.enumerateDevices()).then(function(e){var n=e.find(function(e){return"audioinput"===e.kind&&["airpod","headphone","earphone"].find(function(n){return e.label.toLowerCase().includes(n)})});n&&(l.deviceId={ideal:n.deviceId})})}();return c&&c.then?c.then(u):u()}))}catch(e){return u(e)}var l;return c&&c.then?c.then(void 0,u):c}(0,function(e){var n,t;throw null==(n=s)||n.getTracks().forEach(function(e){return e.stop()}),null==(t=a)||t.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return this.inputStream.getTracks().forEach(function(e){return e.stop()}),Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}(),a=new Blob(['\n const decodeTable = [0,132,396,924,1980,4092,8316,16764];\n \n export function decodeSample(muLawSample) {\n let sign;\n let exponent;\n let mantissa;\n let sample;\n muLawSample = ~muLawSample;\n sign = (muLawSample & 0x80);\n exponent = (muLawSample >> 4) & 0x07;\n mantissa = muLawSample & 0x0F;\n sample = decodeTable[exponent] + (mantissa << (exponent+3));\n if (sign !== 0) sample = -sample;\n\n return sample;\n }\n \n class AudioConcatProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n this.buffers = []; // Initialize an empty buffer\n this.cursor = 0;\n this.currentBuffer = null;\n this.wasInterrupted = false;\n this.finished = false;\n \n this.port.onmessage = ({ data }) => {\n switch (data.type) {\n case "setFormat":\n this.format = data.format;\n break;\n case 
"buffer":\n this.wasInterrupted = false;\n this.buffers.push(\n this.format === "ulaw"\n ? new Uint8Array(data.buffer)\n : new Int16Array(data.buffer)\n );\n break;\n case "interrupt":\n this.wasInterrupted = true;\n break;\n case "clearInterrupted":\n if (this.wasInterrupted) {\n this.wasInterrupted = false;\n this.buffers = [];\n this.currentBuffer = null;\n }\n }\n };\n }\n process(_, outputs) {\n let finished = false;\n const output = outputs[0][0];\n for (let i = 0; i < output.length; i++) {\n if (!this.currentBuffer) {\n if (this.buffers.length === 0) {\n finished = true;\n break;\n }\n this.currentBuffer = this.buffers.shift();\n this.cursor = 0;\n }\n\n let value = this.currentBuffer[this.cursor];\n if (this.format === "ulaw") {\n value = decodeSample(value);\n }\n output[i] = value / 32768;\n this.cursor++;\n\n if (this.cursor >= this.currentBuffer.length) {\n this.currentBuffer = null;\n }\n }\n\n if (this.finished !== finished) {\n this.finished = finished;\n this.port.postMessage({ type: "process", finished });\n }\n\n return true; // Continue processing\n }\n }\n\n registerProcessor("audio-concat-processor", AudioConcatProcessor);\n '],{type:"application/javascript"}),s=URL.createObjectURL(a),u=/*#__PURE__*/function(){function e(e,n,t,o){this.context=void 0,this.analyser=void 0,this.gain=void 0,this.worklet=void 0,this.context=e,this.analyser=n,this.gain=t,this.worklet=o}return e.create=function(n){var t=n.sampleRate,o=n.format;try{var r=null;return Promise.resolve(function(n,i){try{var a=(u=(r=new AudioContext({sampleRate:t})).createAnalyser(),(c=r.createGain()).connect(u),u.connect(r.destination),Promise.resolve(r.audioWorklet.addModule(s)).then(function(){var n=new AudioWorkletNode(r,"audio-concat-processor");return n.port.postMessage({type:"setFormat",format:o}),n.connect(c),new e(r,u,c,n)}))}catch(e){return i(e)}var u,c;return a&&a.then?a.then(void 0,i):a}(0,function(e){var n;throw null==(n=r)||n.close(),e}))}catch(e){return Promise.reject(e)}},e.prototype.close=function(){try{return Promise.resolve(this.context.close()).then(function(){})}catch(e){return Promise.reject(e)}},e}();function c(e){return!!e.type}var l=/*#__PURE__*/function(){function e(e,n,t,o){this.socket=void 0,this.conversationId=void 0,this.inputFormat=void 0,this.outputFormat=void 0,this.socket=e,this.conversationId=n,this.inputFormat=t,this.outputFormat=o}e.create=function(n){try{var t=null;return Promise.resolve(function(o,r){try{var i=(s=null!=(a=n.origin)?a:"wss://api.elevenlabs.io",u=n.signedUrl?n.signedUrl:s+"/v1/convai/conversation?agent_id="+n.agentId,l=["convai"],n.authorization&&l.push("bearer."+n.authorization),t=new WebSocket(u,l),Promise.resolve(new Promise(function(e,o){t.addEventListener("open",function(){var e,o,r,i,a,s={type:"conversation_initiation_client_data"};n.overrides&&(s.conversation_config_override={agent:{prompt:null==(o=n.overrides.agent)?void 0:o.prompt,first_message:null==(r=n.overrides.agent)?void 0:r.firstMessage,language:null==(i=n.overrides.agent)?void 0:i.language},tts:{voice_id:null==(a=n.overrides.tts)?void 0:a.voiceId}}),n.customLlmExtraBody&&(s.custom_llm_extra_body=n.customLlmExtraBody),null==(e=t)||e.send(JSON.stringify(s))},{once:!0}),t.addEventListener("error",o),t.addEventListener("close",o),t.addEventListener("message",function(n){var t=JSON.parse(n.data);c(t)&&("conversation_initiation_metadata"===t.type?e(t.conversation_initiation_metadata_event):console.warn("First received message is not conversation metadata."))},{once:!0})})).then(function(n){var 
o=n.conversation_id,r=n.agent_output_audio_format,i=n.user_input_audio_format,a=d(null!=i?i:"pcm_16000"),s=d(r);return new e(t,o,a,s)}))}catch(e){return r(e)}var a,s,u,l;return i&&i.then?i.then(void 0,r):i}(0,function(e){var n;throw null==(n=t)||n.close(),e}))}catch(e){return Promise.reject(e)}};var n=e.prototype;return n.close=function(){this.socket.close()},n.sendMessage=function(e){this.socket.send(JSON.stringify(e))},e}();function d(e){var n=e.split("_"),t=n[0],o=n[1];if(!["pcm","ulaw"].includes(t))throw new Error("Invalid format: "+e);var r=parseInt(o);if(isNaN(r))throw new Error("Invalid sample rate: "+o);return{format:t,sampleRate:r}}function f(e,n){try{var t=e()}catch(e){return n(e)}return t&&t.then?t.then(void 0,n):t}var p={clientTools:{}};function h(e,n,t){if(!e.s){if(t instanceof m){if(!t.s)return void(t.o=h.bind(null,e,n));1&n&&(n=t.s),t=t.v}if(t&&t.then)return void t.then(h.bind(null,e,n),h.bind(null,e,2));e.s=n,e.v=t;var o=e.o;o&&o(e)}}var v={onConnect:function(){},onDebug:function(){},onDisconnect:function(){},onError:function(){},onMessage:function(){},onModeChange:function(){},onStatusChange:function(){},onCanSendFeedbackChange:function(){}},m=/*#__PURE__*/function(){function e(){}return e.prototype.then=function(n,t){var o=new e,r=this.s;if(r){var i=1&r?n:t;if(i){try{h(o,1,i(this.v))}catch(e){h(o,2,e)}return o}return this}return this.o=function(e){try{var r=e.v;1&e.s?h(o,1,n?n(r):r):t?h(o,1,t(r)):h(o,2,r)}catch(e){h(o,2,e)}},o},e}();e.Conversation=/*#__PURE__*/function(){function e(e,n,o,r){var i=this,a=this,s=this,u=this,l=this;this.options=void 0,this.connection=void 0,this.input=void 0,this.output=void 0,this.lastInterruptTimestamp=0,this.mode="listening",this.status="connecting",this.inputFrequencyData=void 0,this.outputFrequencyData=void 0,this.volume=1,this.currentEventId=1,this.lastFeedbackEventId=1,this.canSendFeedback=!1,this.endSession=function(){try{return"connected"!==a.status?Promise.resolve():(a.updateStatus("disconnecting"),a.connection.close(),Promise.resolve(a.input.close()).then(function(){return Promise.resolve(a.output.close()).then(function(){a.updateStatus("disconnected")})}))}catch(e){return Promise.reject(e)}},this.updateMode=function(e){e!==i.mode&&(i.mode=e,i.options.onModeChange({mode:e}))},this.updateStatus=function(e){e!==i.status&&(i.status=e,i.options.onStatusChange({status:e}))},this.updateCanSendFeedback=function(){var e=i.currentEventId!==i.lastFeedbackEventId;i.canSendFeedback!==e&&(i.canSendFeedback=e,i.options.onCanSendFeedbackChange({canSendFeedback:e}))},this.onEvent=function(e){try{return Promise.resolve(f(function(){var n,t=JSON.parse(e.data);if(c(t)){var o=function(e,n){var t,o=-1;e:{for(var r=0;r<n.length;r++){var i=n[r][0];if(i){var a=i();if(a&&a.then)break e;if(a===e){o=r;break}}else o=r}if(-1!==o){do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then){t=!0;break e}var c=n[o][2];o++}while(c&&!c());return u}}var l=new m,d=h.bind(null,l,2);return(t?u.then(f):a.then(function t(a){for(;;){if(a===e){o=r;break}if(++r===n.length){if(-1!==o)break;return void h(l,1,u)}if(i=n[r][0]){if((a=i())&&a.then)return void a.then(t).then(void 0,d)}else o=r}do{for(var s=n[o][1];!s;)o++,s=n[o][1];var u=s();if(u&&u.then)return void u.then(f).then(void 0,d);var c=n[o][2];o++}while(c&&!c());h(l,1,u)})).then(void 0,d),l;function f(e){for(;;){var t=n[o][2];if(!t||t())break;o++;for(var r=n[o][1];!r;)o++,r=n[o][1];if((e=r())&&e.then)return void e.then(f).then(void 0,d)}h(l,1,e)}}(t.type,[[function(){return"interruption"},function(){return 
t.interruption_event&&(s.lastInterruptTimestamp=t.interruption_event.event_id),s.fadeOutAudio(),void(n=1)}],[function(){return"agent_response"},function(){return s.options.onMessage({source:"ai",message:t.agent_response_event.agent_response}),void(n=1)}],[function(){return"user_transcript"},function(){return s.options.onMessage({source:"user",message:t.user_transcription_event.user_transcript}),void(n=1)}],[function(){return"internal_tentative_agent_response"},function(){return s.options.onDebug({type:"tentative_agent_response",response:t.tentative_agent_response_internal_event.tentative_agent_response}),void(n=1)}],[function(){return"client_tool_call"},function(){var e=function(){if(s.options.onUnhandledClientToolCall)return s.options.onUnhandledClientToolCall(t.client_tool_call),void(n=1);s.onError("Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool with name "+t.client_tool_call.tool_name+" is not defined on client",is_error:!0}),n=1},o=function(){if(s.options.clientTools.hasOwnProperty(t.client_tool_call.tool_name)){var e=function(){n=1},o=f(function(){return Promise.resolve(s.options.clientTools[t.client_tool_call.tool_name](t.client_tool_call.parameters)).then(function(e){s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:e,is_error:!1})})},function(e){s.onError("Client tool execution failed with following error: "+(null==e?void 0:e.message),{clientToolName:t.client_tool_call.tool_name}),s.connection.sendMessage({type:"client_tool_result",tool_call_id:t.client_tool_call.tool_call_id,result:"Client tool execution failed: "+(null==e?void 0:e.message),is_error:!0})});return o&&o.then?o.then(e):e()}}();return o&&o.then?o.then(e):e()},function(){return n||n}],[function(){return"audio"},function(){return s.lastInterruptTimestamp<=t.audio_event.event_id&&(s.addAudioBase64Chunk(t.audio_event.audio_base_64),s.currentEventId=t.audio_event.event_id,s.updateCanSendFeedback(),s.updateMode("speaking")),void(n=1)}],[function(){return"ping"},function(){return s.connection.sendMessage({type:"pong",event_id:t.ping_event.event_id}),void(n=1)}],[void 0,function(){return s.options.onDebug(t),void(n=1)}]]);return o&&o.then?o.then(function(){}):void 0}},function(){s.onError("Failed to parse event data",{event:e})}))}catch(e){return Promise.reject(e)}},this.onInputWorkletMessage=function(e){var n,t;"connected"===i.status&&i.connection.sendMessage({user_audio_chunk:(n=e.data[0].buffer,t=new Uint8Array(n),window.btoa(String.fromCharCode.apply(String,t)))})},this.onOutputWorkletMessage=function(e){var n=e.data;"process"===n.type&&i.updateMode(n.finished?"listening":"speaking")},this.addAudioBase64Chunk=function(e){try{return u.output.gain.gain.value=u.volume,u.output.worklet.port.postMessage({type:"clearInterrupted"}),u.output.worklet.port.postMessage({type:"buffer",buffer:t(e)}),Promise.resolve()}catch(e){return Promise.reject(e)}},this.fadeOutAudio=function(){try{return l.updateMode("listening"),l.output.worklet.port.postMessage({type:"interrupt"}),l.output.gain.gain.exponentialRampToValueAtTime(1e-4,l.output.context.currentTime+2),setTimeout(function(){l.output.gain.gain.value=l.volume,l.output.worklet.port.postMessage({type:"clearInterrupted"})},2e3),Promise.resolve()}catch(e){return 
Promise.reject(e)}},this.onError=function(e,n){console.error(e,n),i.options.onError(e,n)},this.calculateVolume=function(e){if(0===e.length)return 0;for(var n=0,t=0;t<e.length;t++)n+=e[t]/255;return(n/=e.length)<0?0:n>1?1:n},this.getId=function(){return i.connection.conversationId},this.setVolume=function(e){i.volume=e.volume},this.getInputByteFrequencyData=function(){return null!=i.inputFrequencyData||(i.inputFrequencyData=new Uint8Array(i.input.analyser.frequencyBinCount)),i.input.analyser.getByteFrequencyData(i.inputFrequencyData),i.inputFrequencyData},this.getOutputByteFrequencyData=function(){return null!=i.outputFrequencyData||(i.outputFrequencyData=new Uint8Array(i.output.analyser.frequencyBinCount)),i.output.analyser.getByteFrequencyData(i.outputFrequencyData),i.outputFrequencyData},this.getInputVolume=function(){return i.calculateVolume(i.getInputByteFrequencyData())},this.getOutputVolume=function(){return i.calculateVolume(i.getOutputByteFrequencyData())},this.sendFeedback=function(e){i.canSendFeedback?(i.connection.sendMessage({type:"feedback",score:e?"like":"dislike",event_id:i.currentEventId}),i.lastFeedbackEventId=i.currentEventId,i.updateCanSendFeedback()):console.warn(0===i.lastFeedbackEventId?"Cannot send feedback: the conversation has not started yet.":"Cannot send feedback: feedback has already been sent for the current response.")},this.options=e,this.connection=n,this.input=o,this.output=r,this.options.onConnect({conversationId:n.conversationId}),this.connection.socket.addEventListener("message",function(e){i.onEvent(e)}),this.connection.socket.addEventListener("error",function(e){i.updateStatus("disconnected"),i.onError("Socket error",e)}),this.connection.socket.addEventListener("close",function(){i.updateStatus("disconnected"),i.options.onDisconnect()}),this.input.worklet.port.onmessage=this.onInputWorkletMessage,this.output.worklet.port.onmessage=this.onOutputWorkletMessage,this.updateStatus("connected")}return e.startSession=function(t){try{var o=n({},p,v,t);o.onStatusChange({status:"connecting"}),o.onCanSendFeedbackChange({canSendFeedback:!1});var r=null,a=null,s=null;return Promise.resolve(f(function(){return Promise.resolve(l.create(t)).then(function(c){return a=c,Promise.resolve(Promise.all([i.create(n({},a.inputFormat,{preferHeadphonesForIosDevices:t.preferHeadphonesForIosDevices})),u.create(a.outputFormat)])).then(function(n){return new e(o,a,r=n[0],s=n[1])})})},function(e){var n,t;return o.onStatusChange({status:"disconnected"}),null==(n=a)||n.close(),Promise.resolve(null==(t=r)?void 0:t.close()).then(function(){var n;return Promise.resolve(null==(n=s)?void 0:n.close()).then(function(){throw e})})}))}catch(e){return Promise.reject(e)}},e}(),e.postOverallFeedback=function(e,n,t){return void 0===t&&(t="https://api.elevenlabs.io"),fetch(t+"/v1/convai/conversations/"+e+"/feedback",{method:"POST",body:JSON.stringify({feedback:n?"like":"dislike"}),headers:{"Content-Type":"application/json"}})}}); | ||
//# sourceMappingURL=lib.umd.js.map |
import { FormatConfig } from "./connection"; | ||
export type InputConfig = { | ||
preferHeadphonesForIosDevices?: boolean; | ||
}; | ||
export declare class Input { | ||
@@ -7,5 +10,5 @@ readonly context: AudioContext; | ||
readonly inputStream: MediaStream; | ||
static create({ sampleRate, format, }: FormatConfig): Promise<Input>; | ||
static create({ sampleRate, format, preferHeadphonesForIosDevices, }: FormatConfig & InputConfig): Promise<Input>; | ||
private constructor(); | ||
close(): Promise<void>; | ||
} |
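For orientation, here is a minimal consumer-side sketch of how the widened `startSession` options come together; the agent ID is a placeholder and the callback body is illustrative, not taken from this diff:

```ts
import { Conversation } from "@11labs/client";

// Placeholder agent ID; run inside an async context (or an ESM module with top-level await).
const conversation = await Conversation.startSession({
  agentId: "<your-agent-id>",              // SessionConfig
  onMessage: ({ source, message }) => {    // Partial<Callbacks>
    console.log(source, message);
  },
  clientTools: {},                         // Partial<ClientToolsConfig>
  preferHeadphonesForIosDevices: true,     // Partial<InputConfig>, new in 0.0.5-beta.2
});
```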
{ | ||
"name": "@11labs/client", | ||
"version": "0.0.5-beta.1", | ||
"version": "0.0.5-beta.2", | ||
"description": "ElevenLabs JavaScript Client Library", | ||
@@ -5,0 +5,0 @@ "main": "./dist/lib.umd.js", |
@@ -37,7 +37,12 @@ # ElevenLabs JavaScript Client Library | ||
This will kick off the WebSocket connection and start using the microphone to communicate with the ElevenLabs Conversational AI agent. Consider explaining and allowing microphone access in your app's UI before the Conversation kicks off: | |
This will kick off the WebSocket connection and start using the microphone to communicate with the ElevenLabs Conversational AI agent. Consider explaining and allowing microphone access in your app's UI before the Conversation kicks off. The microphone may also be blocked for the current page by default, in which case the permission prompt will not appear at all. Handle this case in your application and display an appropriate message to the user: | |
```js | ||
// call after explaining to the user why microphone access is needed | |
await navigator.mediaDevices.getUserMedia({ audio: true }); | |
// handle errors and show an appropriate message to the user | |
try { | |
  await navigator.mediaDevices.getUserMedia({ audio: true }); | |
} catch { | |
  // handle the error, e.g. permission denied or microphone access blocked for this page | |
} | |
``` | ||
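To detect a page-level block before prompting, you can optionally consult the Permissions API where it is available. This is a sketch, not part of this SDK, and the `"microphone"` permission name is not supported in every browser:

```ts
// Sketch only: the "microphone" permission name is not universally supported,
// so the query itself is wrapped in a try/catch.
async function isMicrophoneBlocked(): Promise<boolean> {
  try {
    const status = await navigator.permissions.query({
      name: "microphone" as PermissionName,
    });
    return status.state === "denied"; // blocked for this page; the prompt will not appear
  } catch {
    return false; // API or permission name unsupported; rely on getUserMedia errors instead
  }
}
```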
@@ -158,2 +163,12 @@ | ||
#### Prefer Headphones for iOS Devices | ||
While this SDK leaves the choice of audio input/output device to the browser/system, iOS Safari seems to prefer the built-in speaker over headphones, even when a Bluetooth device is in use. If you want to "force" the use of headphones on iOS devices when they are available, you can use the following option. Keep in mind that this is not guaranteed, since the browser does not expose this functionality directly; the system default should remain the first choice. | |
```ts | ||
const conversation = await Conversation.startSession({ | ||
preferHeadphonesForIosDevices: true, | ||
}); | ||
``` | ||
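Under the hood this is best-effort: on iOS-like devices the input module enumerates audio inputs and, when it finds a device whose label contains "airpod", "headphone", or "earphone", passes its `deviceId` as an *ideal* (not required) constraint to `getUserMedia`. A simplified reconstruction of that matching step (not the exact bundled source):

```ts
// Simplified reconstruction: prefer a headphone-like input device when one is present.
async function findHeadphoneLikeInput(): Promise<string | undefined> {
  const devices = await navigator.mediaDevices.enumerateDevices();
  const match = devices.find(
    (d) =>
      d.kind === "audioinput" &&
      ["airpod", "headphone", "earphone"].some((k) =>
        d.label.toLowerCase().includes(k)
      )
  );
  // If found, the SDK uses it as { deviceId: { ideal: match.deviceId } } in the audio constraints.
  return match?.deviceId;
}
```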
#### Return value | ||
@@ -160,0 +175,0 @@ |