@speechly/browser-client - npm Package Compare versions

Comparing version 1.3.0 to 1.4.0


core/speechly.umd.min.js

@@ -1,2 +1,2 @@

!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).Speechly={})}(this,(function(t){"use strict";var e;function n(t){var e;return null!==(e=s.get(t))&&void 0!==e?e:i}t.ClientState=void 0,(e=t.ClientState||(t.ClientState={}))[e.Failed=0]="Failed",e[e.NoBrowserSupport=1]="NoBrowserSupport",e[e.NoAudioConsent=2]="NoAudioConsent",e[e.Disconnected=3]="Disconnected",e[e.Disconnecting=4]="Disconnecting",e[e.Connecting=5]="Connecting",e[e.Connected=6]="Connected",e[e.Starting=7]="Starting",e[e.Stopping=8]="Stopping",e[e.Recording=9]="Recording";const i="Unknown",s=new Map([[t.ClientState.Failed,"Failed"],[t.ClientState.NoBrowserSupport,"NoBrowserSupport"],[t.ClientState.NoAudioConsent,"NoAudioConsent"],[t.ClientState.Disconnecting,"Disconnecting"],[t.ClientState.Disconnected,"Disconnected"],[t.ClientState.Connecting,"Connecting"],[t.ClientState.Connected,"Connected"],[t.ClientState.Starting,"Starting"],[t.ClientState.Stopping,"Stopping"],[t.ClientState.Recording,"Recording"]]);
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).Speechly={})}(this,(function(t){"use strict";var e;function n(t){var e;return null!==(e=s.get(t))&&void 0!==e?e:i}t.ClientState=void 0,(e=t.ClientState||(t.ClientState={}))[e.Failed=0]="Failed",e[e.NoBrowserSupport=1]="NoBrowserSupport",e[e.NoAudioConsent=2]="NoAudioConsent",e[e.__UnrecoverableErrors=3]="__UnrecoverableErrors",e[e.Disconnected=4]="Disconnected",e[e.Disconnecting=5]="Disconnecting",e[e.Connecting=6]="Connecting",e[e.Preinitialized=7]="Preinitialized",e[e.Initializing=8]="Initializing",e[e.Connected=9]="Connected",e[e.Stopping=10]="Stopping",e[e.Starting=11]="Starting",e[e.Recording=12]="Recording";const i="Unknown",s=new Map([[t.ClientState.Failed,"Failed"],[t.ClientState.NoBrowserSupport,"NoBrowserSupport"],[t.ClientState.NoAudioConsent,"NoAudioConsent"],[t.ClientState.Disconnecting,"Disconnecting"],[t.ClientState.Disconnected,"Disconnected"],[t.ClientState.Connecting,"Connecting"],[t.ClientState.Preinitialized,"Preinitialized"],[t.ClientState.Initializing,"Initializing"],[t.ClientState.Connected,"Connected"],[t.ClientState.Stopping,"Stopping"],[t.ClientState.Starting,"Starting"],[t.ClientState.Recording,"Recording"]]);
/*! *****************************************************************************

@@ -16,5 +16,5 @@ Copyright (c) Microsoft Corporation.

***************************************************************************** */
function o(t,e,n,i){return new(n||(n=Promise))((function(s,o){function a(t){try{c(i.next(t))}catch(t){o(t)}}function r(t){try{c(i.throw(t))}catch(t){o(t)}}function c(t){var e;t.done?s(t.value):(e=t.value,e instanceof n?e:new n((function(t){t(e)}))).then(a,r)}c((i=i.apply(t,e||[])).next())}))}var a,r=new Uint8Array(16);function c(){if(!a&&!(a="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return a(r)}var l=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function h(t){return"string"==typeof t&&l.test(t)}for(var d=[],p=0;p<256;++p)d.push((p+256).toString(16).substr(1));function u(t,e,n){var i=(t=t||{}).random||(t.rng||c)();if(i[6]=15&i[6]|64,i[8]=63&i[8]|128,e){n=n||0;for(var s=0;s<16;++s)e[n+s]=i[s];return e}return function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=(d[t[e+0]]+d[t[e+1]]+d[t[e+2]]+d[t[e+3]]+"-"+d[t[e+4]]+d[t[e+5]]+"-"+d[t[e+6]]+d[t[e+7]]+"-"+d[t[e+8]]+d[t[e+9]]+"-"+d[t[e+10]]+d[t[e+11]]+d[t[e+12]]+d[t[e+13]]+d[t[e+14]]+d[t[e+15]]).toLowerCase();if(!h(n))throw TypeError("Stringified UUID is invalid");return n}(i)}var f="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},S={exports:{}};
function o(t,e,n,i){return new(n||(n=Promise))((function(s,o){function a(t){try{l(i.next(t))}catch(t){o(t)}}function r(t){try{l(i.throw(t))}catch(t){o(t)}}function l(t){var e;t.done?s(t.value):(e=t.value,e instanceof n?e:new n((function(t){t(e)}))).then(a,r)}l((i=i.apply(t,e||[])).next())}))}var a,r=new Uint8Array(16);function l(){if(!a&&!(a="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return a(r)}var c=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function h(t){return"string"==typeof t&&c.test(t)}for(var d=[],u=0;u<256;++u)d.push((u+256).toString(16).substr(1));function p(t,e,n){var i=(t=t||{}).random||(t.rng||l)();if(i[6]=15&i[6]|64,i[8]=63&i[8]|128,e){n=n||0;for(var s=0;s<16;++s)e[n+s]=i[s];return e}return function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=(d[t[e+0]]+d[t[e+1]]+d[t[e+2]]+d[t[e+3]]+"-"+d[t[e+4]]+d[t[e+5]]+"-"+d[t[e+6]]+d[t[e+7]]+"-"+d[t[e+8]]+d[t[e+9]]+"-"+d[t[e+10]]+d[t[e+11]]+d[t[e+12]]+d[t[e+13]]+d[t[e+14]]+d[t[e+15]]).toLowerCase();if(!h(n))throw TypeError("Stringified UUID is invalid");return n}(i)}var f="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},S={exports:{}};
/*! http://mths.be/base64 v0.1.0 by @mathias | MIT license */
!function(t,e){!function(n){var i=e,s=t&&t.exports==i&&t,o="object"==typeof f&&f;o.global!==o&&o.window!==o||(n=o);var a=function(t){this.message=t};(a.prototype=new Error).name="InvalidCharacterError";var r=function(t){throw new a(t)},c="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",l=/[\t\n\f\r ]/g,h={encode:function(t){t=String(t),/[^\0-\xFF]/.test(t)&&r("The string to be encoded contains characters outside of the Latin1 range.");for(var e,n,i,s,o=t.length%3,a="",l=-1,h=t.length-o;++l<h;)e=t.charCodeAt(l)<<16,n=t.charCodeAt(++l)<<8,i=t.charCodeAt(++l),a+=c.charAt((s=e+n+i)>>18&63)+c.charAt(s>>12&63)+c.charAt(s>>6&63)+c.charAt(63&s);return 2==o?(e=t.charCodeAt(l)<<8,n=t.charCodeAt(++l),a+=c.charAt((s=e+n)>>10)+c.charAt(s>>4&63)+c.charAt(s<<2&63)+"="):1==o&&(s=t.charCodeAt(l),a+=c.charAt(s>>2)+c.charAt(s<<4&63)+"=="),a},decode:function(t){var e=(t=String(t).replace(l,"")).length;e%4==0&&(e=(t=t.replace(/==?$/,"")).length),(e%4==1||/[^+a-zA-Z0-9/]/.test(t))&&r("Invalid character: the string to be decoded is not correctly encoded.");for(var n,i,s=0,o="",a=-1;++a<e;)i=c.indexOf(t.charAt(a)),n=s%4?64*n+i:i,s++%4&&(o+=String.fromCharCode(255&n>>(-2*s&6)));return o},version:"0.1.0"};if(i&&!i.nodeType)if(s)s.exports=h;else for(var d in h)h.hasOwnProperty(d)&&(i[d]=h[d]);else n.base64=h}(f)}(S,S.exports);function C(t,e,n,i,s=Date.now){const o=function(t){const e=t.split(".")[1];let n;try{n=JSON.parse(S.exports.decode(e))}catch(t){throw new Error("Error decoding Speechly token!")}return{appId:n.appId,projectId:n.projectId,deviceId:n.deviceId,configId:n.configId,scopes:n.scope.split(" "),issuer:n.iss,audience:n.aud,expiresAtMs:1e3*n.exp}}(t);return!(o.expiresAtMs-s()<36e5)&&(o.appId===n&&o.projectId===e&&o.deviceId===i)}const g=16e3,b=new Error("Microphone is not initialized"),v=new Error("Microphone is already initialized"),m=new Error("Current device does not support microphone API"),w=new Error("Microphone consent is no given"),y=new Error("AppId changed without project login");class k{constructor(t,e,n,i=!1){this.initialized=!1,this.muted=!1,this.stats={maxSignalEnergy:0},this.handleAudio=t=>{this.muted||t.length>0&&this.apiClient.sendAudio(t)},this.isWebkit=t,this.apiClient=n,this.sampleRate=e,this.debug=i}initialize(t,e){var n;return o(this,void 0,void 0,(function*(){if(void 0===(null===(n=window.navigator)||void 0===n?void 0:n.mediaDevices))throw m;this.audioContext=t,this.resampleRatio=this.audioContext.sampleRate/this.sampleRate;try{this.mediaStream=yield window.navigator.mediaDevices.getUserMedia(e)}catch(t){throw w}if(this.audioTrack=this.mediaStream.getAudioTracks()[0],this.isWebkit||(yield this.audioContext.resume()),void 0!==window.AudioWorkletNode){const t=new Blob(["\n// Indices for the Control SAB.\nconst CONTROL = {\n 'WRITE_INDEX': 0,\n 'FRAMES_AVAILABLE': 1,\n 'LOCK': 2,\n};\n\nclass SpeechlyProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n\n this._initialized = false;\n this.debug = false;\n this.port.onmessage = this._initialize.bind(this);\n }\n\n _initialize(event) {\n this.controlSAB = new Int32Array(event.data.controlSAB);\n this.dataSAB = new Float32Array(event.data.dataSAB);\n this.debug = event.data.debug;\n this.sharedBufferSize = this.dataSAB.length;\n this.buffer = new Float32Array(0);\n this._initialized = true;\n }\n\n _transferDataToSharedBuffer(data) {\n this.controlSAB[CONTROL.LOCK] = 1\n let inputWriteIndex = this.controlSAB[CONTROL.WRITE_INDEX]\n if (this.controlSAB[CONTROL.FRAMES_AVAILABLE] > 0) {\n if 
(inputWriteIndex + data.length > this.sharedBufferSize) {\n // console.log('buffer overflow')\n inputWriteIndex = 0\n }\n }\n this.dataSAB.set(data, inputWriteIndex)\n this.controlSAB[CONTROL.WRITE_INDEX] = inputWriteIndex + data.length\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = inputWriteIndex + data.length\n this.controlSAB[CONTROL.LOCK] = 0\n }\n\n _pushData(data) {\n if (this.debug) {\n const signalEnergy = getStandardDeviation(data)\n this.port.postMessage({\n type: 'STATS',\n signalEnergy: signalEnergy\n });\n }\n\n if (this.buffer.length > this.sharedBufferSize) {\n const dataToTransfer = this.buffer.subarray(0, this.sharedBufferSize)\n this._transferDataToSharedBuffer(dataToTransfer)\n this.buffer = this.buffer.subarray(this.sharedBufferSize)\n }\n let concat = new Float32Array(this.buffer.length + data.length)\n concat.set(this.buffer)\n concat.set(data, this.buffer.length)\n this.buffer = concat\n }\n\n process(inputs, outputs, parameters) {\n const inputChannelData = inputs[0][0];\n if (inputChannelData !== undefined) {\n if (this.controlSAB && this.dataSAB) {\n this._pushData(inputChannelData);\n } else {\n this.port.postMessage({\n type: 'DATA',\n frames: inputChannelData\n });\n }\n }\n \n return true;\n }\n}\n\nfunction getStandardDeviation(array) {\n const n = array.length\n const mean = array.reduce((a, b) => a + b) / n\n return Math.sqrt(array.map(x => Math.pow(x - mean, 2)).reduce((a, b) => a + b) / n)\n}\n\nregisterProcessor('speechly-worklet', SpeechlyProcessor);\n"],{type:"text/javascript"}),e=window.URL.createObjectURL(t);yield this.audioContext.audioWorklet.addModule(e);const n=new AudioWorkletNode(this.audioContext,"speechly-worklet");if(this.audioContext.createMediaStreamSource(this.mediaStream).connect(n),n.connect(this.audioContext.destination),void 0!==window.SharedArrayBuffer){const t=new window.SharedArrayBuffer(4*Int32Array.BYTES_PER_ELEMENT),e=new window.SharedArrayBuffer(1024*Float32Array.BYTES_PER_ELEMENT);this.apiClient.postMessage({type:"SET_SHARED_ARRAY_BUFFERS",controlSAB:t,dataSAB:e}),n.port.postMessage({type:"SET_SHARED_ARRAY_BUFFERS",controlSAB:t,dataSAB:e,debug:this.debug})}else this.debug&&console.log("[SpeechlyClient]","can not use SharedArrayBuffer");n.port.onmessage=t=>{switch(t.data.type){case"STATS":t.data.signalEnergy>this.stats.maxSignalEnergy&&(this.stats.maxSignalEnergy=t.data.signalEnergy);break;case"DATA":this.handleAudio(t.data.frames)}}}else{if(this.debug&&console.log("[SpeechlyClient]","can not use AudioWorkletNode"),this.isWebkit){const t=4096*Math.pow(2,Math.ceil(Math.log(this.resampleRatio)/Math.log(2)));this.audioProcessor=this.audioContext.createScriptProcessor(t,1,1)}else this.audioProcessor=this.audioContext.createScriptProcessor(void 0,1,1);this.audioContext.createMediaStreamSource(this.mediaStream).connect(this.audioProcessor),this.audioProcessor.connect(this.audioContext.destination),this.audioProcessor.addEventListener("audioprocess",(t=>{this.handleAudio(t.inputBuffer.getChannelData(0))}))}this.initialized=!0,this.mute()}))}close(){return o(this,void 0,void 0,(function*(){if(this.mute(),!this.initialized)throw b;this.audioTrack.enabled=!1;if(this.mediaStream.getTracks().forEach((t=>t.stop())),null!=this.audioProcessor){this.audioProcessor.disconnect()}this.mediaStream=void 0,this.audioTrack=void 0,this.audioProcessor=void 0,this.initialized=!1}))}mute(){this.muted=!0}unmute(){this.muted=!1}printStats(){if(null!=this.audioTrack){const 
t=this.audioTrack.getSettings();console.log(this.audioTrack.label,this.audioTrack.readyState),console.log("channelCount",t.channelCount),console.log("latency",t.latency),console.log("autoGainControl",t.autoGainControl)}console.log("maxSignalEnergy",this.stats.maxSignalEnergy)}}var A;t.WebsocketResponseType=void 0,(A=t.WebsocketResponseType||(t.WebsocketResponseType={})).Opened="WEBSOCKET_OPEN",A.Closed="WEBSOCKET_CLOSED",A.SourceSampleRateSetSuccess="SOURSE_SAMPLE_RATE_SET_SUCCESS",A.Started="started",A.Stopped="stopped",A.SegmentEnd="segment_end",A.Transcript="transcript",A.Entity="entity",A.Intent="intent",A.TentativeTranscript="tentative_transcript",A.TentativeEntities="tentative_entities",A.TentativeIntent="tentative_intent";class E{constructor(){this.startCbs=[],this.stopCbs=[],this.onResponseCb=()=>{},this.onCloseCb=()=>{},this.onWebsocketMessage=e=>{const n=e.data;switch(n.type){case t.WebsocketResponseType.Opened:null!=this.resolveInitialization&&this.resolveInitialization();break;case t.WebsocketResponseType.Closed:this.onCloseCb({code:e.data.code,reason:e.data.reason,wasClean:e.data.wasClean});break;case t.WebsocketResponseType.SourceSampleRateSetSuccess:null!=this.resolveSourceSampleRateSet&&this.resolveSourceSampleRateSet();break;case t.WebsocketResponseType.Started:this.startCbs.forEach((t=>{try{t(void 0,n.audio_context)}catch(t){console.error('[SpeechlyClient] Error while invoking "onStart" callback:',t)}})),this.startCbs.length=0;break;case t.WebsocketResponseType.Stopped:this.stopCbs.forEach((t=>{try{t(void 0,n.audio_context)}catch(t){console.error('[SpeechlyClient] Error while invoking "onStop" callback:',t)}})),this.stopCbs.length=0;break;default:this.onResponseCb(n)}};const e=new Blob(["/**\n * Known WebSocket response types.\n * @public\n */\nvar WebsocketResponseType;\n(function (WebsocketResponseType) {\n WebsocketResponseType[\"Opened\"] = \"WEBSOCKET_OPEN\";\n WebsocketResponseType[\"SourceSampleRateSetSuccess\"] = \"SOURSE_SAMPLE_RATE_SET_SUCCESS\";\n WebsocketResponseType[\"Started\"] = \"started\";\n WebsocketResponseType[\"Stopped\"] = \"stopped\";\n})(WebsocketResponseType || (WebsocketResponseType = {}));\nvar CONTROL = {\n WRITE_INDEX: 0,\n FRAMES_AVAILABLE: 1,\n LOCK: 2\n};\nvar WebsocketClient = /** @class */ (function () {\n function WebsocketClient(ctx) {\n var _this = this;\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n this.shouldResendLastFramesSent = false;\n this.buffer = new Float32Array(0);\n this.lastFramesSent = new Int16Array(0); // to re-send after switch context\n this.debug = false;\n this.initialized = false;\n // WebSocket's close handler, called e.g. 
when\n // - normal close (code 1000)\n // - network unreachable or unable to (re)connect (code 1006)\n // List of CloseEvent.code values: https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent/code\n this.onWebsocketClose = function (event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'onWebsocketClose');\n }\n _this.websocket.removeEventListener('open', _this.onWebsocketOpen);\n _this.websocket.removeEventListener('message', _this.onWebsocketMessage);\n _this.websocket.removeEventListener('error', _this.onWebsocketError);\n _this.websocket.removeEventListener('close', _this.onWebsocketClose);\n _this.websocket = undefined;\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_CLOSED', code: event.code, reason: event.reason, wasClean: event.wasClean });\n };\n this.onWebsocketOpen = function (_event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'websocket opened');\n }\n if (_this.isContextStarted && !_this.isStartContextConfirmed) {\n _this.send(_this.outbox);\n }\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_OPEN' });\n };\n this.onWebsocketError = function (_event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'websocket error');\n }\n };\n this.onWebsocketMessage = function (event) {\n var response;\n try {\n response = JSON.parse(event.data);\n }\n catch (e) {\n console.error('[SpeechlyClient] Error parsing response from the server:', e);\n return;\n }\n if (response.type === WebsocketResponseType.Started) {\n _this.isStartContextConfirmed = true;\n if (_this.shouldResendLastFramesSent) {\n _this.resendLastFrames();\n _this.shouldResendLastFramesSent = false;\n }\n }\n _this.workerCtx.postMessage(response);\n };\n this.workerCtx = ctx;\n }\n WebsocketClient.prototype.init = function (apiUrl, authToken, targetSampleRate, debug) {\n this.debug = debug;\n if (this.debug) {\n console.log('[SpeechlyClient]', 'initialize worker');\n }\n this.apiUrl = apiUrl;\n this.authToken = authToken;\n this.targetSampleRate = targetSampleRate;\n this.initialized = true;\n this.connect(0);\n };\n WebsocketClient.prototype.setSourceSampleRate = function (sourceSampleRate) {\n this.sourceSampleRate = sourceSampleRate;\n this.resampleRatio = this.sourceSampleRate / this.targetSampleRate;\n if (this.debug) {\n console.log('[SpeechlyClient]', 'resampleRatio', this.resampleRatio);\n }\n if (this.resampleRatio > 1) {\n this.filter = generateFilter(this.sourceSampleRate, this.targetSampleRate, 127);\n }\n this.workerCtx.postMessage({ type: 'SOURSE_SAMPLE_RATE_SET_SUCCESS' });\n if (isNaN(this.resampleRatio)) {\n throw Error(\"resampleRatio is NaN source rate is \".concat(this.sourceSampleRate, \" and target rate is \").concat(this.targetSampleRate));\n }\n };\n WebsocketClient.prototype.setSharedArrayBuffers = function (controlSAB, dataSAB) {\n this.controlSAB = new Int32Array(controlSAB);\n this.dataSAB = new Float32Array(dataSAB);\n var audioHandleInterval = this.dataSAB.length / 32; // ms\n if (this.debug) {\n console.log('[SpeechlyClient]', 'Audio handle interval', audioHandleInterval, 'ms');\n }\n setInterval(this.sendAudioFromSAB.bind(this), audioHandleInterval);\n };\n WebsocketClient.prototype.connect = function (timeout) {\n if (timeout === void 0) { timeout = 1000; }\n if (this.debug) {\n console.log('[SpeechlyClient]', 'connect in ', timeout / 1000, 'sec');\n }\n setTimeout(this.initializeWebsocket.bind(this), timeout);\n };\n WebsocketClient.prototype.initializeWebsocket = function () {\n if (this.debug) {\n console.log('[SpeechlyClient]', 'connecting to ', 
this.apiUrl);\n }\n this.websocket = new WebSocket(this.apiUrl, this.authToken);\n this.websocket.addEventListener('open', this.onWebsocketOpen);\n this.websocket.addEventListener('message', this.onWebsocketMessage);\n this.websocket.addEventListener('error', this.onWebsocketError);\n this.websocket.addEventListener('close', this.onWebsocketClose);\n };\n WebsocketClient.prototype.isOpen = function () {\n return this.websocket !== undefined && this.websocket.readyState === this.websocket.OPEN;\n };\n WebsocketClient.prototype.resendLastFrames = function () {\n if (this.lastFramesSent.length > 0) {\n this.send(this.lastFramesSent);\n this.lastFramesSent = new Int16Array(0);\n }\n };\n WebsocketClient.prototype.sendAudio = function (audioChunk) {\n if (!this.isContextStarted) {\n return;\n }\n if (audioChunk.length > 0) {\n if (this.resampleRatio > 1) {\n // Downsampling\n this.send(this.downsample(audioChunk));\n }\n else {\n this.send(float32ToInt16(audioChunk));\n }\n }\n };\n WebsocketClient.prototype.sendAudioFromSAB = function () {\n if (!this.isContextStarted) {\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n return;\n }\n if (this.controlSAB == undefined) {\n return;\n }\n var framesAvailable = this.controlSAB[CONTROL.FRAMES_AVAILABLE];\n var lock = this.controlSAB[CONTROL.LOCK];\n if (lock == 0 && framesAvailable > 0) {\n var data = this.dataSAB.subarray(0, framesAvailable);\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n if (data.length > 0) {\n var frames_1;\n if (this.resampleRatio > 1) {\n frames_1 = this.downsample(data);\n }\n else {\n frames_1 = float32ToInt16(data);\n }\n this.send(frames_1);\n // 16000 per second, 1000 in 100 ms\n // save last 250 ms\n if (this.lastFramesSent.length > 1024 * 4) {\n this.lastFramesSent = frames_1;\n }\n else {\n var concat = new Int16Array(this.lastFramesSent.length + frames_1.length);\n concat.set(this.lastFramesSent);\n concat.set(frames_1, this.lastFramesSent.length);\n this.lastFramesSent = concat;\n }\n }\n }\n };\n WebsocketClient.prototype.startContext = function (appId) {\n if (this.isContextStarted) {\n console.log('Cant start context: it has been already started');\n return;\n }\n this.isContextStarted = true;\n this.isStartContextConfirmed = false;\n if (appId !== undefined) {\n this.outbox = JSON.stringify({ event: 'start', appId: appId });\n }\n else {\n this.outbox = JSON.stringify({ event: 'start' });\n }\n this.send(this.outbox);\n };\n WebsocketClient.prototype.stopContext = function () {\n if (!this.websocket) {\n throw Error('Cant start context: websocket is undefined');\n }\n if (!this.isContextStarted) {\n console.log('Cant stop context: it is not started');\n return;\n }\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n };\n WebsocketClient.prototype.switchContext = function (newAppId) {\n if (!this.websocket) {\n throw Error('Cant switch context: websocket is undefined');\n }\n if (!this.isContextStarted) {\n console.log('Cant switch context: it is not started');\n return;\n }\n if (newAppId == undefined) {\n console.log('Cant switch context: new app id is undefined');\n return;\n }\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n this.shouldResendLastFramesSent = true;\n this.send(JSON.stringify({ event: 'start', appId: newAppId }));\n };\n 
WebsocketClient.prototype.closeWebsocket = function (websocketCode, reason) {\n if (websocketCode === void 0) { websocketCode = 1005; }\n if (reason === void 0) { reason = \"No Status Received\"; }\n if (this.debug) {\n console.log('[SpeechlyClient]', 'Websocket closing');\n }\n if (!this.websocket) {\n throw Error('Websocket is not open');\n }\n this.websocket.close(websocketCode, reason);\n };\n WebsocketClient.prototype.downsample = function (input) {\n var inputBuffer = new Float32Array(this.buffer.length + input.length);\n inputBuffer.set(this.buffer, 0);\n inputBuffer.set(input, this.buffer.length);\n var outputLength = Math.ceil((inputBuffer.length - this.filter.length) / this.resampleRatio);\n var outputBuffer = new Int16Array(outputLength);\n for (var i = 0; i < outputLength; i++) {\n var offset = Math.round(this.resampleRatio * i);\n var val = 0.0;\n for (var j = 0; j < this.filter.length; j++) {\n val += inputBuffer[offset + j] * this.filter[j];\n }\n outputBuffer[i] = val * (val < 0 ? 0x8000 : 0x7fff);\n }\n var remainingOffset = Math.round(this.resampleRatio * outputLength);\n if (remainingOffset < inputBuffer.length) {\n this.buffer = inputBuffer.subarray(remainingOffset);\n }\n else {\n this.buffer = new Float32Array(0);\n }\n return outputBuffer;\n };\n WebsocketClient.prototype.send = function (data) {\n if (!this.isOpen()) {\n throw Error('Cant send data: websocket is inactive');\n }\n try {\n this.websocket.send(data);\n }\n catch (error) {\n console.log('[SpeechlyClient]', 'Server connection error', error);\n }\n };\n return WebsocketClient;\n}());\nvar ctx = self;\nvar websocketClient = new WebsocketClient(ctx);\nctx.onmessage = function (e) {\n switch (e.data.type) {\n case 'INIT':\n websocketClient.init(e.data.apiUrl, e.data.authToken, e.data.targetSampleRate, e.data.debug);\n break;\n case 'SET_SOURSE_SAMPLE_RATE':\n websocketClient.setSourceSampleRate(e.data.sourceSampleRate);\n break;\n case 'SET_SHARED_ARRAY_BUFFERS':\n websocketClient.setSharedArrayBuffers(e.data.controlSAB, e.data.dataSAB);\n break;\n case 'CLOSE':\n websocketClient.closeWebsocket(1000, \"Close requested by client\");\n break;\n case 'START_CONTEXT':\n websocketClient.startContext(e.data.appId);\n break;\n case 'SWITCH_CONTEXT':\n websocketClient.switchContext(e.data.appId);\n break;\n case 'STOP_CONTEXT':\n websocketClient.stopContext();\n break;\n case 'AUDIO':\n websocketClient.sendAudio(e.data.payload);\n break;\n default:\n console.log('WORKER', e);\n }\n};\nfunction float32ToInt16(buffer) {\n var buf = new Int16Array(buffer.length);\n for (var l = 0; l < buffer.length; l++) {\n buf[l] = buffer[l] * (buffer[l] < 0 ? 
0x8000 : 0x7fff);\n }\n return buf;\n}\nfunction generateFilter(sourceSampleRate, targetSampleRate, length) {\n if (length % 2 === 0) {\n throw Error('Filter length must be odd');\n }\n var cutoff = targetSampleRate / 2;\n var filter = new Float32Array(length);\n var sum = 0;\n for (var i = 0; i < length; i++) {\n var x = sinc(((2 * cutoff) / sourceSampleRate) * (i - (length - 1) / 2));\n sum += x;\n filter[i] = x;\n }\n for (var i = 0; i < length; i++) {\n filter[i] = filter[i] / sum;\n }\n return filter;\n}\nfunction sinc(x) {\n if (x === 0.0) {\n return 1.0;\n }\n var piX = Math.PI * x;\n return Math.sin(piX) / piX;\n}\n"],{type:"text/javascript"}),n=window.URL.createObjectURL(e);this.worker=new Worker(n),this.worker.addEventListener("message",this.onWebsocketMessage)}onResponse(t){this.onResponseCb=t}onClose(t){this.onCloseCb=t}initialize(t,e,n,i){return o(this,void 0,void 0,(function*(){return this.worker.postMessage({type:"INIT",apiUrl:t,authToken:e,targetSampleRate:n,debug:i}),new Promise((t=>{this.resolveInitialization=t}))}))}setSourceSampleRate(t){return o(this,void 0,void 0,(function*(){return this.worker.postMessage({type:"SET_SOURSE_SAMPLE_RATE",sourceSampleRate:t}),new Promise((t=>{this.resolveSourceSampleRateSet=t}))}))}close(){return o(this,void 0,void 0,(function*(){return new Promise(((t,e)=>{this.worker.postMessage({type:"CLOSE",code:1e3,message:"Client has ended the session"}),t()}))}))}startContext(t){return o(this,void 0,void 0,(function*(){return new Promise(((e,n)=>{this.startCbs.push(((t,i)=>{void 0!==t?n(t):e(i)})),null!=t?this.worker.postMessage({type:"START_CONTEXT",appId:t}):this.worker.postMessage({type:"START_CONTEXT"})}))}))}stopContext(){return o(this,void 0,void 0,(function*(){return new Promise(((t,e)=>{this.stopCbs.push(((n,i)=>{void 0!==n?e(n):t(i)})),this.worker.postMessage({type:"STOP_CONTEXT"})}))}))}switchContext(t){return o(this,void 0,void 0,(function*(){return new Promise(((e,n)=>{this.startCbs.push(((t,i)=>{void 0!==t?n(t):e(i)})),this.worker.postMessage({type:"SWITCH_CONTEXT",appId:t})}))}))}postMessage(t){this.worker.postMessage(t)}sendAudio(t){this.worker.postMessage({type:"AUDIO",payload:t})}}class R{constructor(){this.storage=window.localStorage}get(t){return this.storage.getItem(t)}set(t,e){this.storage.setItem(t,e)}getOrSet(t,e){let n=this.storage.getItem(t);return null===n&&(n=e(),this.storage.setItem(t,n)),n}}const T=new Error("Current device does not support storage API"),x=new Error("Requested key was not present in storage");class I{constructor(t,e){this.isFinalized=!1,this.words=[],this.entities=new Map,this.intent={intent:"",isFinal:!1},this.contextId=t,this.id=e}toSegment(){let t=0;const e=new Array(this.entities.size);return this.entities.forEach((n=>{e[t]=n,t++})),{id:this.id,contextId:this.contextId,isFinal:this.isFinalized,words:this.words,entities:e,intent:this.intent}}toString(){const t=this.toSegment(),e=t.words.filter((t=>t.value)).map((t=>({value:t.value,index:t.index}))),n=Object.assign(Object.assign({},t),{words:e});return JSON.stringify(n,null,2)}updateTranscript(t){return t.forEach((t=>{this.isFinalized&&!t.isFinal||(this.words[t.index]=t)})),this}updateEntities(t){return t.forEach((t=>{this.isFinalized&&!t.isFinal||this.entities.set(function(t){return`${t.startPosition.toString()}:${t.endPosition.toString()}`}(t),t)})),this}updateIntent(t){return this.isFinalized&&!t.isFinal||(this.intent=t),this}finalize(){return 
this.entities.forEach(((t,e)=>{t.isFinal||this.entities.delete(e)})),this.words=this.words.filter((t=>t.isFinal)),this.intent.isFinal||(this.intent.intent="",this.intent.isFinal=!0),this.isFinalized=!0,this}}function _(t,e){return{intent:t.intent,isFinal:e}}const W="speechly-auth-token";t.Client=class{constructor(e){var n,i,s,o,a,r,c,l,h,d,p;this.activeContexts=new Map,this.maxReconnectAttemptCount=10,this.contextStopDelay=250,this.connectAttempt=0,this.state=t.ClientState.Disconnected,this.stateChangeCb=()=>{},this.segmentChangeCb=()=>{},this.tentativeTranscriptCb=()=>{},this.tentativeEntitiesCb=()=>{},this.tentativeIntentCb=()=>{},this.transcriptCb=()=>{},this.entityCb=()=>{},this.intentCb=()=>{},this.handleWebsocketResponse=e=>{var n;this.debug&&console.log("[SpeechlyClient]","Received response",e);const{audio_context:i,segment_id:s,type:o}=e;let{data:a}=e;const r=this.activeContexts.get(i);if(void 0===r)return void console.warn("[SpeechlyClient]","Received response for non-existent context",i);let c=null!==(n=r.get(s))&&void 0!==n?n:new I(i,s);switch(o){case t.WebsocketResponseType.TentativeTranscript:a=a;const e=function(t){return t.words.map((({word:t,index:e,start_timestamp:n,end_timestamp:i})=>({value:t,index:e,startTimestamp:n,endTimestamp:i,isFinal:!1})))}(a);this.tentativeTranscriptCb(i,s,e,a.transcript),c=c.updateTranscript(e);break;case t.WebsocketResponseType.Transcript:a=a;const n=function(t){return{value:t.word,index:t.index,startTimestamp:t.start_timestamp,endTimestamp:t.end_timestamp,isFinal:!0}}(a);this.transcriptCb(i,s,n),c=c.updateTranscript([n]);break;case t.WebsocketResponseType.TentativeEntities:a=a;const o=function(t){return t.entities.map((({entity:t,value:e,start_position:n,end_position:i})=>({type:t,value:e,startPosition:n,endPosition:i,isFinal:!1})))}(a);this.tentativeEntitiesCb(i,s,o),c=c.updateEntities(o);break;case t.WebsocketResponseType.Entity:a=a;const r=function(t){return{type:t.entity,value:t.value,startPosition:t.start_position,endPosition:t.end_position,isFinal:!0}}(a);this.entityCb(i,s,r),c=c.updateEntities([r]);break;case t.WebsocketResponseType.TentativeIntent:a=a;const l=_(a,!1);this.tentativeIntentCb(i,s,l),c=c.updateIntent(l);break;case t.WebsocketResponseType.Intent:a=a;const h=_(a,!0);this.intentCb(i,s,h),c=c.updateIntent(h);break;case t.WebsocketResponseType.SegmentEnd:c=c.finalize()}r.set(s,c),this.activeContexts.set(i,r),this.logSegments&&console.info(c.toString()),this.segmentChangeCb(c.toSegment())},this.handleWebsocketClosure=e=>{if(1e3===e.code)this.debug&&console.log("[SpeechlyClient]","Websocket closed",e);else{if(this.debug&&console.error("[SpeechlyClient]","Websocket closed due to error",e),void 0===this.deviceId)return void this.setState(t.ClientState.Failed);this.reconnect()}},this.sampleRate=null!==(n=e.sampleRate)&&void 0!==n?n:g;try{const t=window.navigator.mediaDevices.getSupportedConstraints();this.nativeResamplingSupported=!0===t.sampleRate,null!=e.autoGainControl&&e.autoGainControl?this.autoGainControl=!0===t.autoGainControl:this.autoGainControl=!1}catch(t){this.nativeResamplingSupported=!1,this.autoGainControl=!1}if(this.debug=null!==(i=e.debug)&&void 0!==i&&i,this.logSegments=null!==(s=e.logSegments)&&void 0!==s&&s,this.loginUrl=null!==(o=e.loginUrl)&&void 0!==o?o:"https://api.speechly.com/login",this.appId=null!==(a=e.appId)&&void 0!==a?a:void 0,this.projectId=null!==(r=e.projectId)&&void 0!==r?r:void 0,this.apiClient=null!==(c=e.apiClient)&&void 0!==c?c:new E,this.apiUrl=function(t,e){const n=new URLSearchParams;return 
n.append("sampleRate",e.toString()),`${t}?${n.toString()}`}(null!==(l=e.apiUrl)&&void 0!==l?l:"wss://api.speechly.com/ws/v1",null!==(h=e.sampleRate)&&void 0!==h?h:g),void 0!==this.appId&&void 0!==this.projectId)throw Error("[SpeechlyClient] You cannot use both appId and projectId at the same time");if(this.storage=null!==(d=e.storage)&&void 0!==d?d:new R,this.deviceId=this.storage.getOrSet("speechly-device-id",u),void 0!==window.AudioContext)this.isWebkit=!1;else{if(void 0===window.webkitAudioContext)throw m;this.isWebkit=!0}this.microphone=null!==(p=e.microphone)&&void 0!==p?p:new k(this.isWebkit,this.sampleRate,this.apiClient,this.debug),this.apiClient.onResponse(this.handleWebsocketResponse),this.apiClient.onClose(this.handleWebsocketClosure),this.connectPromise=null,this.initializePromise=null,window.SpeechlyClient=this,!1!==e.connect&&this.connect()}getReconnectDelayMs(t){return 100*Math.pow(2,t)}sleep(t){return o(this,void 0,void 0,(function*(){return new Promise((e=>setTimeout(e,t)))}))}connect(){return o(this,void 0,void 0,(function*(){null===this.connectPromise&&(this.connectPromise=(()=>o(this,void 0,void 0,(function*(){yield this.sleep(this.getReconnectDelayMs(this.connectAttempt++));const e=this.storage.get(W);if(null!=e&&C(e,this.projectId,this.appId,this.deviceId))this.authToken=e;else try{this.authToken=yield function(t,e,n,i,s=fetch,a=Date.now){var r;return o(this,void 0,void 0,(function*(){let o;o=void 0!==e?{projectId:e,deviceId:i}:{appId:n,deviceId:i};const c=yield s(t,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(o)}),l=yield c.json();if(200!==c.status)throw Error(null!==(r=l.error)&&void 0!==r?r:`Speechly API login request failed with ${c.status}`);if(void 0===l.access_token)throw Error("Invalid login response from Speechly API");if(!C(l.access_token,e,n,i,a))throw Error("Invalid token received from Speechly API");return l.access_token}))}(this.loginUrl,this.projectId,this.appId,this.deviceId),this.storage.set(W,this.authToken)}catch(e){throw this.setState(t.ClientState.Failed),e}try{yield this.apiClient.initialize(this.apiUrl,this.authToken,this.sampleRate,this.debug)}catch(e){throw this.setState(t.ClientState.Failed),e}})))()),yield this.connectPromise}))}initialize(){return o(this,void 0,void 0,(function*(){yield this.connect(),null===this.initializePromise&&(this.initializePromise=(()=>o(this,void 0,void 0,(function*(){this.setState(t.ClientState.Connecting);try{if(this.isWebkit)void 0!==window.webkitAudioContext&&(this.audioContext=new window.webkitAudioContext);else{const t={};this.nativeResamplingSupported&&(t.sampleRate=this.sampleRate),this.audioContext=new window.AudioContext(t)}const t={video:!1};if(this.nativeResamplingSupported||this.autoGainControl?t.audio={sampleRate:this.sampleRate,autoGainControl:this.autoGainControl}:t.audio=!0,null==this.audioContext)throw m;this.isWebkit&&(yield this.audioContext.resume()),yield this.apiClient.setSourceSampleRate(this.audioContext.sampleRate),yield this.microphone.initialize(this.audioContext,t)}catch(e){switch(e){case m:this.setState(t.ClientState.NoBrowserSupport);break;case w:this.setState(t.ClientState.NoAudioConsent);break;default:this.setState(t.ClientState.Failed)}throw e}this.setState(t.ClientState.Connected)})))()),yield this.initializePromise}))}close(){return o(this,void 0,void 0,(function*(){const e=[];try{yield this.microphone.close()}catch(t){e.push(t.message)}try{yield 
this.apiClient.close()}catch(t){e.push(t.message)}if(this.activeContexts.clear(),this.connectPromise=null,this.initializePromise=null,this.setState(t.ClientState.Disconnected),e.length>0)throw Error(e.join(","))}))}switchContext(e){return o(this,void 0,void 0,(function*(){if(this.state===t.ClientState.Recording){this.resolveStopContext=void 0;const t=yield this.apiClient.switchContext(e);this.activeContexts.set(t,new Map)}}))}startContext(e){return o(this,void 0,void 0,(function*(){if(yield this.initialize(),null!=this.resolveStopContext&&(this.resolveStopContext(),yield this.stoppedContextIdPromise),this.state===t.ClientState.Disconnected||this.state===t.ClientState.Connecting)throw Error("Cannot start context - client is not connected");this.setState(t.ClientState.Starting);return yield this._startContext(e)}))}_startContext(e){return o(this,void 0,void 0,(function*(){let n;try{if(null!=this.projectId)n=yield this.apiClient.startContext(e);else{if(null!=e&&this.appId!==e)throw y;n=yield this.apiClient.startContext()}}catch(e){if(e===y)this.setState(t.ClientState.Failed);else this.setState(t.ClientState.Connected);throw e}return this.setState(t.ClientState.Recording),this.microphone.unmute(),this.activeContexts.set(n,new Map),n}))}stopContext(){return o(this,void 0,void 0,(function*(){if(this.state!==t.ClientState.Recording&&this.state!==t.ClientState.Starting)throw Error("Cannot stop context - client is not recording");this.setState(t.ClientState.Stopping),this.stoppedContextIdPromise=new Promise((t=>{Promise.race([new Promise((t=>setTimeout(t,this.contextStopDelay))),new Promise((t=>{this.resolveStopContext=t}))]).then((()=>{this._stopContext().then((e=>{t(e)})).catch((t=>{throw t}))})).catch((t=>{throw t}))}));const e=yield this.stoppedContextIdPromise;return this.setState(t.ClientState.Connected),this.activeContexts.delete(e),e}))}_stopContext(){return o(this,void 0,void 0,(function*(){let e;this.microphone.mute();try{e=yield this.apiClient.stopContext()}catch(e){throw this.setState(t.ClientState.Failed),e}return e}))}onStateChange(t){this.stateChangeCb=t}onSegmentChange(t){this.segmentChangeCb=t}onTentativeTranscript(t){this.tentativeTranscriptCb=t}onTranscript(t){this.transcriptCb=t}onTentativeEntities(t){this.tentativeEntitiesCb=t}onEntity(t){this.entityCb=t}onTentativeIntent(t){this.tentativeIntentCb=t}onIntent(t){this.intentCb=t}reconnect(){this.debug&&console.log("[SpeechlyClient]","Reconnecting...",this.connectAttempt),this.state!==t.ClientState.Failed&&this.connectAttempt<this.maxReconnectAttemptCount?(this.connectPromise=null,this.connect()):(console.error("[SpeechlyClient] Maximum reconnect count reached, giving up."),this.setState(t.ClientState.Failed))}setState(t){this.state!==t&&(this.debug&&console.log("[SpeechlyClient]","State transition",n(this.state),n(t)),this.state=t,this.stateChangeCb(t))}printStats(){this.microphone.printStats()}},t.DefaultSampleRate=g,t.ErrAlreadyInitialized=v,t.ErrAppIdChangeWithoutProjectLogin=y,t.ErrDeviceNotSupported=m,t.ErrKeyNotFound=x,t.ErrNoAudioConsent=w,t.ErrNoStorageSupport=T,t.ErrNotInitialized=b,t.stateToString=n,Object.defineProperty(t,"__esModule",{value:!0})}));
!function(t,e){!function(n){var i=e,s=t&&t.exports==i&&t,o="object"==typeof f&&f;o.global!==o&&o.window!==o||(n=o);var a=function(t){this.message=t};(a.prototype=new Error).name="InvalidCharacterError";var r=function(t){throw new a(t)},l="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",c=/[\t\n\f\r ]/g,h={encode:function(t){t=String(t),/[^\0-\xFF]/.test(t)&&r("The string to be encoded contains characters outside of the Latin1 range.");for(var e,n,i,s,o=t.length%3,a="",c=-1,h=t.length-o;++c<h;)e=t.charCodeAt(c)<<16,n=t.charCodeAt(++c)<<8,i=t.charCodeAt(++c),a+=l.charAt((s=e+n+i)>>18&63)+l.charAt(s>>12&63)+l.charAt(s>>6&63)+l.charAt(63&s);return 2==o?(e=t.charCodeAt(c)<<8,n=t.charCodeAt(++c),a+=l.charAt((s=e+n)>>10)+l.charAt(s>>4&63)+l.charAt(s<<2&63)+"="):1==o&&(s=t.charCodeAt(c),a+=l.charAt(s>>2)+l.charAt(s<<4&63)+"=="),a},decode:function(t){var e=(t=String(t).replace(c,"")).length;e%4==0&&(e=(t=t.replace(/==?$/,"")).length),(e%4==1||/[^+a-zA-Z0-9/]/.test(t))&&r("Invalid character: the string to be decoded is not correctly encoded.");for(var n,i,s=0,o="",a=-1;++a<e;)i=l.indexOf(t.charAt(a)),n=s%4?64*n+i:i,s++%4&&(o+=String.fromCharCode(255&n>>(-2*s&6)));return o},version:"0.1.0"};if(i&&!i.nodeType)if(s)s.exports=h;else for(var d in h)h.hasOwnProperty(d)&&(i[d]=h[d]);else n.base64=h}(f)}(S,S.exports);function C(t,e,n,i,s=Date.now){const o=function(t){const e=t.split(".")[1];let n;try{n=JSON.parse(S.exports.decode(e))}catch(t){throw new Error("Error decoding Speechly token!")}return{appId:n.appId,projectId:n.projectId,deviceId:n.deviceId,configId:n.configId,scopes:n.scope.split(" "),issuer:n.iss,audience:n.aud,expiresAtMs:1e3*n.exp}}(t);return!(o.expiresAtMs-s()<36e5)&&(o.appId===n&&o.projectId===e&&o.deviceId===i)}const g=16e3,b=new Error("Microphone is not initialized"),v=new Error("Microphone is already initialized"),m=new Error("Current device does not support microphone API"),y=new Error("Microphone consent is no given"),w=new Error("AppId changed without project login");class k{constructor(t,e,n,i=!1){this.initialized=!1,this.muted=!1,this.stats={maxSignalEnergy:0},this.handleAudio=t=>{this.muted||t.length>0&&this.apiClient.sendAudio(t)},this.isWebkit=t,this.apiClient=n,this.sampleRate=e,this.debug=i}initialize(t,e){var n;return o(this,void 0,void 0,(function*(){if(void 0===(null===(n=window.navigator)||void 0===n?void 0:n.mediaDevices))throw m;this.audioContext=t,this.resampleRatio=this.audioContext.sampleRate/this.sampleRate;try{this.mediaStream=yield window.navigator.mediaDevices.getUserMedia(e)}catch(t){throw y}if(this.audioTrack=this.mediaStream.getAudioTracks()[0],this.isWebkit||(yield this.audioContext.resume()),void 0!==window.AudioWorkletNode){const t=new Blob(["\n// Indices for the Control SAB.\nconst CONTROL = {\n 'WRITE_INDEX': 0,\n 'FRAMES_AVAILABLE': 1,\n 'LOCK': 2,\n};\n\nclass SpeechlyProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n\n this._initialized = false;\n this.debug = false;\n this.port.onmessage = this._initialize.bind(this);\n }\n\n _initialize(event) {\n this.controlSAB = new Int32Array(event.data.controlSAB);\n this.dataSAB = new Float32Array(event.data.dataSAB);\n this.debug = event.data.debug;\n this.sharedBufferSize = this.dataSAB.length;\n this.buffer = new Float32Array(0);\n this._initialized = true;\n }\n\n _transferDataToSharedBuffer(data) {\n this.controlSAB[CONTROL.LOCK] = 1\n let inputWriteIndex = this.controlSAB[CONTROL.WRITE_INDEX]\n if (this.controlSAB[CONTROL.FRAMES_AVAILABLE] > 0) {\n if 
(inputWriteIndex + data.length > this.sharedBufferSize) {\n // console.log('buffer overflow')\n inputWriteIndex = 0\n }\n }\n this.dataSAB.set(data, inputWriteIndex)\n this.controlSAB[CONTROL.WRITE_INDEX] = inputWriteIndex + data.length\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = inputWriteIndex + data.length\n this.controlSAB[CONTROL.LOCK] = 0\n }\n\n _pushData(data) {\n if (this.debug) {\n const signalEnergy = getStandardDeviation(data)\n this.port.postMessage({\n type: 'STATS',\n signalEnergy: signalEnergy\n });\n }\n\n if (this.buffer.length > this.sharedBufferSize) {\n const dataToTransfer = this.buffer.subarray(0, this.sharedBufferSize)\n this._transferDataToSharedBuffer(dataToTransfer)\n this.buffer = this.buffer.subarray(this.sharedBufferSize)\n }\n let concat = new Float32Array(this.buffer.length + data.length)\n concat.set(this.buffer)\n concat.set(data, this.buffer.length)\n this.buffer = concat\n }\n\n process(inputs, outputs, parameters) {\n const inputChannelData = inputs[0][0];\n if (inputChannelData !== undefined) {\n if (this.controlSAB && this.dataSAB) {\n this._pushData(inputChannelData);\n } else {\n this.port.postMessage({\n type: 'DATA',\n frames: inputChannelData\n });\n }\n }\n \n return true;\n }\n}\n\nfunction getStandardDeviation(array) {\n const n = array.length\n const mean = array.reduce((a, b) => a + b) / n\n return Math.sqrt(array.map(x => Math.pow(x - mean, 2)).reduce((a, b) => a + b) / n)\n}\n\nregisterProcessor('speechly-worklet', SpeechlyProcessor);\n"],{type:"text/javascript"}),e=window.URL.createObjectURL(t);yield this.audioContext.audioWorklet.addModule(e);const n=new AudioWorkletNode(this.audioContext,"speechly-worklet");if(this.audioContext.createMediaStreamSource(this.mediaStream).connect(n),n.connect(this.audioContext.destination),void 0!==window.SharedArrayBuffer){const t=new window.SharedArrayBuffer(4*Int32Array.BYTES_PER_ELEMENT),e=new window.SharedArrayBuffer(1024*Float32Array.BYTES_PER_ELEMENT);this.apiClient.postMessage({type:"SET_SHARED_ARRAY_BUFFERS",controlSAB:t,dataSAB:e}),n.port.postMessage({type:"SET_SHARED_ARRAY_BUFFERS",controlSAB:t,dataSAB:e,debug:this.debug})}else this.debug&&console.log("[SpeechlyClient]","can not use SharedArrayBuffer");n.port.onmessage=t=>{switch(t.data.type){case"STATS":t.data.signalEnergy>this.stats.maxSignalEnergy&&(this.stats.maxSignalEnergy=t.data.signalEnergy);break;case"DATA":this.handleAudio(t.data.frames)}}}else{if(this.debug&&console.log("[SpeechlyClient]","can not use AudioWorkletNode"),this.isWebkit){const t=4096*Math.pow(2,Math.ceil(Math.log(this.resampleRatio)/Math.log(2)));this.audioProcessor=this.audioContext.createScriptProcessor(t,1,1)}else this.audioProcessor=this.audioContext.createScriptProcessor(void 0,1,1);this.audioContext.createMediaStreamSource(this.mediaStream).connect(this.audioProcessor),this.audioProcessor.connect(this.audioContext.destination),this.audioProcessor.addEventListener("audioprocess",(t=>{this.handleAudio(t.inputBuffer.getChannelData(0))}))}this.initialized=!0,this.mute()}))}close(){return o(this,void 0,void 0,(function*(){if(this.mute(),!this.initialized)throw b;this.audioTrack.enabled=!1;if(this.mediaStream.getTracks().forEach((t=>t.stop())),null!=this.audioProcessor){this.audioProcessor.disconnect()}this.mediaStream=void 0,this.audioTrack=void 0,this.audioProcessor=void 0,this.initialized=!1}))}mute(){this.muted=!0}unmute(){this.muted=!1}printStats(){if(null!=this.audioTrack){const 
t=this.audioTrack.getSettings();console.log(this.audioTrack.label,this.audioTrack.readyState),console.log("channelCount",t.channelCount),console.log("latency",t.latency),console.log("autoGainControl",t.autoGainControl)}console.log("maxSignalEnergy",this.stats.maxSignalEnergy)}}var E;t.WebsocketResponseType=void 0,(E=t.WebsocketResponseType||(t.WebsocketResponseType={})).Opened="WEBSOCKET_OPEN",E.Closed="WEBSOCKET_CLOSED",E.SourceSampleRateSetSuccess="SOURSE_SAMPLE_RATE_SET_SUCCESS",E.Started="started",E.Stopped="stopped",E.SegmentEnd="segment_end",E.Transcript="transcript",E.Entity="entity",E.Intent="intent",E.TentativeTranscript="tentative_transcript",E.TentativeEntities="tentative_entities",E.TentativeIntent="tentative_intent";class A{constructor(){this.startCbs=[],this.stopCbs=[],this.onResponseCb=()=>{},this.onCloseCb=()=>{},this.onWebsocketMessage=e=>{const n=e.data;switch(n.type){case t.WebsocketResponseType.Opened:null!=this.resolveInitialization&&this.resolveInitialization();break;case t.WebsocketResponseType.Closed:this.onCloseCb({code:e.data.code,reason:e.data.reason,wasClean:e.data.wasClean});break;case t.WebsocketResponseType.SourceSampleRateSetSuccess:null!=this.resolveSourceSampleRateSet&&this.resolveSourceSampleRateSet();break;case t.WebsocketResponseType.Started:this.startCbs.forEach((t=>{try{t(void 0,n.audio_context)}catch(t){console.error('[SpeechlyClient] Error while invoking "onStart" callback:',t)}})),this.startCbs.length=0;break;case t.WebsocketResponseType.Stopped:this.stopCbs.forEach((t=>{try{t(void 0,n.audio_context)}catch(t){console.error('[SpeechlyClient] Error while invoking "onStop" callback:',t)}})),this.stopCbs.length=0;break;default:this.onResponseCb(n)}};const e=new Blob(["/**\n * Known WebSocket response types.\n * @public\n */\nvar WebsocketResponseType;\n(function (WebsocketResponseType) {\n WebsocketResponseType[\"Opened\"] = \"WEBSOCKET_OPEN\";\n WebsocketResponseType[\"SourceSampleRateSetSuccess\"] = \"SOURSE_SAMPLE_RATE_SET_SUCCESS\";\n WebsocketResponseType[\"Started\"] = \"started\";\n WebsocketResponseType[\"Stopped\"] = \"stopped\";\n})(WebsocketResponseType || (WebsocketResponseType = {}));\nvar CONTROL = {\n WRITE_INDEX: 0,\n FRAMES_AVAILABLE: 1,\n LOCK: 2\n};\nvar WebsocketClient = /** @class */ (function () {\n function WebsocketClient(ctx) {\n var _this = this;\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n this.shouldResendLastFramesSent = false;\n this.buffer = new Float32Array(0);\n this.lastFramesSent = new Int16Array(0); // to re-send after switch context\n this.debug = false;\n this.initialized = false;\n // WebSocket's close handler, called e.g. 
when\n // - normal close (code 1000)\n // - network unreachable or unable to (re)connect (code 1006)\n // List of CloseEvent.code values: https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent/code\n this.onWebsocketClose = function (event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'onWebsocketClose');\n }\n _this.websocket.removeEventListener('open', _this.onWebsocketOpen);\n _this.websocket.removeEventListener('message', _this.onWebsocketMessage);\n _this.websocket.removeEventListener('error', _this.onWebsocketError);\n _this.websocket.removeEventListener('close', _this.onWebsocketClose);\n _this.websocket = undefined;\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_CLOSED', code: event.code, reason: event.reason, wasClean: event.wasClean });\n };\n this.onWebsocketOpen = function (_event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'websocket opened');\n }\n if (_this.isContextStarted && !_this.isStartContextConfirmed) {\n _this.send(_this.outbox);\n }\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_OPEN' });\n };\n this.onWebsocketError = function (_event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'websocket error');\n }\n };\n this.onWebsocketMessage = function (event) {\n var response;\n try {\n response = JSON.parse(event.data);\n }\n catch (e) {\n console.error('[SpeechlyClient] Error parsing response from the server:', e);\n return;\n }\n if (response.type === WebsocketResponseType.Started) {\n _this.isStartContextConfirmed = true;\n if (_this.shouldResendLastFramesSent) {\n _this.resendLastFrames();\n _this.shouldResendLastFramesSent = false;\n }\n }\n _this.workerCtx.postMessage(response);\n };\n this.workerCtx = ctx;\n }\n WebsocketClient.prototype.init = function (apiUrl, authToken, targetSampleRate, debug) {\n this.debug = debug;\n if (this.debug) {\n console.log('[SpeechlyClient]', 'initialize worker');\n }\n this.apiUrl = apiUrl;\n this.authToken = authToken;\n this.targetSampleRate = targetSampleRate;\n this.initialized = true;\n this.isContextStarted = false;\n this.connect(0);\n };\n WebsocketClient.prototype.setSourceSampleRate = function (sourceSampleRate) {\n this.sourceSampleRate = sourceSampleRate;\n this.resampleRatio = this.sourceSampleRate / this.targetSampleRate;\n if (this.debug) {\n console.log('[SpeechlyClient]', 'resampleRatio', this.resampleRatio);\n }\n if (this.resampleRatio > 1) {\n this.filter = generateFilter(this.sourceSampleRate, this.targetSampleRate, 127);\n }\n this.workerCtx.postMessage({ type: 'SOURSE_SAMPLE_RATE_SET_SUCCESS' });\n if (isNaN(this.resampleRatio)) {\n throw Error(\"resampleRatio is NaN source rate is \".concat(this.sourceSampleRate, \" and target rate is \").concat(this.targetSampleRate));\n }\n };\n WebsocketClient.prototype.setSharedArrayBuffers = function (controlSAB, dataSAB) {\n this.controlSAB = new Int32Array(controlSAB);\n this.dataSAB = new Float32Array(dataSAB);\n var audioHandleInterval = this.dataSAB.length / 32; // ms\n if (this.debug) {\n console.log('[SpeechlyClient]', 'Audio handle interval', audioHandleInterval, 'ms');\n }\n setInterval(this.sendAudioFromSAB.bind(this), audioHandleInterval);\n };\n WebsocketClient.prototype.connect = function (timeout) {\n if (timeout === void 0) { timeout = 1000; }\n if (this.debug) {\n console.log('[SpeechlyClient]', 'connect in ', timeout / 1000, 'sec');\n }\n setTimeout(this.initializeWebsocket.bind(this), timeout);\n };\n WebsocketClient.prototype.initializeWebsocket = function () {\n if (this.debug) {\n 
console.log('[SpeechlyClient]', 'connecting to ', this.apiUrl);\n }\n this.websocket = new WebSocket(this.apiUrl, this.authToken);\n this.websocket.addEventListener('open', this.onWebsocketOpen);\n this.websocket.addEventListener('message', this.onWebsocketMessage);\n this.websocket.addEventListener('error', this.onWebsocketError);\n this.websocket.addEventListener('close', this.onWebsocketClose);\n };\n WebsocketClient.prototype.isOpen = function () {\n return this.websocket !== undefined && this.websocket.readyState === this.websocket.OPEN;\n };\n WebsocketClient.prototype.resendLastFrames = function () {\n if (this.lastFramesSent.length > 0) {\n this.send(this.lastFramesSent);\n this.lastFramesSent = new Int16Array(0);\n }\n };\n WebsocketClient.prototype.sendAudio = function (audioChunk) {\n if (!this.isContextStarted) {\n return;\n }\n if (audioChunk.length > 0) {\n if (this.resampleRatio > 1) {\n // Downsampling\n this.send(this.downsample(audioChunk));\n }\n else {\n this.send(float32ToInt16(audioChunk));\n }\n }\n };\n WebsocketClient.prototype.sendAudioFromSAB = function () {\n if (!this.isContextStarted) {\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n return;\n }\n if (this.controlSAB == undefined) {\n return;\n }\n var framesAvailable = this.controlSAB[CONTROL.FRAMES_AVAILABLE];\n var lock = this.controlSAB[CONTROL.LOCK];\n if (lock == 0 && framesAvailable > 0) {\n var data = this.dataSAB.subarray(0, framesAvailable);\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n if (data.length > 0) {\n var frames_1;\n if (this.resampleRatio > 1) {\n frames_1 = this.downsample(data);\n }\n else {\n frames_1 = float32ToInt16(data);\n }\n this.send(frames_1);\n // 16000 per second, 1000 in 100 ms\n // save last 250 ms\n if (this.lastFramesSent.length > 1024 * 4) {\n this.lastFramesSent = frames_1;\n }\n else {\n var concat = new Int16Array(this.lastFramesSent.length + frames_1.length);\n concat.set(this.lastFramesSent);\n concat.set(frames_1, this.lastFramesSent.length);\n this.lastFramesSent = concat;\n }\n }\n }\n };\n WebsocketClient.prototype.startContext = function (appId) {\n if (this.isContextStarted) {\n console.log('Cant start context: it has been already started');\n return;\n }\n this.isContextStarted = true;\n this.isStartContextConfirmed = false;\n if (appId !== undefined) {\n this.outbox = JSON.stringify({ event: 'start', appId: appId });\n }\n else {\n this.outbox = JSON.stringify({ event: 'start' });\n }\n this.send(this.outbox);\n };\n WebsocketClient.prototype.stopContext = function () {\n if (!this.websocket) {\n throw Error('Cant start context: websocket is undefined');\n }\n if (!this.isContextStarted) {\n console.log('Cant stop context: it is not started');\n return;\n }\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n };\n WebsocketClient.prototype.switchContext = function (newAppId) {\n if (!this.websocket) {\n throw Error('Cant switch context: websocket is undefined');\n }\n if (!this.isContextStarted) {\n console.log('Cant switch context: it is not started');\n return;\n }\n if (newAppId == undefined) {\n console.log('Cant switch context: new app id is undefined');\n return;\n }\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n this.shouldResendLastFramesSent = true;\n 
this.send(JSON.stringify({ event: 'start', appId: newAppId }));\n };\n WebsocketClient.prototype.closeWebsocket = function (websocketCode, reason) {\n if (websocketCode === void 0) { websocketCode = 1005; }\n if (reason === void 0) { reason = \"No Status Received\"; }\n if (this.debug) {\n console.log('[SpeechlyClient]', 'Websocket closing');\n }\n if (!this.websocket) {\n throw Error('Websocket is not open');\n }\n this.websocket.close(websocketCode, reason);\n };\n WebsocketClient.prototype.downsample = function (input) {\n var inputBuffer = new Float32Array(this.buffer.length + input.length);\n inputBuffer.set(this.buffer, 0);\n inputBuffer.set(input, this.buffer.length);\n var outputLength = Math.ceil((inputBuffer.length - this.filter.length) / this.resampleRatio);\n var outputBuffer = new Int16Array(outputLength);\n for (var i = 0; i < outputLength; i++) {\n var offset = Math.round(this.resampleRatio * i);\n var val = 0.0;\n for (var j = 0; j < this.filter.length; j++) {\n val += inputBuffer[offset + j] * this.filter[j];\n }\n outputBuffer[i] = val * (val < 0 ? 0x8000 : 0x7fff);\n }\n var remainingOffset = Math.round(this.resampleRatio * outputLength);\n if (remainingOffset < inputBuffer.length) {\n this.buffer = inputBuffer.subarray(remainingOffset);\n }\n else {\n this.buffer = new Float32Array(0);\n }\n return outputBuffer;\n };\n WebsocketClient.prototype.send = function (data) {\n if (this.isOpen()) {\n try {\n this.websocket.send(data);\n }\n catch (error) {\n console.log('[SpeechlyClient]', 'Server connection error', error);\n }\n }\n };\n return WebsocketClient;\n}());\nvar ctx = self;\nvar websocketClient = new WebsocketClient(ctx);\nctx.onmessage = function (e) {\n switch (e.data.type) {\n case 'INIT':\n websocketClient.init(e.data.apiUrl, e.data.authToken, e.data.targetSampleRate, e.data.debug);\n break;\n case 'SET_SOURSE_SAMPLE_RATE':\n websocketClient.setSourceSampleRate(e.data.sourceSampleRate);\n break;\n case 'SET_SHARED_ARRAY_BUFFERS':\n websocketClient.setSharedArrayBuffers(e.data.controlSAB, e.data.dataSAB);\n break;\n case 'CLOSE':\n websocketClient.closeWebsocket(1000, \"Close requested by client\");\n break;\n case 'START_CONTEXT':\n websocketClient.startContext(e.data.appId);\n break;\n case 'SWITCH_CONTEXT':\n websocketClient.switchContext(e.data.appId);\n break;\n case 'STOP_CONTEXT':\n websocketClient.stopContext();\n break;\n case 'AUDIO':\n websocketClient.sendAudio(e.data.payload);\n break;\n default:\n console.log('WORKER', e);\n }\n};\nfunction float32ToInt16(buffer) {\n var buf = new Int16Array(buffer.length);\n for (var l = 0; l < buffer.length; l++) {\n buf[l] = buffer[l] * (buffer[l] < 0 ? 
0x8000 : 0x7fff);\n }\n return buf;\n}\nfunction generateFilter(sourceSampleRate, targetSampleRate, length) {\n if (length % 2 === 0) {\n throw Error('Filter length must be odd');\n }\n var cutoff = targetSampleRate / 2;\n var filter = new Float32Array(length);\n var sum = 0;\n for (var i = 0; i < length; i++) {\n var x = sinc(((2 * cutoff) / sourceSampleRate) * (i - (length - 1) / 2));\n sum += x;\n filter[i] = x;\n }\n for (var i = 0; i < length; i++) {\n filter[i] = filter[i] / sum;\n }\n return filter;\n}\nfunction sinc(x) {\n if (x === 0.0) {\n return 1.0;\n }\n var piX = Math.PI * x;\n return Math.sin(piX) / piX;\n}\n"],{type:"text/javascript"}),n=window.URL.createObjectURL(e);this.worker=new Worker(n),this.worker.addEventListener("message",this.onWebsocketMessage)}onResponse(t){this.onResponseCb=t}onClose(t){this.onCloseCb=t}initialize(t,e,n,i){return o(this,void 0,void 0,(function*(){return this.worker.postMessage({type:"INIT",apiUrl:t,authToken:e,targetSampleRate:n,debug:i}),this.startCbs=[],this.stopCbs=[],new Promise((t=>{this.resolveInitialization=t}))}))}setSourceSampleRate(t){return o(this,void 0,void 0,(function*(){return this.worker.postMessage({type:"SET_SOURSE_SAMPLE_RATE",sourceSampleRate:t}),new Promise((t=>{this.resolveSourceSampleRateSet=t}))}))}close(){return o(this,void 0,void 0,(function*(){return new Promise(((t,e)=>{this.worker.postMessage({type:"CLOSE",code:1e3,message:"Client has ended the session"}),t()}))}))}startContext(t){return o(this,void 0,void 0,(function*(){return new Promise(((e,n)=>{this.startCbs.push(((t,i)=>{void 0!==t?n(t):e(i)})),null!=t?this.worker.postMessage({type:"START_CONTEXT",appId:t}):this.worker.postMessage({type:"START_CONTEXT"})}))}))}stopContext(){return o(this,void 0,void 0,(function*(){return new Promise(((t,e)=>{this.stopCbs.push(((n,i)=>{void 0!==n?e(n):t(i)})),this.worker.postMessage({type:"STOP_CONTEXT"})}))}))}switchContext(t){return o(this,void 0,void 0,(function*(){return new Promise(((e,n)=>{this.startCbs.push(((t,i)=>{void 0!==t?n(t):e(i)})),this.worker.postMessage({type:"SWITCH_CONTEXT",appId:t})}))}))}postMessage(t){this.worker.postMessage(t)}sendAudio(t){this.worker.postMessage({type:"AUDIO",payload:t})}}class R{constructor(){this.storage=window.localStorage}get(t){return this.storage.getItem(t)}set(t,e){this.storage.setItem(t,e)}getOrSet(t,e){let n=this.storage.getItem(t);return null===n&&(n=e(),this.storage.setItem(t,n)),n}}const T=new Error("Current device does not support storage API"),x=new Error("Requested key was not present in storage");class I{constructor(t,e){this.isFinalized=!1,this.words=[],this.entities=new Map,this.intent={intent:"",isFinal:!1},this.contextId=t,this.id=e}toSegment(){let t=0;const e=new Array(this.entities.size);return this.entities.forEach((n=>{e[t]=n,t++})),{id:this.id,contextId:this.contextId,isFinal:this.isFinalized,words:this.words,entities:e,intent:this.intent}}toString(){const t=this.toSegment(),e=t.words.filter((t=>t.value)).map((t=>({value:t.value,index:t.index}))),n=Object.assign(Object.assign({},t),{words:e});return JSON.stringify(n,null,2)}updateTranscript(t){return t.forEach((t=>{this.isFinalized&&!t.isFinal||(this.words[t.index]=t)})),this}updateEntities(t){return t.forEach((t=>{this.isFinalized&&!t.isFinal||this.entities.set(function(t){return`${t.startPosition.toString()}:${t.endPosition.toString()}`}(t),t)})),this}updateIntent(t){return this.isFinalized&&!t.isFinal||(this.intent=t),this}finalize(){return 
this.entities.forEach(((t,e)=>{t.isFinal||this.entities.delete(e)})),this.words=this.words.filter((t=>t.isFinal)),this.intent.isFinal||(this.intent.intent="",this.intent.isFinal=!0),this.isFinalized=!0,this}}function _(t,e){return{intent:t.intent,isFinal:e}}const W="speechly-auth-token";t.Client=class{constructor(e){var n,i,s,o,a,r,l,c,h,d,u;this.listening=!1,this.activeContexts=new Map,this.maxReconnectAttemptCount=10,this.contextStopDelay=250,this.connectAttempt=0,this.connectPromise=null,this.initializePromise=null,this.listeningPromise=null,this.state=t.ClientState.Disconnected,this.stateChangeCb=()=>{},this.segmentChangeCb=()=>{},this.tentativeTranscriptCb=()=>{},this.tentativeEntitiesCb=()=>{},this.tentativeIntentCb=()=>{},this.transcriptCb=()=>{},this.entityCb=()=>{},this.intentCb=()=>{},this.handleWebsocketResponse=e=>{var n;this.debug&&console.log("[SpeechlyClient]","Received response",e);const{audio_context:i,segment_id:s,type:o}=e;let{data:a}=e;const r=this.activeContexts.get(i);if(void 0===r)return void console.warn("[SpeechlyClient]","Received response for non-existent context",i);let l=null!==(n=r.get(s))&&void 0!==n?n:new I(i,s);switch(o){case t.WebsocketResponseType.TentativeTranscript:a=a;const e=function(t){return t.words.map((({word:t,index:e,start_timestamp:n,end_timestamp:i})=>({value:t,index:e,startTimestamp:n,endTimestamp:i,isFinal:!1})))}(a);this.tentativeTranscriptCb(i,s,e,a.transcript),l=l.updateTranscript(e);break;case t.WebsocketResponseType.Transcript:a=a;const n=function(t){return{value:t.word,index:t.index,startTimestamp:t.start_timestamp,endTimestamp:t.end_timestamp,isFinal:!0}}(a);this.transcriptCb(i,s,n),l=l.updateTranscript([n]);break;case t.WebsocketResponseType.TentativeEntities:a=a;const o=function(t){return t.entities.map((({entity:t,value:e,start_position:n,end_position:i})=>({type:t,value:e,startPosition:n,endPosition:i,isFinal:!1})))}(a);this.tentativeEntitiesCb(i,s,o),l=l.updateEntities(o);break;case t.WebsocketResponseType.Entity:a=a;const r=function(t){return{type:t.entity,value:t.value,startPosition:t.start_position,endPosition:t.end_position,isFinal:!0}}(a);this.entityCb(i,s,r),l=l.updateEntities([r]);break;case t.WebsocketResponseType.TentativeIntent:a=a;const c=_(a,!1);this.tentativeIntentCb(i,s,c),l=l.updateIntent(c);break;case t.WebsocketResponseType.Intent:a=a;const h=_(a,!0);this.intentCb(i,s,h),l=l.updateIntent(h);break;case t.WebsocketResponseType.SegmentEnd:l=l.finalize()}r.set(s,l),this.activeContexts.set(i,r),this.logSegments&&console.info(l.toString()),this.segmentChangeCb(l.toSegment())},this.handleWebsocketClosure=e=>{if(1e3===e.code)this.debug&&console.log("[SpeechlyClient]","Websocket closed",e);else{if(console.error("[SpeechlyClient]","Websocket closed due to error",e),void 0===this.deviceId)return void this.setState(t.ClientState.Failed);this.listening=!1,this.listeningPromise=null,this.microphone.mute(),this.setState(t.ClientState.Disconnected),this.reconnect()}},this.sampleRate=null!==(n=e.sampleRate)&&void 0!==n?n:g;try{const t=window.navigator.mediaDevices.getSupportedConstraints();this.nativeResamplingSupported=!0===t.sampleRate,null!=e.autoGainControl&&e.autoGainControl?this.autoGainControl=!0===t.autoGainControl:this.autoGainControl=!1}catch(t){this.nativeResamplingSupported=!1,this.autoGainControl=!1}if(this.debug=null!==(i=e.debug)&&void 0!==i&&i,this.logSegments=null!==(s=e.logSegments)&&void 0!==s&&s,this.loginUrl=null!==(o=e.loginUrl)&&void 
0!==o?o:"https://api.speechly.com/login",this.appId=null!==(a=e.appId)&&void 0!==a?a:void 0,this.projectId=null!==(r=e.projectId)&&void 0!==r?r:void 0,this.apiClient=null!==(l=e.apiClient)&&void 0!==l?l:new A,this.apiUrl=function(t,e){const n=new URLSearchParams;return n.append("sampleRate",e.toString()),`${t}?${n.toString()}`}(null!==(c=e.apiUrl)&&void 0!==c?c:"wss://api.speechly.com/ws/v1",null!==(h=e.sampleRate)&&void 0!==h?h:g),void 0!==this.appId&&void 0!==this.projectId)throw Error("[SpeechlyClient] You cannot use both appId and projectId at the same time");if(this.storage=null!==(d=e.storage)&&void 0!==d?d:new R,this.deviceId=this.storage.getOrSet("speechly-device-id",p),void 0!==window.AudioContext)this.isWebkit=!1;else{if(void 0===window.webkitAudioContext)throw m;this.isWebkit=!0}this.microphone=null!==(u=e.microphone)&&void 0!==u?u:new k(this.isWebkit,this.sampleRate,this.apiClient,this.debug),this.apiClient.onResponse(this.handleWebsocketResponse),this.apiClient.onClose(this.handleWebsocketClosure),window.SpeechlyClient=this,!1!==e.connect&&this.connect()}getReconnectDelayMs(t){return 100*Math.pow(2,t)}sleep(t){return o(this,void 0,void 0,(function*(){return new Promise((e=>setTimeout(e,t)))}))}isListening(){return this.listening}connect(){return o(this,void 0,void 0,(function*(){null===this.connectPromise&&(this.connectPromise=(()=>o(this,void 0,void 0,(function*(){this.advanceState(t.ClientState.Connecting);const e=this.storage.get(W);if(null!=e&&C(e,this.projectId,this.appId,this.deviceId))this.authToken=e;else try{this.authToken=yield function(t,e,n,i,s=fetch,a=Date.now){var r;return o(this,void 0,void 0,(function*(){let o;o=void 0!==e?{projectId:e,deviceId:i}:{appId:n,deviceId:i};const l=yield s(t,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(o)}),c=yield l.json();if(200!==l.status)throw Error(null!==(r=c.error)&&void 0!==r?r:`Speechly API login request failed with ${l.status}`);if(void 0===c.access_token)throw Error("Invalid login response from Speechly API");if(!C(c.access_token,e,n,i,a))throw Error("Invalid token received from Speechly API");return c.access_token}))}(this.loginUrl,this.projectId,this.appId,this.deviceId),this.storage.set(W,this.authToken)}catch(e){throw this.setState(t.ClientState.Failed),e}try{yield this.apiClient.initialize(this.apiUrl,this.authToken,this.sampleRate,this.debug)}catch(e){throw this.setState(t.ClientState.Failed),e}})))()),yield this.connectPromise,this.advanceState(t.ClientState.Preinitialized)}))}initialize(){return o(this,void 0,void 0,(function*(){null===this.initializePromise&&(this.initializePromise=(()=>o(this,void 0,void 0,(function*(){yield this.connect(),this.advanceState(t.ClientState.Initializing);try{if(this.isWebkit)void 0!==window.webkitAudioContext&&(this.audioContext=new window.webkitAudioContext);else{const t={};this.nativeResamplingSupported&&(t.sampleRate=this.sampleRate),this.audioContext=new window.AudioContext(t)}const e={video:!1};if(this.nativeResamplingSupported||this.autoGainControl?e.audio={sampleRate:this.sampleRate,autoGainControl:this.autoGainControl}:e.audio=!0,null==this.audioContext)throw m;this.isWebkit&&(yield this.audioContext.resume()),yield this.apiClient.setSourceSampleRate(this.audioContext.sampleRate),yield this.microphone.initialize(this.audioContext,e),this.advanceState(t.ClientState.Connected)}catch(e){switch(e){case m:this.setState(t.ClientState.NoBrowserSupport);break;case 
y:this.setState(t.ClientState.NoAudioConsent);break;default:this.setState(t.ClientState.Failed)}throw e}})))()),yield this.initializePromise,this.advanceState(t.ClientState.Connected)}))}close(){return o(this,void 0,void 0,(function*(){const e=[];try{yield this.microphone.close()}catch(t){e.push(t.message)}try{yield this.apiClient.close()}catch(t){e.push(t.message)}if(this.activeContexts.clear(),this.connectPromise=null,this.initializePromise=null,this.setState(t.ClientState.Disconnected),e.length>0)throw Error(e.join(","))}))}hasUnrecoverableError(){return this.state<t.ClientState.__UnrecoverableErrors}queueTask(t){return o(this,void 0,void 0,(function*(){const e=this.listeningPromise;return this.listeningPromise=(()=>o(this,void 0,void 0,(function*(){return yield e,t()})))(),this.listeningPromise}))}startContext(e){return o(this,void 0,void 0,(function*(){if(!this.hasUnrecoverableError()){if(this.listening)throw Error("Already listening");this.listening=!0;return yield this.queueTask((()=>o(this,void 0,void 0,(function*(){if(this.state<t.ClientState.Connected&&(yield this.initialize()),this.state!==t.ClientState.Connected)throw Error("[SpeechlyClient] Unable to complete startContext: Expected Connected state, but was in "+n(this.state)+". Did you call startContext multiple times without stopContext?");let i;if(this.setState(t.ClientState.Starting),this.microphone.unmute(),null!=this.projectId)i=yield this.apiClient.startContext(e);else{if(null!=e&&this.appId!==e)throw this.setState(t.ClientState.Failed),w;i=yield this.apiClient.startContext()}if(this.state!==t.ClientState.Starting)throw Error("[SpeechlyClient] Unable to complete startContext: Problem acquiring contextId");return this.activeContexts.set(i,new Map),this.setState(t.ClientState.Recording),i}))))}throw Error("[SpeechlyClient] startContext cannot be run in unrecovable error state.")}))}stopContext(){return o(this,void 0,void 0,(function*(){if(!this.hasUnrecoverableError()){if(!this.listening)throw Error("Already stopped listening");this.listening=!1;return yield this.queueTask((()=>o(this,void 0,void 0,(function*(){if(this.state!==t.ClientState.Recording)throw Error("[SpeechlyClient] Unable to complete stopContext: Expected Recording state, but was in "+n(this.state)+".");this.setState(t.ClientState.Stopping),yield this.sleep(this.contextStopDelay),this.microphone.mute();try{const e=yield this.apiClient.stopContext();return this.activeContexts.delete(e),this.setState(t.ClientState.Connected),e}catch(e){throw this.setState(t.ClientState.Failed),e}}))))}throw Error("[SpeechlyClient] stopContext cannot be run in unrecovable error state.")}))}switchContext(e){return o(this,void 0,void 0,(function*(){yield this.queueTask((()=>o(this,void 0,void 0,(function*(){if(this.state!==t.ClientState.Recording)throw Error("[SpeechlyClient] Unable to complete switchContext: Expected Recording state, but was in "+n(this.state)+".");const i=yield this.apiClient.switchContext(e);this.activeContexts.set(i,new Map)}))))}))}onStateChange(t){this.stateChangeCb=t}onSegmentChange(t){this.segmentChangeCb=t}onTentativeTranscript(t){this.tentativeTranscriptCb=t}onTranscript(t){this.transcriptCb=t}onTentativeEntities(t){this.tentativeEntitiesCb=t}onEntity(t){this.entityCb=t}onTentativeIntent(t){this.tentativeIntentCb=t}onIntent(t){this.intentCb=t}reconnect(){return o(this,void 0,void 
0,(function*(){this.debug&&console.log("[SpeechlyClient]","Reconnecting...",this.connectAttempt),this.connectPromise=null,!this.hasUnrecoverableError()&&this.connectAttempt<this.maxReconnectAttemptCount?(yield this.sleep(this.getReconnectDelayMs(this.connectAttempt++)),yield this.connect()):console.error("[SpeechlyClient] Maximum reconnect count reached, giving up automatic reconnect.")}))}advanceState(t){this.state>=t||this.setState(t)}setState(t){this.state!==t&&(this.debug&&console.log("[SpeechlyClient]",n(this.state),"->",n(t)),this.state=t,this.stateChangeCb(t))}printStats(){this.microphone.printStats()}},t.DefaultSampleRate=g,t.ErrAlreadyInitialized=v,t.ErrAppIdChangeWithoutProjectLogin=w,t.ErrDeviceNotSupported=m,t.ErrKeyNotFound=x,t.ErrNoAudioConsent=y,t.ErrNoStorageSupport=T,t.ErrNotInitialized=b,t.stateToString=n,Object.defineProperty(t,"__esModule",{value:!0})}));
//# sourceMappingURL=speechly.umd.min.js.map

@@ -14,2 +14,3 @@ import { ClientOptions, StateChangeCallback, SegmentChangeCallback, TentativeTranscriptCallback, TranscriptCallback, TentativeEntitiesCallback, EntityCallback, IntentCallback } from './types';

export declare class Client {
private listening;
private readonly debug;

@@ -31,6 +32,5 @@ private readonly logSegments;

private connectAttempt;
private stoppedContextIdPromise?;
private connectPromise;
private initializePromise;
private resolveStopContext?;
private listeningPromise;
private readonly deviceId;

@@ -53,2 +53,6 @@ private authToken?;

/**
* @returns true if startContext is called and expecting stopContext next
*/
isListening(): boolean;
/**
* Connect to Speechly backend.

@@ -74,9 +78,5 @@ * This function will be called by initialize if not manually called earlier.

close(): Promise<void>;
private hasUnrecoverableError;
private queueTask;
/**
* Stops current context and immediately starts a new SLU context
* by sending a start context event to the API and unmuting the microphone.
* @param appId - unique identifier of an app in the dashboard.
*/
switchContext(appId: string): Promise<void>;
/**
* Starts a new SLU context by sending a start context event to the API and unmuting the microphone.

@@ -86,3 +86,2 @@ * @param cb - the callback which is invoked when the context start was acknowledged by the API.

startContext(appId?: string): Promise<string>;
private _startContext;
/**

@@ -93,4 +92,9 @@ * Stops current SLU context by sending a stop context event to the API and muting the microphone

stopContext(): Promise<string>;
private _stopContext;
/**
* Stops current context and immediately starts a new SLU context
* by sending a start context event to the API and unmuting the microphone.
* @param appId - unique identifier of an app in the dashboard.
*/
switchContext(appId: string): Promise<void>;
/**
* Adds a listener for client state change events.

@@ -138,2 +142,3 @@ * @param cb - the callback to invoke on state change events.

private reconnect;
private advanceState;
private setState;

@@ -140,0 +145,0 @@ /**

@@ -114,9 +114,12 @@ import { Microphone } from '../microphone';

NoAudioConsent = 2,
Disconnected = 3,
Disconnecting = 4,
Connecting = 5,
Connected = 6,
Starting = 7,
Stopping = 8,
Recording = 9
__UnrecoverableErrors = 3,
Disconnected = 4,
Disconnecting = 5,
Connecting = 6,
Preinitialized = 7,
Initializing = 8,
Connected = 9,
Stopping = 10,
Starting = 11,
Recording = 12
}
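The reworked ClientState ordering above places a __UnrecoverableErrors sentinel between the terminal error states and the operational states, so the client can decide recoverability with a single ordinal comparison and only ever move its state forward. A minimal sketch of that pattern, using the enum values from this release and helper names modelled on the hasUnrecoverableError/advanceState methods visible in the client.ts diff below:

```typescript
// Sketch of the sentinel-based state checks introduced in 1.4.0.
enum ClientState {
  Failed = 0,
  NoBrowserSupport = 1,
  NoAudioConsent = 2,
  __UnrecoverableErrors = 3, // sentinel: everything below this is terminal
  Disconnected = 4,
  Disconnecting = 5,
  Connecting = 6,
  Preinitialized = 7,
  Initializing = 8,
  Connected = 9,
  Stopping = 10,
  Starting = 11,
  Recording = 12,
}

let state = ClientState.Disconnected

// A state is unrecoverable when it sorts below the sentinel.
function hasUnrecoverableError(): boolean {
  return state < ClientState.__UnrecoverableErrors
}

// advanceState only ever moves the state machine forward; setState is unconditional.
function advanceState(newState: ClientState): void {
  if (state >= newState) {
    return
  }
  state = newState
}
```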

@@ -123,0 +126,0 @@ /**

@@ -1,2 +0,2 @@

declare const _default: "/**\n * Known WebSocket response types.\n * @public\n */\nvar WebsocketResponseType;\n(function (WebsocketResponseType) {\n WebsocketResponseType[\"Opened\"] = \"WEBSOCKET_OPEN\";\n WebsocketResponseType[\"SourceSampleRateSetSuccess\"] = \"SOURSE_SAMPLE_RATE_SET_SUCCESS\";\n WebsocketResponseType[\"Started\"] = \"started\";\n WebsocketResponseType[\"Stopped\"] = \"stopped\";\n})(WebsocketResponseType || (WebsocketResponseType = {}));\nvar CONTROL = {\n WRITE_INDEX: 0,\n FRAMES_AVAILABLE: 1,\n LOCK: 2\n};\nvar WebsocketClient = /** @class */ (function () {\n function WebsocketClient(ctx) {\n var _this = this;\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n this.shouldResendLastFramesSent = false;\n this.buffer = new Float32Array(0);\n this.lastFramesSent = new Int16Array(0); // to re-send after switch context\n this.debug = false;\n this.initialized = false;\n // WebSocket's close handler, called e.g. when\n // - normal close (code 1000)\n // - network unreachable or unable to (re)connect (code 1006)\n // List of CloseEvent.code values: https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent/code\n this.onWebsocketClose = function (event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'onWebsocketClose');\n }\n _this.websocket.removeEventListener('open', _this.onWebsocketOpen);\n _this.websocket.removeEventListener('message', _this.onWebsocketMessage);\n _this.websocket.removeEventListener('error', _this.onWebsocketError);\n _this.websocket.removeEventListener('close', _this.onWebsocketClose);\n _this.websocket = undefined;\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_CLOSED', code: event.code, reason: event.reason, wasClean: event.wasClean });\n };\n this.onWebsocketOpen = function (_event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'websocket opened');\n }\n if (_this.isContextStarted && !_this.isStartContextConfirmed) {\n _this.send(_this.outbox);\n }\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_OPEN' });\n };\n this.onWebsocketError = function (_event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'websocket error');\n }\n };\n this.onWebsocketMessage = function (event) {\n var response;\n try {\n response = JSON.parse(event.data);\n }\n catch (e) {\n console.error('[SpeechlyClient] Error parsing response from the server:', e);\n return;\n }\n if (response.type === WebsocketResponseType.Started) {\n _this.isStartContextConfirmed = true;\n if (_this.shouldResendLastFramesSent) {\n _this.resendLastFrames();\n _this.shouldResendLastFramesSent = false;\n }\n }\n _this.workerCtx.postMessage(response);\n };\n this.workerCtx = ctx;\n }\n WebsocketClient.prototype.init = function (apiUrl, authToken, targetSampleRate, debug) {\n this.debug = debug;\n if (this.debug) {\n console.log('[SpeechlyClient]', 'initialize worker');\n }\n this.apiUrl = apiUrl;\n this.authToken = authToken;\n this.targetSampleRate = targetSampleRate;\n this.initialized = true;\n this.connect(0);\n };\n WebsocketClient.prototype.setSourceSampleRate = function (sourceSampleRate) {\n this.sourceSampleRate = sourceSampleRate;\n this.resampleRatio = this.sourceSampleRate / this.targetSampleRate;\n if (this.debug) {\n console.log('[SpeechlyClient]', 'resampleRatio', this.resampleRatio);\n }\n if (this.resampleRatio > 1) {\n this.filter = generateFilter(this.sourceSampleRate, this.targetSampleRate, 127);\n }\n this.workerCtx.postMessage({ type: 'SOURSE_SAMPLE_RATE_SET_SUCCESS' });\n if (isNaN(this.resampleRatio)) {\n throw 
Error(\"resampleRatio is NaN source rate is \".concat(this.sourceSampleRate, \" and target rate is \").concat(this.targetSampleRate));\n }\n };\n WebsocketClient.prototype.setSharedArrayBuffers = function (controlSAB, dataSAB) {\n this.controlSAB = new Int32Array(controlSAB);\n this.dataSAB = new Float32Array(dataSAB);\n var audioHandleInterval = this.dataSAB.length / 32; // ms\n if (this.debug) {\n console.log('[SpeechlyClient]', 'Audio handle interval', audioHandleInterval, 'ms');\n }\n setInterval(this.sendAudioFromSAB.bind(this), audioHandleInterval);\n };\n WebsocketClient.prototype.connect = function (timeout) {\n if (timeout === void 0) { timeout = 1000; }\n if (this.debug) {\n console.log('[SpeechlyClient]', 'connect in ', timeout / 1000, 'sec');\n }\n setTimeout(this.initializeWebsocket.bind(this), timeout);\n };\n WebsocketClient.prototype.initializeWebsocket = function () {\n if (this.debug) {\n console.log('[SpeechlyClient]', 'connecting to ', this.apiUrl);\n }\n this.websocket = new WebSocket(this.apiUrl, this.authToken);\n this.websocket.addEventListener('open', this.onWebsocketOpen);\n this.websocket.addEventListener('message', this.onWebsocketMessage);\n this.websocket.addEventListener('error', this.onWebsocketError);\n this.websocket.addEventListener('close', this.onWebsocketClose);\n };\n WebsocketClient.prototype.isOpen = function () {\n return this.websocket !== undefined && this.websocket.readyState === this.websocket.OPEN;\n };\n WebsocketClient.prototype.resendLastFrames = function () {\n if (this.lastFramesSent.length > 0) {\n this.send(this.lastFramesSent);\n this.lastFramesSent = new Int16Array(0);\n }\n };\n WebsocketClient.prototype.sendAudio = function (audioChunk) {\n if (!this.isContextStarted) {\n return;\n }\n if (audioChunk.length > 0) {\n if (this.resampleRatio > 1) {\n // Downsampling\n this.send(this.downsample(audioChunk));\n }\n else {\n this.send(float32ToInt16(audioChunk));\n }\n }\n };\n WebsocketClient.prototype.sendAudioFromSAB = function () {\n if (!this.isContextStarted) {\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n return;\n }\n if (this.controlSAB == undefined) {\n return;\n }\n var framesAvailable = this.controlSAB[CONTROL.FRAMES_AVAILABLE];\n var lock = this.controlSAB[CONTROL.LOCK];\n if (lock == 0 && framesAvailable > 0) {\n var data = this.dataSAB.subarray(0, framesAvailable);\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n if (data.length > 0) {\n var frames_1;\n if (this.resampleRatio > 1) {\n frames_1 = this.downsample(data);\n }\n else {\n frames_1 = float32ToInt16(data);\n }\n this.send(frames_1);\n // 16000 per second, 1000 in 100 ms\n // save last 250 ms\n if (this.lastFramesSent.length > 1024 * 4) {\n this.lastFramesSent = frames_1;\n }\n else {\n var concat = new Int16Array(this.lastFramesSent.length + frames_1.length);\n concat.set(this.lastFramesSent);\n concat.set(frames_1, this.lastFramesSent.length);\n this.lastFramesSent = concat;\n }\n }\n }\n };\n WebsocketClient.prototype.startContext = function (appId) {\n if (this.isContextStarted) {\n console.log('Cant start context: it has been already started');\n return;\n }\n this.isContextStarted = true;\n this.isStartContextConfirmed = false;\n if (appId !== undefined) {\n this.outbox = JSON.stringify({ event: 'start', appId: appId });\n }\n else {\n this.outbox = JSON.stringify({ event: 'start' });\n }\n this.send(this.outbox);\n };\n WebsocketClient.prototype.stopContext = 
function () {\n if (!this.websocket) {\n throw Error('Cant start context: websocket is undefined');\n }\n if (!this.isContextStarted) {\n console.log('Cant stop context: it is not started');\n return;\n }\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n };\n WebsocketClient.prototype.switchContext = function (newAppId) {\n if (!this.websocket) {\n throw Error('Cant switch context: websocket is undefined');\n }\n if (!this.isContextStarted) {\n console.log('Cant switch context: it is not started');\n return;\n }\n if (newAppId == undefined) {\n console.log('Cant switch context: new app id is undefined');\n return;\n }\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n this.shouldResendLastFramesSent = true;\n this.send(JSON.stringify({ event: 'start', appId: newAppId }));\n };\n WebsocketClient.prototype.closeWebsocket = function (websocketCode, reason) {\n if (websocketCode === void 0) { websocketCode = 1005; }\n if (reason === void 0) { reason = \"No Status Received\"; }\n if (this.debug) {\n console.log('[SpeechlyClient]', 'Websocket closing');\n }\n if (!this.websocket) {\n throw Error('Websocket is not open');\n }\n this.websocket.close(websocketCode, reason);\n };\n WebsocketClient.prototype.downsample = function (input) {\n var inputBuffer = new Float32Array(this.buffer.length + input.length);\n inputBuffer.set(this.buffer, 0);\n inputBuffer.set(input, this.buffer.length);\n var outputLength = Math.ceil((inputBuffer.length - this.filter.length) / this.resampleRatio);\n var outputBuffer = new Int16Array(outputLength);\n for (var i = 0; i < outputLength; i++) {\n var offset = Math.round(this.resampleRatio * i);\n var val = 0.0;\n for (var j = 0; j < this.filter.length; j++) {\n val += inputBuffer[offset + j] * this.filter[j];\n }\n outputBuffer[i] = val * (val < 0 ? 
0x8000 : 0x7fff);\n }\n var remainingOffset = Math.round(this.resampleRatio * outputLength);\n if (remainingOffset < inputBuffer.length) {\n this.buffer = inputBuffer.subarray(remainingOffset);\n }\n else {\n this.buffer = new Float32Array(0);\n }\n return outputBuffer;\n };\n WebsocketClient.prototype.send = function (data) {\n if (!this.isOpen()) {\n throw Error('Cant send data: websocket is inactive');\n }\n try {\n this.websocket.send(data);\n }\n catch (error) {\n console.log('[SpeechlyClient]', 'Server connection error', error);\n }\n };\n return WebsocketClient;\n}());\nvar ctx = self;\nvar websocketClient = new WebsocketClient(ctx);\nctx.onmessage = function (e) {\n switch (e.data.type) {\n case 'INIT':\n websocketClient.init(e.data.apiUrl, e.data.authToken, e.data.targetSampleRate, e.data.debug);\n break;\n case 'SET_SOURSE_SAMPLE_RATE':\n websocketClient.setSourceSampleRate(e.data.sourceSampleRate);\n break;\n case 'SET_SHARED_ARRAY_BUFFERS':\n websocketClient.setSharedArrayBuffers(e.data.controlSAB, e.data.dataSAB);\n break;\n case 'CLOSE':\n websocketClient.closeWebsocket(1000, \"Close requested by client\");\n break;\n case 'START_CONTEXT':\n websocketClient.startContext(e.data.appId);\n break;\n case 'SWITCH_CONTEXT':\n websocketClient.switchContext(e.data.appId);\n break;\n case 'STOP_CONTEXT':\n websocketClient.stopContext();\n break;\n case 'AUDIO':\n websocketClient.sendAudio(e.data.payload);\n break;\n default:\n console.log('WORKER', e);\n }\n};\nfunction float32ToInt16(buffer) {\n var buf = new Int16Array(buffer.length);\n for (var l = 0; l < buffer.length; l++) {\n buf[l] = buffer[l] * (buffer[l] < 0 ? 0x8000 : 0x7fff);\n }\n return buf;\n}\nfunction generateFilter(sourceSampleRate, targetSampleRate, length) {\n if (length % 2 === 0) {\n throw Error('Filter length must be odd');\n }\n var cutoff = targetSampleRate / 2;\n var filter = new Float32Array(length);\n var sum = 0;\n for (var i = 0; i < length; i++) {\n var x = sinc(((2 * cutoff) / sourceSampleRate) * (i - (length - 1) / 2));\n sum += x;\n filter[i] = x;\n }\n for (var i = 0; i < length; i++) {\n filter[i] = filter[i] / sum;\n }\n return filter;\n}\nfunction sinc(x) {\n if (x === 0.0) {\n return 1.0;\n }\n var piX = Math.PI * x;\n return Math.sin(piX) / piX;\n}\n";
declare const _default: "/**\n * Known WebSocket response types.\n * @public\n */\nvar WebsocketResponseType;\n(function (WebsocketResponseType) {\n WebsocketResponseType[\"Opened\"] = \"WEBSOCKET_OPEN\";\n WebsocketResponseType[\"SourceSampleRateSetSuccess\"] = \"SOURSE_SAMPLE_RATE_SET_SUCCESS\";\n WebsocketResponseType[\"Started\"] = \"started\";\n WebsocketResponseType[\"Stopped\"] = \"stopped\";\n})(WebsocketResponseType || (WebsocketResponseType = {}));\nvar CONTROL = {\n WRITE_INDEX: 0,\n FRAMES_AVAILABLE: 1,\n LOCK: 2\n};\nvar WebsocketClient = /** @class */ (function () {\n function WebsocketClient(ctx) {\n var _this = this;\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n this.shouldResendLastFramesSent = false;\n this.buffer = new Float32Array(0);\n this.lastFramesSent = new Int16Array(0); // to re-send after switch context\n this.debug = false;\n this.initialized = false;\n // WebSocket's close handler, called e.g. when\n // - normal close (code 1000)\n // - network unreachable or unable to (re)connect (code 1006)\n // List of CloseEvent.code values: https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent/code\n this.onWebsocketClose = function (event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'onWebsocketClose');\n }\n _this.websocket.removeEventListener('open', _this.onWebsocketOpen);\n _this.websocket.removeEventListener('message', _this.onWebsocketMessage);\n _this.websocket.removeEventListener('error', _this.onWebsocketError);\n _this.websocket.removeEventListener('close', _this.onWebsocketClose);\n _this.websocket = undefined;\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_CLOSED', code: event.code, reason: event.reason, wasClean: event.wasClean });\n };\n this.onWebsocketOpen = function (_event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'websocket opened');\n }\n if (_this.isContextStarted && !_this.isStartContextConfirmed) {\n _this.send(_this.outbox);\n }\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_OPEN' });\n };\n this.onWebsocketError = function (_event) {\n if (_this.debug) {\n console.log('[SpeechlyClient]', 'websocket error');\n }\n };\n this.onWebsocketMessage = function (event) {\n var response;\n try {\n response = JSON.parse(event.data);\n }\n catch (e) {\n console.error('[SpeechlyClient] Error parsing response from the server:', e);\n return;\n }\n if (response.type === WebsocketResponseType.Started) {\n _this.isStartContextConfirmed = true;\n if (_this.shouldResendLastFramesSent) {\n _this.resendLastFrames();\n _this.shouldResendLastFramesSent = false;\n }\n }\n _this.workerCtx.postMessage(response);\n };\n this.workerCtx = ctx;\n }\n WebsocketClient.prototype.init = function (apiUrl, authToken, targetSampleRate, debug) {\n this.debug = debug;\n if (this.debug) {\n console.log('[SpeechlyClient]', 'initialize worker');\n }\n this.apiUrl = apiUrl;\n this.authToken = authToken;\n this.targetSampleRate = targetSampleRate;\n this.initialized = true;\n this.isContextStarted = false;\n this.connect(0);\n };\n WebsocketClient.prototype.setSourceSampleRate = function (sourceSampleRate) {\n this.sourceSampleRate = sourceSampleRate;\n this.resampleRatio = this.sourceSampleRate / this.targetSampleRate;\n if (this.debug) {\n console.log('[SpeechlyClient]', 'resampleRatio', this.resampleRatio);\n }\n if (this.resampleRatio > 1) {\n this.filter = generateFilter(this.sourceSampleRate, this.targetSampleRate, 127);\n }\n this.workerCtx.postMessage({ type: 'SOURSE_SAMPLE_RATE_SET_SUCCESS' });\n if 
(isNaN(this.resampleRatio)) {\n throw Error(\"resampleRatio is NaN source rate is \".concat(this.sourceSampleRate, \" and target rate is \").concat(this.targetSampleRate));\n }\n };\n WebsocketClient.prototype.setSharedArrayBuffers = function (controlSAB, dataSAB) {\n this.controlSAB = new Int32Array(controlSAB);\n this.dataSAB = new Float32Array(dataSAB);\n var audioHandleInterval = this.dataSAB.length / 32; // ms\n if (this.debug) {\n console.log('[SpeechlyClient]', 'Audio handle interval', audioHandleInterval, 'ms');\n }\n setInterval(this.sendAudioFromSAB.bind(this), audioHandleInterval);\n };\n WebsocketClient.prototype.connect = function (timeout) {\n if (timeout === void 0) { timeout = 1000; }\n if (this.debug) {\n console.log('[SpeechlyClient]', 'connect in ', timeout / 1000, 'sec');\n }\n setTimeout(this.initializeWebsocket.bind(this), timeout);\n };\n WebsocketClient.prototype.initializeWebsocket = function () {\n if (this.debug) {\n console.log('[SpeechlyClient]', 'connecting to ', this.apiUrl);\n }\n this.websocket = new WebSocket(this.apiUrl, this.authToken);\n this.websocket.addEventListener('open', this.onWebsocketOpen);\n this.websocket.addEventListener('message', this.onWebsocketMessage);\n this.websocket.addEventListener('error', this.onWebsocketError);\n this.websocket.addEventListener('close', this.onWebsocketClose);\n };\n WebsocketClient.prototype.isOpen = function () {\n return this.websocket !== undefined && this.websocket.readyState === this.websocket.OPEN;\n };\n WebsocketClient.prototype.resendLastFrames = function () {\n if (this.lastFramesSent.length > 0) {\n this.send(this.lastFramesSent);\n this.lastFramesSent = new Int16Array(0);\n }\n };\n WebsocketClient.prototype.sendAudio = function (audioChunk) {\n if (!this.isContextStarted) {\n return;\n }\n if (audioChunk.length > 0) {\n if (this.resampleRatio > 1) {\n // Downsampling\n this.send(this.downsample(audioChunk));\n }\n else {\n this.send(float32ToInt16(audioChunk));\n }\n }\n };\n WebsocketClient.prototype.sendAudioFromSAB = function () {\n if (!this.isContextStarted) {\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n return;\n }\n if (this.controlSAB == undefined) {\n return;\n }\n var framesAvailable = this.controlSAB[CONTROL.FRAMES_AVAILABLE];\n var lock = this.controlSAB[CONTROL.LOCK];\n if (lock == 0 && framesAvailable > 0) {\n var data = this.dataSAB.subarray(0, framesAvailable);\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n if (data.length > 0) {\n var frames_1;\n if (this.resampleRatio > 1) {\n frames_1 = this.downsample(data);\n }\n else {\n frames_1 = float32ToInt16(data);\n }\n this.send(frames_1);\n // 16000 per second, 1000 in 100 ms\n // save last 250 ms\n if (this.lastFramesSent.length > 1024 * 4) {\n this.lastFramesSent = frames_1;\n }\n else {\n var concat = new Int16Array(this.lastFramesSent.length + frames_1.length);\n concat.set(this.lastFramesSent);\n concat.set(frames_1, this.lastFramesSent.length);\n this.lastFramesSent = concat;\n }\n }\n }\n };\n WebsocketClient.prototype.startContext = function (appId) {\n if (this.isContextStarted) {\n console.log('Cant start context: it has been already started');\n return;\n }\n this.isContextStarted = true;\n this.isStartContextConfirmed = false;\n if (appId !== undefined) {\n this.outbox = JSON.stringify({ event: 'start', appId: appId });\n }\n else {\n this.outbox = JSON.stringify({ event: 'start' });\n }\n this.send(this.outbox);\n };\n 
WebsocketClient.prototype.stopContext = function () {\n if (!this.websocket) {\n throw Error('Cant start context: websocket is undefined');\n }\n if (!this.isContextStarted) {\n console.log('Cant stop context: it is not started');\n return;\n }\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n };\n WebsocketClient.prototype.switchContext = function (newAppId) {\n if (!this.websocket) {\n throw Error('Cant switch context: websocket is undefined');\n }\n if (!this.isContextStarted) {\n console.log('Cant switch context: it is not started');\n return;\n }\n if (newAppId == undefined) {\n console.log('Cant switch context: new app id is undefined');\n return;\n }\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n this.shouldResendLastFramesSent = true;\n this.send(JSON.stringify({ event: 'start', appId: newAppId }));\n };\n WebsocketClient.prototype.closeWebsocket = function (websocketCode, reason) {\n if (websocketCode === void 0) { websocketCode = 1005; }\n if (reason === void 0) { reason = \"No Status Received\"; }\n if (this.debug) {\n console.log('[SpeechlyClient]', 'Websocket closing');\n }\n if (!this.websocket) {\n throw Error('Websocket is not open');\n }\n this.websocket.close(websocketCode, reason);\n };\n WebsocketClient.prototype.downsample = function (input) {\n var inputBuffer = new Float32Array(this.buffer.length + input.length);\n inputBuffer.set(this.buffer, 0);\n inputBuffer.set(input, this.buffer.length);\n var outputLength = Math.ceil((inputBuffer.length - this.filter.length) / this.resampleRatio);\n var outputBuffer = new Int16Array(outputLength);\n for (var i = 0; i < outputLength; i++) {\n var offset = Math.round(this.resampleRatio * i);\n var val = 0.0;\n for (var j = 0; j < this.filter.length; j++) {\n val += inputBuffer[offset + j] * this.filter[j];\n }\n outputBuffer[i] = val * (val < 0 ? 
0x8000 : 0x7fff);\n }\n var remainingOffset = Math.round(this.resampleRatio * outputLength);\n if (remainingOffset < inputBuffer.length) {\n this.buffer = inputBuffer.subarray(remainingOffset);\n }\n else {\n this.buffer = new Float32Array(0);\n }\n return outputBuffer;\n };\n WebsocketClient.prototype.send = function (data) {\n if (this.isOpen()) {\n try {\n this.websocket.send(data);\n }\n catch (error) {\n console.log('[SpeechlyClient]', 'Server connection error', error);\n }\n }\n };\n return WebsocketClient;\n}());\nvar ctx = self;\nvar websocketClient = new WebsocketClient(ctx);\nctx.onmessage = function (e) {\n switch (e.data.type) {\n case 'INIT':\n websocketClient.init(e.data.apiUrl, e.data.authToken, e.data.targetSampleRate, e.data.debug);\n break;\n case 'SET_SOURSE_SAMPLE_RATE':\n websocketClient.setSourceSampleRate(e.data.sourceSampleRate);\n break;\n case 'SET_SHARED_ARRAY_BUFFERS':\n websocketClient.setSharedArrayBuffers(e.data.controlSAB, e.data.dataSAB);\n break;\n case 'CLOSE':\n websocketClient.closeWebsocket(1000, \"Close requested by client\");\n break;\n case 'START_CONTEXT':\n websocketClient.startContext(e.data.appId);\n break;\n case 'SWITCH_CONTEXT':\n websocketClient.switchContext(e.data.appId);\n break;\n case 'STOP_CONTEXT':\n websocketClient.stopContext();\n break;\n case 'AUDIO':\n websocketClient.sendAudio(e.data.payload);\n break;\n default:\n console.log('WORKER', e);\n }\n};\nfunction float32ToInt16(buffer) {\n var buf = new Int16Array(buffer.length);\n for (var l = 0; l < buffer.length; l++) {\n buf[l] = buffer[l] * (buffer[l] < 0 ? 0x8000 : 0x7fff);\n }\n return buf;\n}\nfunction generateFilter(sourceSampleRate, targetSampleRate, length) {\n if (length % 2 === 0) {\n throw Error('Filter length must be odd');\n }\n var cutoff = targetSampleRate / 2;\n var filter = new Float32Array(length);\n var sum = 0;\n for (var i = 0; i < length; i++) {\n var x = sinc(((2 * cutoff) / sourceSampleRate) * (i - (length - 1) / 2));\n sum += x;\n filter[i] = x;\n }\n for (var i = 0; i < length; i++) {\n filter[i] = filter[i] / sum;\n }\n return filter;\n}\nfunction sinc(x) {\n if (x === 0.0) {\n return 1.0;\n }\n var piX = Math.PI * x;\n return Math.sin(piX) / piX;\n}\n";
export default _default;
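The worker source embedded above resamples microphone audio before streaming it: generateFilter builds a normalized sinc low-pass filter with the cutoff at half the target rate, downsample applies it while decimating, and float32ToInt16-style scaling turns the result into signed 16-bit samples. The sketch below restates that pipeline as standalone functions; it drops the worker's carry-over buffer between chunks, and the 48 kHz to 16 kHz rates are illustrative values only.

```typescript
// Sketch of the worker's resampling pipeline: normalized sinc low-pass filter,
// applied while decimating, followed by Float32 -> Int16 conversion.
function sinc(x: number): number {
  if (x === 0) return 1
  const piX = Math.PI * x
  return Math.sin(piX) / piX
}

// Build a low-pass filter with the cutoff at half the target rate (length must be odd).
function generateFilter(sourceSampleRate: number, targetSampleRate: number, length: number): Float32Array {
  if (length % 2 === 0) throw Error('Filter length must be odd')
  const cutoff = targetSampleRate / 2
  const filter = new Float32Array(length)
  let sum = 0
  for (let i = 0; i < length; i++) {
    const x = sinc(((2 * cutoff) / sourceSampleRate) * (i - (length - 1) / 2))
    sum += x
    filter[i] = x
  }
  for (let i = 0; i < length; i++) filter[i] /= sum // normalize to unity gain
  return filter
}

// Filter and decimate a Float32 chunk down to Int16 at the target rate.
function downsample(input: Float32Array, filter: Float32Array, resampleRatio: number): Int16Array {
  const outputLength = Math.max(Math.ceil((input.length - filter.length) / resampleRatio), 0)
  const output = new Int16Array(outputLength)
  for (let i = 0; i < output.length; i++) {
    const offset = Math.round(resampleRatio * i)
    let val = 0
    for (let j = 0; j < filter.length; j++) {
      val += input[offset + j] * filter[j]
    }
    // Scale [-1, 1] floats into the signed 16-bit range.
    output[i] = val * (val < 0 ? 0x8000 : 0x7fff)
  }
  return output
}

// Example: 48 kHz microphone audio down to a 16 kHz target rate.
const filter = generateFilter(48000, 16000, 127)
const pcm = downsample(new Float32Array(4096), filter, 48000 / 16000)
console.log('output samples:', pcm.length)
```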
{
"name": "@speechly/browser-client",
"version": "1.3.0",
"version": "1.4.0",
"description": "Browser client for Speechly API",

@@ -5,0 +5,0 @@ "keywords": [

@@ -31,3 +31,3 @@ <div align="center" markdown="1">

NOTE: If you are using React, you can use our [React client](https://github.com/speechly/speechly/libraries/react-client) instead. It provides the same functionality with a programming model that is idiomatic to React.
NOTE: If you are using React, you can use our [React client](https://github.com/speechly/speechly/tree/main/libraries/react-client) instead. It provides the same functionality with a programming model that is idiomatic to React.

@@ -64,7 +64,7 @@ ## Usage with Node

// Start recording.
// Ideally this should be bound to e.g. a button press.
// This can be bound to e.g. a button press.
await client.startContext()
// Stop recording after a timeout.
// Ideally this should be bound to e.g. a button press.
// This can be bound to e.g. a button press.
setTimeout(async function() {

@@ -85,13 +85,12 @@ await client.stopContext()

<input id="textBox" type="text" placeholder="Hold to talk..." autofocus>
<input id="textBox" type="text" placeholder="Hold to talk..." autofocus />
<script type="module">
// Load Speechly ES module from a CDN. Note script type="module"
import { Client, ClientState } from "https://unpkg.com/@speechly/browser-client/core/speechly.es.js"
import { Client } from "../core/speechly.es.js"
const widget = document.getElementById("textBox")
let clientState = ClientState.Disconnected;
// Create a Speechly client instance. NOTE: Configure and get your appId from https://api.speechly.com/dashboard
const client = new Client({
const speechly = new Client({
appId: "your-app-id",

@@ -102,7 +101,3 @@ debug: true,

client.onStateChange(state => {
clientState = state;
});
client.onSegmentChange(segment => {
speechly.onSegmentChange(segment => {
// Clean up and concatenate words

@@ -115,20 +110,10 @@ let transcript = segment.words.map(w => w.value.toLowerCase()).filter(w => w !== "").join(" ");

const startListening = async () => {
switch (clientState) {
case ClientState.Disconnected:
await client.initialize();
// fall through
case ClientState.Connected:
widget.value = "Listening..."
client.startContext();
break;
}
speechly.startContext();
}
const stopListening = () => {
switch (clientState) {
case ClientState.Starting:
case ClientState.Recording:
client.stopContext();
break;
if (speechly.isListening()) {
speechly.stopContext();
}

@@ -135,0 +120,0 @@ }
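The README changes above drop the manual ClientState bookkeeping in favour of the new isListening() helper, and startContext() now performs connection and microphone initialization on first use. A condensed sketch of the resulting push-to-talk flow, assuming a bundler-style import of the package and a placeholder app id:

```typescript
// Sketch of the simplified 1.4.0 push-to-talk flow from the README diff above.
// "your-app-id" is a placeholder; get a real one from the Speechly dashboard.
import { Client } from '@speechly/browser-client'

const speechly = new Client({ appId: 'your-app-id', debug: true })

speechly.onSegmentChange(segment => {
  // Clean up and concatenate the words recognised so far.
  const transcript = segment.words
    .filter(w => w && w.value !== '')
    .map(w => w.value.toLowerCase())
    .join(' ')
  console.log(segment.isFinal ? 'Final:' : 'Tentative:', transcript)
})

// Bind these to e.g. pointerdown / pointerup on a button.
async function startListening(): Promise<void> {
  // Initializes the connection and microphone on first use, then starts a context.
  await speechly.startContext()
}

async function stopListening(): Promise<void> {
  if (speechly.isListening()) {
    await speechly.stopContext()
  }
}
```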

@@ -61,2 +61,3 @@ import { v4 as uuidv4 } from 'uuid'

export class Client {
private listening: boolean = false
private readonly debug: boolean

@@ -79,7 +80,6 @@ private readonly logSegments: boolean

private connectAttempt: number = 0
private stoppedContextIdPromise?: Promise<string>
private connectPromise: Promise<void> | null
private initializePromise: Promise<void> | null
private connectPromise: Promise<void> | null = null
private initializePromise: Promise<void> | null = null
private listeningPromise: Promise<any> | null = null
private resolveStopContext?: (value?: unknown) => void
private readonly deviceId: string

@@ -146,5 +146,2 @@ private authToken?: string

this.connectPromise = null
this.initializePromise = null
window.SpeechlyClient = this

@@ -167,2 +164,9 @@

/**
* @returns true if startContext is called and expecting stopContext next
*/
public isListening(): boolean {
return this.listening
}
/**
* Connect to Speechly backend.

@@ -176,3 +180,3 @@ * This function will be called by initialize if not manually called earlier.

this.connectPromise = (async () => {
await this.sleep(this.getReconnectDelayMs(this.connectAttempt++))
this.advanceState(ClientState.Connecting)
// Get auth token from cache or renew it

@@ -203,2 +207,3 @@ const storedToken = this.storage.get(authTokenKey)

await this.connectPromise
this.advanceState(ClientState.Preinitialized)
}

@@ -217,7 +222,6 @@

// Ensure we're connected. Returns immediately if we are
await this.connect()
if (this.initializePromise === null) {
this.initializePromise = (async () => {
this.setState(ClientState.Connecting)
await this.connect()
this.advanceState(ClientState.Initializing)
try {

@@ -263,5 +267,5 @@ if (this.isWebkit) {

}
// 3. Initialise websocket.
await this.apiClient.setSourceSampleRate(this.audioContext.sampleRate)
await this.microphone.initialize(this.audioContext, mediaStreamConstraints)
this.advanceState(ClientState.Connected)
} else {

@@ -284,7 +288,6 @@ throw ErrDeviceNotSupported

}
this.setState(ClientState.Connected)
})()
}
await this.initializePromise
this.advanceState(ClientState.Connected)
}

@@ -322,15 +325,15 @@

/**
* Stops current context and immediately starts a new SLU context
* by sending a start context event to the API and unmuting the microphone.
* @param appId - unique identifier of an app in the dashboard.
*/
async switchContext(appId: string): Promise<void> {
if (this.state === ClientState.Recording) {
this.resolveStopContext = undefined
const contextId = await this.apiClient.switchContext(appId)
this.activeContexts.set(contextId, new Map<number, SegmentState>())
}
private hasUnrecoverableError(): boolean {
return this.state < ClientState.__UnrecoverableErrors
}
private async queueTask(task: () => Promise<any>): Promise<any> {
const prevTask = this.listeningPromise
this.listeningPromise = (async () => {
await prevTask
return task()
})()
return this.listeningPromise
}
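The new queueTask helper shown here serializes startContext, stopContext and switchContext by chaining every task onto the stored listeningPromise, so microphone and websocket operations never interleave. A standalone sketch of that promise-chaining pattern with generic names rather than the client's actual API; as in the original, a rejected task propagates to the tasks queued after it:

```typescript
// Standalone sketch of the queueTask pattern: every task is chained onto the
// previous one through a stored promise, so queued operations run strictly in
// submission order.
class TaskQueue {
  private pending: Promise<unknown> | null = null

  queue<T>(task: () => Promise<T>): Promise<T> {
    const prev = this.pending
    const next = (async () => {
      await prev // wait for the previously queued task (rejections propagate)
      return task()
    })()
    this.pending = next
    return next
  }
}

// The second task only starts after the first one has settled.
const queue = new TaskQueue()
void queue.queue(async () => console.log('start context'))
void queue.queue(async () => console.log('stop context'))
```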
/**

@@ -340,48 +343,46 @@ * Starts a new SLU context by sending a start context event to the API and unmuting the microphone.

*/
async startContext(appId?: string): Promise<string> {
// Ensure we're initialized; returns immediately if we are
await this.initialize()
public async startContext(appId?: string): Promise<string> {
if (!this.hasUnrecoverableError()) {
if (this.listening) {
throw Error('Already listening')
}
this.listening = true
if (this.resolveStopContext != null) {
this.resolveStopContext()
await this.stoppedContextIdPromise
}
const contextId = await this.queueTask(async () => {
if (this.state < ClientState.Connected) {
await this.initialize()
}
if (this.state !== ClientState.Connected) {
throw Error('[SpeechlyClient] Unable to complete startContext: Expected Connected state, but was in ' + stateToString(this.state) + '. Did you call startContext multiple times without stopContext?')
}
this.setState(ClientState.Starting)
if (this.state === ClientState.Disconnected || this.state === ClientState.Connecting) {
throw Error('Cannot start context - client is not connected')
}
this.microphone.unmute()
this.setState(ClientState.Starting)
const contextId: string = await this._startContext(appId)
return contextId
}
// Fetch context id
let contextId: string
if (this.projectId != null) {
contextId = await this.apiClient.startContext(appId)
} else {
if (appId != null && this.appId !== appId) {
this.setState(ClientState.Failed)
throw ErrAppIdChangeWithoutProjectLogin
}
contextId = await this.apiClient.startContext()
}
private async _startContext(appId?: string): Promise<string> {
let contextId: string
try {
if (this.projectId != null) {
contextId = await this.apiClient.startContext(appId)
} else {
if (appId != null && this.appId !== appId) {
throw ErrAppIdChangeWithoutProjectLogin
// Ensure state has not been changed by await apiClient.startContext() due to websocket errors.
// Due to apiClient.startContext implementation, they don't throw an error here, but call handleWebsocketClosure instead which changes to ClientState.Disconnected
// @ts-ignore
if (this.state !== ClientState.Starting) {
throw Error('[SpeechlyClient] Unable to complete startContext: Problem acquiring contextId')
}
contextId = await this.apiClient.startContext()
}
} catch (err) {
switch (err) {
case ErrAppIdChangeWithoutProjectLogin:
this.setState(ClientState.Failed)
break
default:
this.setState(ClientState.Connected)
}
throw err
this.activeContexts.set(contextId, new Map<number, SegmentState>())
this.setState(ClientState.Recording)
return contextId
})
return contextId
}
this.setState(ClientState.Recording)
this.microphone.unmute()
this.activeContexts.set(contextId, new Map<number, SegmentState>())
return contextId
throw Error('[SpeechlyClient] startContext cannot be run in unrecoverable error state.')
}

@@ -394,49 +395,44 @@

async stopContext(): Promise<string> {
if (this.state !== ClientState.Recording && this.state !== ClientState.Starting) {
throw Error('Cannot stop context - client is not recording')
if (!this.hasUnrecoverableError()) {
if (!this.listening) {
throw Error('Already stopped listening')
}
this.listening = false
const contextId = await this.queueTask(async () => {
if (this.state !== ClientState.Recording) {
throw Error('[SpeechlyClient] Unable to complete stopContext: Expected Recording state, but was in ' + stateToString(this.state) + '.')
}
this.setState(ClientState.Stopping)
await this.sleep(this.contextStopDelay)
this.microphone.mute()
try {
const contextId = await this.apiClient.stopContext()
this.activeContexts.delete(contextId)
this.setState(ClientState.Connected)
return contextId
} catch (err) {
this.setState(ClientState.Failed)
throw err
}
})
return contextId
}
throw Error('[SpeechlyClient] stopContext cannot be run in unrecoverable error state.')
}
this.setState(ClientState.Stopping)
this.stoppedContextIdPromise = new Promise(resolve => {
Promise.race([
new Promise(resolve => setTimeout(resolve, this.contextStopDelay)), // timeout
new Promise(resolve => {
this.resolveStopContext = resolve
}),
])
.then(() => {
this._stopContext()
.then(id => {
resolve(id)
})
.catch(err => {
throw err
})
})
.catch(err => {
throw err
})
/**
* Stops current context and immediately starts a new SLU context
* by sending a start context event to the API and unmuting the microphone.
* @param appId - unique identifier of an app in the dashboard.
*/
async switchContext(appId: string): Promise<void> {
await this.queueTask(async () => {
if (this.state !== ClientState.Recording) {
throw Error('[SpeechlyClient] Unable to complete switchContext: Expected Recording state, but was in ' + stateToString(this.state) + '.')
}
const contextId = await this.apiClient.switchContext(appId)
this.activeContexts.set(contextId, new Map<number, SegmentState>())
})
const contextId: string = await this.stoppedContextIdPromise
this.setState(ClientState.Connected)
this.activeContexts.delete(contextId)
return contextId
}
private async _stopContext(): Promise<string> {
this.microphone.mute()
let contextId: string
try {
contextId = await this.apiClient.stopContext()
} catch (err) {
this.setState(ClientState.Failed)
throw err
}
return contextId
}
/**

@@ -587,5 +583,3 @@ * Adds a listener for client state change events.

} else {
if (this.debug) {
console.error('[SpeechlyClient]', 'Websocket closed due to error', err)
}
console.error('[SpeechlyClient]', 'Websocket closed due to error', err)

@@ -598,2 +592,9 @@ // If for some reason deviceId is missing, there's nothing else we can do but fail completely.

// Reset
this.listening = false
this.listeningPromise = null
this.microphone.mute()
this.setState(ClientState.Disconnected)
// eslint-disable-next-line @typescript-eslint/no-floating-promises
this.reconnect()

@@ -603,16 +604,23 @@ }

private reconnect(): void {
private async reconnect(): Promise<void> {
if (this.debug) {
console.log('[SpeechlyClient]', 'Reconnecting...', this.connectAttempt)
}
if (this.state !== ClientState.Failed && this.connectAttempt < this.maxReconnectAttemptCount) {
this.connectPromise = null
this.connectPromise = null
if (!this.hasUnrecoverableError() && this.connectAttempt < this.maxReconnectAttemptCount) {
await this.sleep(this.getReconnectDelayMs(this.connectAttempt++))
// eslint-disable-next-line @typescript-eslint/no-floating-promises
this.connect()
await this.connect()
} else {
console.error('[SpeechlyClient] Maximum reconnect count reached, giving up.')
this.setState(ClientState.Failed)
console.error('[SpeechlyClient] Maximum reconnect count reached, giving up automatic reconnect.')
}
}
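reconnect() is now awaited and backs off exponentially: the bundled source above computes the delay as 100 ms times 2^attempt and gives up after maxReconnectAttemptCount (10) attempts or once the client is in an unrecoverable state. The loop below condenses that behaviour into a single function for illustration; in the client itself the attempt counter persists across websocket-closure callbacks rather than living in a loop.

```typescript
// Sketch of the reconnect backoff, assuming the constants visible in the
// bundled source: base delay 100 ms, doubling per attempt, at most 10 attempts.
const maxReconnectAttemptCount = 10

function getReconnectDelayMs(attempt: number): number {
  return 100 * Math.pow(2, attempt) // 100, 200, 400, ... ms
}

const sleep = async (ms: number): Promise<void> =>
  new Promise(resolve => setTimeout(resolve, ms))

// connect() is a stand-in for the client's real connection routine.
async function reconnect(connect: () => Promise<void>): Promise<void> {
  for (let attempt = 0; attempt < maxReconnectAttemptCount; attempt++) {
    await sleep(getReconnectDelayMs(attempt))
    try {
      await connect()
      return
    } catch {
      // fall through to the next, longer delay
    }
  }
  console.error('Maximum reconnect count reached, giving up automatic reconnect.')
}
```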
private advanceState(newState: ClientState): void {
if (this.state >= newState) {
return
}
this.setState(newState)
}
private setState(newState: ClientState): void {

@@ -624,3 +632,3 @@ if (this.state === newState) {

if (this.debug) {
console.log('[SpeechlyClient]', 'State transition', stateToString(this.state), stateToString(newState))
console.log('[SpeechlyClient]', stateToString(this.state), '->', stateToString(newState))
}

@@ -627,0 +635,0 @@

@@ -21,6 +21,8 @@ import { ClientState } from './types'

[ClientState.Connecting, 'Connecting'],
[ClientState.Preinitialized, 'Preinitialized'],
[ClientState.Initializing, 'Initializing'],
[ClientState.Connected, 'Connected'],
[ClientState.Stopping, 'Stopping'],
[ClientState.Starting, 'Starting'],
[ClientState.Stopping, 'Stopping'],
[ClientState.Recording, 'Recording'],
])

@@ -134,8 +134,11 @@ import { Microphone } from '../microphone'

NoAudioConsent,
__UnrecoverableErrors,
Disconnected,
Disconnecting,
Connecting,
Preinitialized,
Initializing,
Connected,
Stopping,
Starting,
Stopping,
Recording,

@@ -142,0 +145,0 @@ }

@@ -40,2 +40,6 @@ import { APIClient, ResponseCallback, CloseCallback, WebsocketResponse, WebsocketResponseType } from './types'

// Reset
this.startCbs = []
this.stopCbs = []
return new Promise(resolve => {

@@ -42,0 +46,0 @@ this.resolveInitialization = resolve

@@ -85,2 +85,3 @@ export default `/**

this.initialized = true;
this.isContextStarted = false;
this.connect(0);

@@ -270,11 +271,10 @@ };

WebsocketClient.prototype.send = function (data) {
if (!this.isOpen()) {
throw Error('Cant send data: websocket is inactive');
if (this.isOpen()) {
try {
this.websocket.send(data);
}
catch (error) {
console.log('[SpeechlyClient]', 'Server connection error', error);
}
}
try {
this.websocket.send(data);
}
catch (error) {
console.log('[SpeechlyClient]', 'Server connection error', error);
}
};

@@ -281,0 +281,0 @@ return WebsocketClient;

