@speechly/browser-client: npm package version comparison

Comparing version 2.0.1 to 2.1.0-beta.0
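
The most visible change in this release is voice activity detection (VAD): the 2.1.0-beta.0 bundle adds a defaults object for VAD options (enabled: false, controlListening: true, signalToNoiseDb: 3, noiseGateDb: -24, noiseLearnHalftimeMillis: 400, signalSearchFrames: 5, signalActivation: 0.7, signalRelease: 0.2, signalSustainMillis: 3000), an onVadStateChange callback list on EventCallbacks, and new startStream/stopStream worker commands. The sketch below shows how these options would plausibly be passed; it is inferred from the minified bundle (the worker's error text refers to a `vad` key in the BrowserClient constructor parameters), not from published documentation, and the app id is a placeholder.

// A minimal usage sketch, assuming option names reconstructed from the
// minified 2.1.0-beta.0 bundle; the published API may differ.
import { BrowserClient, BrowserMicrophone } from '@speechly/browser-client'

const microphone = new BrowserMicrophone()
const client = new BrowserClient({
  appId: 'your-app-id',     // placeholder, not a real app id
  vad: {
    enabled: true,          // bundle default is false
    controlListening: true, // let VAD start/stop listening automatically
    noiseGateDb: -24,       // bundle default
    signalToNoiseDb: 3,     // bundle default
  },
})

client.onStateChange((state) => console.log('decoder state:', state))

async function startListening() {
  await microphone.initialize() // requests microphone consent
  if (microphone.mediaStream) {
    await client.attach(microphone.mediaStream) // route mic audio to the decoder
  }
}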

Changed files:

core/types/audioprocessing/AudioProcessor.d.ts (4 changes)
core/speechly.umd.min.js (diff shown below)
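
For orientation before the minified diff: 2.0.1 routed every worker and websocket message through a single WebsocketResponseType enum, while 2.1.0-beta.0 splits worker-to-client signals and client-to-worker commands into two new enums, WorkerSignal and ControllerSignal, leaving WebsocketResponseType for decoder responses only. Reconstructed from the minified bundle (a sketch of the shape, not the package's published .d.ts):

// Worker-to-client signals (values copied from the minified bundle).
enum WorkerSignal {
  Opened = 'WEBSOCKET_OPEN',
  Closed = 'WEBSOCKET_CLOSED',
  AudioProcessorReady = 'SOURCE_SAMPLE_RATE_SET_SUCCESS',
  VadSignalHigh = 'VadSignalHigh',
  VadSignalLow = 'VadSignalLow',
}

// Client-to-worker commands (values copied from the minified bundle).
enum ControllerSignal {
  connect = 'connect',
  initAudioProcessor = 'initAudioProcessor',
  adjustAudioProcessor = 'adjustAudioProcessor',
  SET_SHARED_ARRAY_BUFFERS = 'SET_SHARED_ARRAY_BUFFERS',
  CLOSE = 'CLOSE',
  START_CONTEXT = 'START_CONTEXT',
  SWITCH_CONTEXT = 'SWITCH_CONTEXT',
  STOP_CONTEXT = 'STOP_CONTEXT',
  AUDIO = 'AUDIO',
  startStream = 'startStream',
  stopStream = 'stopStream',
}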

@@ -1,4 +1,4 @@

!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).Speechly={})}(this,(function(t){"use strict";const e=new Error("Current device does not support microphone API"),n=new Error("AppId changed without project login"),i=16e3;class s{constructor(t,e){this.isFinalized=!1,this.words=[],this.entities=new Map,this.intent={intent:"",isFinal:!1},this.contextId=t,this.id=e}toSegment(){let t=0;const e=new Array(this.entities.size);return this.entities.forEach((n=>{e[t]=n,t++})),{id:this.id,contextId:this.contextId,isFinal:this.isFinalized,words:this.words,entities:e,intent:this.intent}}toString(){const t=this.toSegment(),e=t.words.filter((t=>t.value)).map((t=>({value:t.value,index:t.index}))),n=Object.assign(Object.assign({},t),{words:e});return JSON.stringify(n,null,2)}updateTranscript(t){return t.forEach((t=>{this.isFinalized&&!t.isFinal||(this.words[t.index]=t)})),this}updateEntities(t){return t.forEach((t=>{this.isFinalized&&!t.isFinal||this.entities.set(function(t){return`${t.startPosition.toString()}:${t.endPosition.toString()}`}(t),t)})),this}updateIntent(t){return this.isFinalized&&!t.isFinal||(this.intent=t),this}finalize(){return this.entities.forEach(((t,e)=>{t.isFinal||this.entities.delete(e)})),this.words=this.words.filter((t=>t.isFinal)),this.intent.isFinal||(this.intent.intent="",this.intent.isFinal=!0),this.isFinalized=!0,this}}function o(t,e,n,i){return new(n||(n=Promise))((function(s,o){function r(t){try{c(i.next(t))}catch(t){o(t)}}function a(t){try{c(i.throw(t))}catch(t){o(t)}}function c(t){var e;t.done?s(t.value):(e=t.value,e instanceof n?e:new n((function(t){t(e)}))).then(r,a)}c((i=i.apply(t,e||[])).next())}))}const r=new Error("Microphone is not initialized"),a=new Error("Microphone is already initialized"),c=new Error("Microphone consent is not given");var d;t.WebsocketResponseType=void 0,(d=t.WebsocketResponseType||(t.WebsocketResponseType={})).Opened="WEBSOCKET_OPEN",d.Closed="WEBSOCKET_CLOSED",d.SourceSampleRateSetSuccess="SOURCE_SAMPLE_RATE_SET_SUCCESS",d.Started="started",d.Stopped="stopped",d.SegmentEnd="segment_end",d.Transcript="transcript",d.Entity="entity",d.Intent="intent",d.TentativeTranscript="tentative_transcript",d.TentativeEntities="tentative_entities",d.TentativeIntent="tentative_intent";const l=new Error("Current device does not support storage API"),h=new Error("Requested key was not present in storage");var u;t.DecoderState=void 0,(u=t.DecoderState||(t.DecoderState={}))[u.Failed=0]="Failed",u[u.Disconnected=1]="Disconnected",u[u.Connected=2]="Connected",u[u.Active=3]="Active";class p{constructor(){this.stateChangeCbs=[],this.transcriptCbs=[],this.entityCbs=[],this.intentCbs=[],this.segmentChangeCbs=[],this.tentativeTranscriptCbs=[],this.tentativeEntityCbs=[],this.tentativeIntentCbs=[],this.contextStartedCbs=[],this.contextStoppedCbs=[]}}function f(t){var e;return null!==(e=S.get(t))&&void 0!==e?e:"unknown"}const S=new Map([[t.DecoderState.Failed,"Failed"],[t.DecoderState.Disconnected,"Disconnected"],[t.DecoderState.Connected,"Connected"],[t.DecoderState.Active,"Active"]]);var v,b=new Uint8Array(16);function C(){if(!v&&!(v="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return v(b)}var g=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function m(t){return"string"==typeof t&&g.test(t)}for(var w=[],y=0;y<256;++y)w.push((y+256).toString(16).substr(1));function k(t,e,n){var i=(t=t||{}).random||(t.rng||C)();if(i[6]=15&i[6]|64,i[8]=63&i[8]|128,e){n=n||0;for(var s=0;s<16;++s)e[n+s]=i[s];return e}return function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=(w[t[e+0]]+w[t[e+1]]+w[t[e+2]]+w[t[e+3]]+"-"+w[t[e+4]]+w[t[e+5]]+"-"+w[t[e+6]]+w[t[e+7]]+"-"+w[t[e+8]]+w[t[e+9]]+"-"+w[t[e+10]]+w[t[e+11]]+w[t[e+12]]+w[t[e+13]]+w[t[e+14]]+w[t[e+15]]).toLowerCase();if(!m(n))throw TypeError("Stringified UUID is invalid");return n}(i)}var E="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},A={exports:{}};
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports):"function"==typeof define&&define.amd?define(["exports"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self).Speechly={})}(this,(function(t){"use strict";const e=new Error("Current device does not support microphone API"),i=new Error("AppId changed without project login"),n=16e3;class s{constructor(t,e){this.isFinalized=!1,this.words=[],this.entities=new Map,this.intent={intent:"",isFinal:!1},this.contextId=t,this.id=e}toSegment(){let t=0;const e=new Array(this.entities.size);return this.entities.forEach((i=>{e[t]=i,t++})),{id:this.id,contextId:this.contextId,isFinal:this.isFinalized,words:this.words,entities:e,intent:this.intent}}toString(){const t=this.toSegment(),e=t.words.filter((t=>t.value)).map((t=>({value:t.value,index:t.index}))),i=Object.assign(Object.assign({},t),{words:e});return JSON.stringify(i,null,2)}updateTranscript(t){return t.forEach((t=>{this.isFinalized&&!t.isFinal||(this.words[t.index]=t)})),this}updateEntities(t){return t.forEach((t=>{this.isFinalized&&!t.isFinal||this.entities.set(function(t){return`${t.startPosition.toString()}:${t.endPosition.toString()}`}(t),t)})),this}updateIntent(t){return this.isFinalized&&!t.isFinal||(this.intent=t),this}finalize(){return this.entities.forEach(((t,e)=>{t.isFinal||this.entities.delete(e)})),this.words=this.words.filter((t=>t.isFinal)),this.intent.isFinal||(this.intent.intent="",this.intent.isFinal=!0),this.isFinalized=!0,this}}function o(t,e,i,n){return new(i||(i=Promise))((function(s,o){function a(t){try{c(n.next(t))}catch(t){o(t)}}function d(t){try{c(n.throw(t))}catch(t){o(t)}}function c(t){var e;t.done?s(t.value):(e=t.value,e instanceof i?e:new i((function(t){t(e)}))).then(a,d)}c((n=n.apply(t,e||[])).next())}))}const a=new Error("Microphone is not initialized"),d=new Error("Microphone is already initialized"),c=new Error("Microphone consent is not given");var l,r,h;t.WebsocketResponseType=void 0,(l=t.WebsocketResponseType||(t.WebsocketResponseType={})).Started="started",l.Stopped="stopped",l.SegmentEnd="segment_end",l.Transcript="transcript",l.Entity="entity",l.Intent="intent",l.TentativeTranscript="tentative_transcript",l.TentativeEntities="tentative_entities",l.TentativeIntent="tentative_intent",t.WorkerSignal=void 0,(r=t.WorkerSignal||(t.WorkerSignal={})).Opened="WEBSOCKET_OPEN",r.Closed="WEBSOCKET_CLOSED",r.AudioProcessorReady="SOURCE_SAMPLE_RATE_SET_SUCCESS",r.VadSignalHigh="VadSignalHigh",r.VadSignalLow="VadSignalLow",t.ControllerSignal=void 0,(h=t.ControllerSignal||(t.ControllerSignal={})).connect="connect",h.initAudioProcessor="initAudioProcessor",h.adjustAudioProcessor="adjustAudioProcessor",h.SET_SHARED_ARRAY_BUFFERS="SET_SHARED_ARRAY_BUFFERS",h.CLOSE="CLOSE",h.START_CONTEXT="START_CONTEXT",h.SWITCH_CONTEXT="SWITCH_CONTEXT",h.STOP_CONTEXT="STOP_CONTEXT",h.AUDIO="AUDIO",h.startStream="startStream",h.stopStream="stopStream";const u=new Error("Current device does not support storage API"),p=new Error("Requested key was not present in storage"),b={enabled:!1,controlListening:!0,signalToNoiseDb:3,noiseGateDb:-24,noiseLearnHalftimeMillis:400,signalSearchFrames:5,signalActivation:.7,signalRelease:.2,signalSustainMillis:3e3};var m;t.DecoderState=void 0,(m=t.DecoderState||(t.DecoderState={}))[m.Failed=0]="Failed",m[m.Disconnected=1]="Disconnected",m[m.Connected=2]="Connected",m[m.Active=3]="Active";class 
Z{constructor(){this.stateChangeCbs=[],this.transcriptCbs=[],this.entityCbs=[],this.intentCbs=[],this.segmentChangeCbs=[],this.tentativeTranscriptCbs=[],this.tentativeEntityCbs=[],this.tentativeIntentCbs=[],this.contextStartedCbs=[],this.contextStoppedCbs=[],this.onVadStateChange=[]}}function y(t){var e;return null!==(e=G.get(t))&&void 0!==e?e:"unknown"}const G=new Map([[t.DecoderState.Failed,"Failed"],[t.DecoderState.Disconnected,"Disconnected"],[t.DecoderState.Connected,"Connected"],[t.DecoderState.Active,"Active"]]);var v,W=new Uint8Array(16);function X(){if(!v&&!(v="undefined"!=typeof crypto&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto)||"undefined"!=typeof msCrypto&&"function"==typeof msCrypto.getRandomValues&&msCrypto.getRandomValues.bind(msCrypto)))throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return v(W)}var R=/^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;function V(t){return"string"==typeof t&&R.test(t)}for(var S=[],C=0;C<256;++C)S.push((C+256).toString(16).substr(1));function g(t,e,i){var n=(t=t||{}).random||(t.rng||X)();if(n[6]=15&n[6]|64,n[8]=63&n[8]|128,e){i=i||0;for(var s=0;s<16;++s)e[i+s]=n[s];return e}return function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,i=(S[t[e+0]]+S[t[e+1]]+S[t[e+2]]+S[t[e+3]]+"-"+S[t[e+4]]+S[t[e+5]]+"-"+S[t[e+6]]+S[t[e+7]]+"-"+S[t[e+8]]+S[t[e+9]]+"-"+S[t[e+10]]+S[t[e+11]]+S[t[e+12]]+S[t[e+13]]+S[t[e+14]]+S[t[e+15]]).toLowerCase();if(!V(i))throw TypeError("Stringified UUID is invalid");return i}(n)}var N="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:{},Y={exports:{}};
/*! http://mths.be/base64 v0.1.0 by @mathias | MIT license */
!function(t,e){!function(n){var i=e,s=t&&t.exports==i&&t,o="object"==typeof E&&E;o.global!==o&&o.window!==o||(n=o);var r=function(t){this.message=t};(r.prototype=new Error).name="InvalidCharacterError";var a=function(t){throw new r(t)},c="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",d=/[\t\n\f\r ]/g,l={encode:function(t){t=String(t),/[^\0-\xFF]/.test(t)&&a("The string to be encoded contains characters outside of the Latin1 range.");for(var e,n,i,s,o=t.length%3,r="",d=-1,l=t.length-o;++d<l;)e=t.charCodeAt(d)<<16,n=t.charCodeAt(++d)<<8,i=t.charCodeAt(++d),r+=c.charAt((s=e+n+i)>>18&63)+c.charAt(s>>12&63)+c.charAt(s>>6&63)+c.charAt(63&s);return 2==o?(e=t.charCodeAt(d)<<8,n=t.charCodeAt(++d),r+=c.charAt((s=e+n)>>10)+c.charAt(s>>4&63)+c.charAt(s<<2&63)+"="):1==o&&(s=t.charCodeAt(d),r+=c.charAt(s>>2)+c.charAt(s<<4&63)+"=="),r},decode:function(t){var e=(t=String(t).replace(d,"")).length;e%4==0&&(e=(t=t.replace(/==?$/,"")).length),(e%4==1||/[^+a-zA-Z0-9/]/.test(t))&&a("Invalid character: the string to be decoded is not correctly encoded.");for(var n,i,s=0,o="",r=-1;++r<e;)i=c.indexOf(t.charAt(r)),n=s%4?64*n+i:i,s++%4&&(o+=String.fromCharCode(255&n>>(-2*s&6)));return o},version:"0.1.0"};if(i&&!i.nodeType)if(s)s.exports=l;else for(var h in l)l.hasOwnProperty(h)&&(i[h]=l[h]);else n.base64=l}(E)}(A,A.exports);function R(t,e,n,i,s=Date.now){const o=function(t){const e=t.split(".")[1];let n;try{n=JSON.parse(A.exports.decode(e))}catch(t){throw new Error("Error decoding Speechly token!")}return{appId:n.appId,projectId:n.projectId,deviceId:n.deviceId,configId:n.configId,scopes:n.scope.split(" "),issuer:n.iss,audience:n.aud,expiresAtMs:1e3*n.exp}}(t);return!(o.expiresAtMs-s()<36e5)&&(o.appId===n&&o.projectId===e&&o.deviceId===i)}class x{constructor(){this.startCbs=[],this.stopCbs=[],this.onResponseCb=()=>{},this.onCloseCb=()=>{},this.onWebsocketMessage=e=>{const n=e.data;switch(n.type){case t.WebsocketResponseType.Opened:null!=this.resolveInitialization&&this.resolveInitialization();break;case t.WebsocketResponseType.Closed:this.onCloseCb({code:e.data.code,reason:e.data.reason,wasClean:e.data.wasClean});break;case t.WebsocketResponseType.SourceSampleRateSetSuccess:null!=this.resolveSourceSampleRateSet&&this.resolveSourceSampleRateSet();break;case t.WebsocketResponseType.Started:this.startCbs.forEach((t=>{try{t(void 0,n.audio_context)}catch(t){console.error('[SpeechlyClient] Error while invoking "onStart" callback:',t)}})),this.startCbs.length=0;break;case t.WebsocketResponseType.Stopped:this.stopCbs.forEach((t=>{try{t(void 0,n.audio_context)}catch(t){console.error('[SpeechlyClient] Error while invoking "onStop" callback:',t)}})),this.stopCbs.length=0;break;default:this.onResponseCb(n)}};const e=new Blob(["/**\n * Known WebSocket response types.\n * @public\n */\nvar WebsocketResponseType;\n(function (WebsocketResponseType) {\n WebsocketResponseType[\"Opened\"] = \"WEBSOCKET_OPEN\";\n WebsocketResponseType[\"SourceSampleRateSetSuccess\"] = \"SOURCE_SAMPLE_RATE_SET_SUCCESS\";\n WebsocketResponseType[\"Started\"] = \"started\";\n WebsocketResponseType[\"Stopped\"] = \"stopped\";\n})(WebsocketResponseType || (WebsocketResponseType = {}));\nvar CONTROL = {\n WRITE_INDEX: 0,\n FRAMES_AVAILABLE: 1,\n LOCK: 2\n};\nvar WebsocketClient = /** @class */ (function () {\n function WebsocketClient(ctx) {\n var _this = this;\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n this.shouldResendLastFramesSent = false;\n this.buffer = new Float32Array(0);\n this.lastFramesSent = new 
Int16Array(0); // to re-send after switch context\n this.debug = false;\n this.initialized = false;\n // WebSocket's close handler, called e.g. when\n // - normal close (code 1000)\n // - network unreachable or unable to (re)connect (code 1006)\n // List of CloseEvent.code values: https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent/code\n this.onWebsocketClose = function (event) {\n if (_this.debug) {\n console.log('[WebSocketClient]', 'onWebsocketClose');\n }\n _this.websocket.removeEventListener('open', _this.onWebsocketOpen);\n _this.websocket.removeEventListener('message', _this.onWebsocketMessage);\n _this.websocket.removeEventListener('error', _this.onWebsocketError);\n _this.websocket.removeEventListener('close', _this.onWebsocketClose);\n _this.websocket = undefined;\n _this.workerCtx.postMessage({\n type: 'WEBSOCKET_CLOSED',\n code: event.code,\n reason: event.reason,\n wasClean: event.wasClean\n });\n };\n this.onWebsocketOpen = function (_event) {\n if (_this.debug) {\n console.log('[WebSocketClient]', 'websocket opened');\n }\n if (_this.isContextStarted && !_this.isStartContextConfirmed) {\n _this.send(_this.outbox);\n }\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_OPEN' });\n };\n this.onWebsocketError = function (_event) {\n if (_this.debug) {\n console.log('[WebSocketClient]', 'websocket error');\n }\n };\n this.onWebsocketMessage = function (event) {\n var response;\n try {\n response = JSON.parse(event.data);\n }\n catch (e) {\n console.error('[WebSocketClient]', 'error parsing response from the server:', e);\n return;\n }\n if (response.type === WebsocketResponseType.Started) {\n _this.isStartContextConfirmed = true;\n if (_this.shouldResendLastFramesSent) {\n _this.resendLastFrames();\n _this.shouldResendLastFramesSent = false;\n }\n }\n _this.workerCtx.postMessage(response);\n };\n this.workerCtx = ctx;\n }\n WebsocketClient.prototype.init = function (apiUrl, authToken, targetSampleRate, debug) {\n this.debug = debug;\n if (this.debug) {\n console.log('[WebSocketClient]', 'initialize worker');\n }\n this.apiUrl = apiUrl;\n this.authToken = authToken;\n this.targetSampleRate = targetSampleRate;\n this.initialized = true;\n this.isContextStarted = false;\n this.connect(0);\n };\n WebsocketClient.prototype.setSourceSampleRate = function (sourceSampleRate) {\n this.sourceSampleRate = sourceSampleRate;\n this.resampleRatio = this.sourceSampleRate / this.targetSampleRate;\n if (this.debug) {\n console.log('[WebSocketClient]', 'resampleRatio', this.resampleRatio);\n }\n if (this.resampleRatio > 1) {\n this.filter = generateFilter(this.sourceSampleRate, this.targetSampleRate, 127);\n }\n this.workerCtx.postMessage({ type: 'SOURCE_SAMPLE_RATE_SET_SUCCESS' });\n if (isNaN(this.resampleRatio)) {\n throw Error(\"resampleRatio is NaN source rate is \".concat(this.sourceSampleRate, \" and target rate is \").concat(this.targetSampleRate));\n }\n };\n WebsocketClient.prototype.setSharedArrayBuffers = function (controlSAB, dataSAB) {\n this.controlSAB = new Int32Array(controlSAB);\n this.dataSAB = new Float32Array(dataSAB);\n var audioHandleInterval = this.dataSAB.length / 32; // ms\n if (this.debug) {\n console.log('[WebSocketClient]', 'Audio handle interval', audioHandleInterval, 'ms');\n }\n setInterval(this.sendAudioFromSAB.bind(this), audioHandleInterval);\n };\n WebsocketClient.prototype.connect = function (timeout) {\n if (timeout === void 0) { timeout = 1000; }\n if (this.debug) {\n console.log('[WebSocketClient]', 'connect in ', timeout / 1000, 'sec');\n }\n 
setTimeout(this.initializeWebsocket.bind(this), timeout);\n };\n WebsocketClient.prototype.initializeWebsocket = function () {\n if (this.debug) {\n console.log('[WebSocketClient]', 'connecting to ', this.apiUrl);\n }\n this.websocket = new WebSocket(this.apiUrl, this.authToken);\n this.websocket.addEventListener('open', this.onWebsocketOpen);\n this.websocket.addEventListener('message', this.onWebsocketMessage);\n this.websocket.addEventListener('error', this.onWebsocketError);\n this.websocket.addEventListener('close', this.onWebsocketClose);\n };\n WebsocketClient.prototype.isOpen = function () {\n return this.websocket !== undefined && this.websocket.readyState === this.websocket.OPEN;\n };\n WebsocketClient.prototype.resendLastFrames = function () {\n if (this.lastFramesSent.length > 0) {\n this.send(this.lastFramesSent);\n this.lastFramesSent = new Int16Array(0);\n }\n };\n WebsocketClient.prototype.sendAudio = function (audioChunk) {\n if (!this.isContextStarted) {\n return;\n }\n if (audioChunk.length > 0) {\n if (this.resampleRatio > 1) {\n // Downsampling\n this.send(this.downsample(audioChunk));\n }\n else {\n this.send(float32ToInt16(audioChunk));\n }\n }\n };\n WebsocketClient.prototype.sendAudioFromSAB = function () {\n if (!this.isContextStarted) {\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n return;\n }\n if (this.controlSAB == undefined) {\n return;\n }\n var framesAvailable = this.controlSAB[CONTROL.FRAMES_AVAILABLE];\n var lock = this.controlSAB[CONTROL.LOCK];\n if (lock == 0 && framesAvailable > 0) {\n var data = this.dataSAB.subarray(0, framesAvailable);\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n if (data.length > 0) {\n var frames_1;\n if (this.resampleRatio > 1) {\n frames_1 = this.downsample(data);\n }\n else {\n frames_1 = float32ToInt16(data);\n }\n this.send(frames_1);\n // 16000 per second, 1000 in 100 ms\n // save last 250 ms\n if (this.lastFramesSent.length > 1024 * 4) {\n this.lastFramesSent = frames_1;\n }\n else {\n var concat = new Int16Array(this.lastFramesSent.length + frames_1.length);\n concat.set(this.lastFramesSent);\n concat.set(frames_1, this.lastFramesSent.length);\n this.lastFramesSent = concat;\n }\n }\n }\n };\n WebsocketClient.prototype.startContext = function (appId) {\n if (this.isContextStarted) {\n console.error('[WebSocketClient]', \"can't start context: active context exists\");\n return;\n }\n this.isContextStarted = true;\n this.isStartContextConfirmed = false;\n if (appId !== undefined) {\n this.outbox = JSON.stringify({ event: 'start', appId: appId });\n }\n else {\n this.outbox = JSON.stringify({ event: 'start' });\n }\n this.send(this.outbox);\n };\n WebsocketClient.prototype.stopContext = function () {\n if (!this.websocket) {\n throw Error('WebSocket is undefined');\n }\n if (!this.isContextStarted) {\n console.error('[WebSocketClient]', \"can't stop context: no active context\");\n return;\n }\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n };\n WebsocketClient.prototype.switchContext = function (newAppId) {\n if (!this.websocket) {\n throw Error('WebSocket is undefined');\n }\n if (!this.isContextStarted) {\n console.error('[WebSocketClient]', \"can't switch context: no active context\");\n return;\n }\n if (newAppId == undefined) {\n console.error('[WebSocketClient]', \"can't switch context: new app id is 
undefined\");\n return;\n }\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n this.shouldResendLastFramesSent = true;\n this.send(JSON.stringify({ event: 'start', appId: newAppId }));\n };\n WebsocketClient.prototype.closeWebsocket = function (websocketCode, reason) {\n if (websocketCode === void 0) { websocketCode = 1005; }\n if (reason === void 0) { reason = 'No Status Received'; }\n if (this.debug) {\n console.log('[WebSocketClient]', 'Websocket closing');\n }\n if (!this.websocket) {\n throw Error('WebSocket is undefined');\n }\n this.websocket.close(websocketCode, reason);\n };\n WebsocketClient.prototype.downsample = function (input) {\n var inputBuffer = new Float32Array(this.buffer.length + input.length);\n inputBuffer.set(this.buffer, 0);\n inputBuffer.set(input, this.buffer.length);\n var outputLength = Math.ceil((inputBuffer.length - this.filter.length) / this.resampleRatio);\n var outputBuffer = new Int16Array(outputLength);\n for (var i = 0; i < outputLength; i++) {\n var offset = Math.round(this.resampleRatio * i);\n var val = 0.0;\n for (var j = 0; j < this.filter.length; j++) {\n val += inputBuffer[offset + j] * this.filter[j];\n }\n outputBuffer[i] = val * (val < 0 ? 0x8000 : 0x7fff);\n }\n var remainingOffset = Math.round(this.resampleRatio * outputLength);\n if (remainingOffset < inputBuffer.length) {\n this.buffer = inputBuffer.subarray(remainingOffset);\n }\n else {\n this.buffer = new Float32Array(0);\n }\n return outputBuffer;\n };\n WebsocketClient.prototype.send = function (data) {\n if (this.isOpen()) {\n try {\n this.websocket.send(data);\n }\n catch (error) {\n console.log('[WebSocketClient]', 'server connection error', error);\n }\n }\n };\n return WebsocketClient;\n}());\nvar ctx = self;\nvar websocketClient = new WebsocketClient(ctx);\nctx.onmessage = function (e) {\n switch (e.data.type) {\n case 'INIT':\n websocketClient.init(e.data.apiUrl, e.data.authToken, e.data.targetSampleRate, e.data.debug);\n break;\n case 'SET_SOURCE_SAMPLE_RATE':\n websocketClient.setSourceSampleRate(e.data.sourceSampleRate);\n break;\n case 'SET_SHARED_ARRAY_BUFFERS':\n websocketClient.setSharedArrayBuffers(e.data.controlSAB, e.data.dataSAB);\n break;\n case 'CLOSE':\n websocketClient.closeWebsocket(1000, 'Close requested by client');\n break;\n case 'START_CONTEXT':\n websocketClient.startContext(e.data.appId);\n break;\n case 'SWITCH_CONTEXT':\n websocketClient.switchContext(e.data.appId);\n break;\n case 'STOP_CONTEXT':\n websocketClient.stopContext();\n break;\n case 'AUDIO':\n websocketClient.sendAudio(e.data.payload);\n break;\n default:\n console.log('WORKER', e);\n }\n};\nfunction float32ToInt16(buffer) {\n var buf = new Int16Array(buffer.length);\n for (var l = 0; l < buffer.length; l++) {\n buf[l] = buffer[l] * (buffer[l] < 0 ? 
0x8000 : 0x7fff);\n }\n return buf;\n}\nfunction generateFilter(sourceSampleRate, targetSampleRate, length) {\n if (length % 2 === 0) {\n throw Error('Filter length must be odd');\n }\n var cutoff = targetSampleRate / 2;\n var filter = new Float32Array(length);\n var sum = 0;\n for (var i = 0; i < length; i++) {\n var x = sinc(((2 * cutoff) / sourceSampleRate) * (i - (length - 1) / 2));\n sum += x;\n filter[i] = x;\n }\n for (var i = 0; i < length; i++) {\n filter[i] = filter[i] / sum;\n }\n return filter;\n}\nfunction sinc(x) {\n if (x === 0.0) {\n return 1.0;\n }\n var piX = Math.PI * x;\n return Math.sin(piX) / piX;\n}\n"],{type:"text/javascript"}),n=window.URL.createObjectURL(e);this.worker=new Worker(n),this.worker.addEventListener("message",this.onWebsocketMessage)}onResponse(t){this.onResponseCb=t}onClose(t){this.onCloseCb=t}initialize(t,e,n,i){return o(this,void 0,void 0,(function*(){return this.worker.postMessage({type:"INIT",apiUrl:t,authToken:e,targetSampleRate:n,debug:i}),this.startCbs=[],this.stopCbs=[],new Promise((t=>{this.resolveInitialization=t}))}))}setSourceSampleRate(t){return o(this,void 0,void 0,(function*(){return this.worker.postMessage({type:"SET_SOURCE_SAMPLE_RATE",sourceSampleRate:t}),new Promise((t=>{this.resolveSourceSampleRateSet=t}))}))}close(){return o(this,void 0,void 0,(function*(){return new Promise(((t,e)=>{this.worker.postMessage({type:"CLOSE",code:1e3,message:"Client has ended the session"}),t()}))}))}startContext(t){return o(this,void 0,void 0,(function*(){return new Promise(((e,n)=>{this.startCbs.push(((t,i)=>{void 0!==t?n(t):e(i)})),null!=t?this.worker.postMessage({type:"START_CONTEXT",appId:t}):this.worker.postMessage({type:"START_CONTEXT"})}))}))}stopContext(){return o(this,void 0,void 0,(function*(){return new Promise(((t,e)=>{this.stopCbs.push(((n,i)=>{void 0!==n?e(n):t(i)})),this.worker.postMessage({type:"STOP_CONTEXT"})}))}))}switchContext(t){return o(this,void 0,void 0,(function*(){return new Promise(((e,n)=>{this.startCbs.push(((t,i)=>{void 0!==t?n(t):e(i)})),this.worker.postMessage({type:"SWITCH_CONTEXT",appId:t})}))}))}postMessage(t){this.worker.postMessage(t)}sendAudio(t){this.worker.postMessage({type:"AUDIO",payload:t})}}class T{constructor(){this.storage=window.localStorage}get(t){return this.storage.getItem(t)}set(t,e){this.storage.setItem(t,e)}getOrSet(t,e){let n=this.storage.getItem(t);return null===n&&(n=e(),this.storage.setItem(t,n)),n}}function I(t,e){return{intent:t.intent,isFinal:e}}const W="speechly-auth-token";class _{constructor(e){var n,o,r,a,c,d,l,h;if(this.activeContexts=new Map,this.maxReconnectAttemptCount=10,this.contextStopDelay=250,this.connectAttempt=0,this.connectPromise=null,this.listeningPromise=null,this.cbs=[],this.state=t.DecoderState.Disconnected,this.handleWebsocketResponse=e=>{var n;this.debug&&console.log("[Decoder]","Received response",e);const{audio_context:i,segment_id:o,type:r}=e;let{data:a}=e;const c=this.activeContexts.get(i);if(void 0===c)return void console.warn("[Decoder]","Received response for non-existent context",i);let d=null!==(n=c.get(o))&&void 0!==n?n:new s(i,o);switch(r){case t.WebsocketResponseType.TentativeTranscript:const e=function(t){return t.words.map((({word:t,index:e,start_timestamp:n,end_timestamp:i})=>({value:t,index:e,startTimestamp:n,endTimestamp:i,isFinal:!1})))}(a),n=a.transcript;this.cbs.forEach((t=>t.tentativeTranscriptCbs.forEach((t=>t(i,o,e,n))))),d=d.updateTranscript(e);break;case t.WebsocketResponseType.Transcript:const 
s=function(t){return{value:t.word,index:t.index,startTimestamp:t.start_timestamp,endTimestamp:t.end_timestamp,isFinal:!0}}(a);this.cbs.forEach((t=>t.transcriptCbs.forEach((t=>t(i,o,s))))),d=d.updateTranscript([s]);break;case t.WebsocketResponseType.TentativeEntities:const r=function(t){return t.entities.map((({entity:t,value:e,start_position:n,end_position:i})=>({type:t,value:e,startPosition:n,endPosition:i,isFinal:!1})))}(a);this.cbs.forEach((t=>t.tentativeEntityCbs.forEach((t=>t(i,o,r))))),d=d.updateEntities(r);break;case t.WebsocketResponseType.Entity:const c=function(t){return{type:t.entity,value:t.value,startPosition:t.start_position,endPosition:t.end_position,isFinal:!0}}(a);this.cbs.forEach((t=>t.entityCbs.forEach((t=>t(i,o,c))))),d=d.updateEntities([c]);break;case t.WebsocketResponseType.TentativeIntent:const l=I(a,!1);this.cbs.forEach((t=>t.tentativeIntentCbs.forEach((t=>t(i,o,l))))),d=d.updateIntent(l);break;case t.WebsocketResponseType.Intent:const h=I(a,!0);this.cbs.forEach((t=>t.intentCbs.forEach((t=>t(i,o,h))))),d=d.updateIntent(h);break;case t.WebsocketResponseType.SegmentEnd:d=d.finalize()}c.set(o,d),this.activeContexts.set(i,c),this.logSegments&&console.info(d.toString()),this.cbs.forEach((t=>t.segmentChangeCbs.forEach((t=>t(d.toSegment())))))},this.handleWebsocketClosure=e=>{if(1e3===e.code)this.debug&&console.log("[Decoder]","Websocket closed",e);else{if(console.error("[Decoder]","Websocket closed due to error",e),void 0===this.deviceId)return void this.setState(t.DecoderState.Failed);this.listeningPromise=null,this.setState(t.DecoderState.Disconnected),this.reconnect()}},this.logSegments=null!==(n=e.logSegments)&&void 0!==n&&n,this.appId=null!==(o=e.appId)&&void 0!==o?o:void 0,this.projectId=null!==(r=e.projectId)&&void 0!==r?r:void 0,this.sampleRate=null!==(a=e.sampleRate)&&void 0!==a?a:i,this.debug=null!==(c=e.debug)&&void 0!==c&&c,void 0!==this.appId&&void 0!==this.projectId)throw Error("[Decoder] You cannot use both appId and projectId at the same time");if(void 0===this.appId&&void 0===this.projectId)throw Error("[Decoder] Either an appId or a projectId is required");const u=null!==(d=e.apiUrl)&&void 0!==d?d:"https://api.speechly.com";this.apiUrl=function(t,e){const n=new URLSearchParams;return n.append("sampleRate",e.toString()),`${t}?${n.toString()}`}(u.replace("http","ws")+"/ws/v1",this.sampleRate),this.loginUrl=`${u}/login`,this.storage=null!==(l=e.storage)&&void 0!==l?l:new T,this.deviceId=this.storage.getOrSet("speechly-device-id",k),this.apiClient=new x,this.apiClient.onResponse(this.handleWebsocketResponse),this.apiClient.onClose(this.handleWebsocketClosure),(null===(h=e.connect)||void 0===h||h)&&this.connect()}getReconnectDelayMs(t){return 100*Math.pow(2,t)}sleep(t){return o(this,void 0,void 0,(function*(){return new Promise((e=>setTimeout(e,t)))}))}connect(){return o(this,void 0,void 0,(function*(){null===this.connectPromise&&(this.connectPromise=(()=>o(this,void 0,void 0,(function*(){const e=this.storage.get(W);if(null!=e&&R(e,this.projectId,this.appId,this.deviceId))this.authToken=e;else try{this.authToken=yield function(t,e,n,i,s=fetch,r=Date.now){var a;return o(this,void 0,void 0,(function*(){let o;o=void 0!==e?{projectId:e,deviceId:i}:{appId:n,deviceId:i};const c=yield s(t,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(o)}),d=yield c.json();if(200!==c.status)throw Error(null!==(a=d.error)&&void 0!==a?a:`Speechly API login request failed with ${c.status}`);if(void 0===d.access_token)throw Error("Invalid login response 
from Speechly API");if(!R(d.access_token,e,n,i,r))throw Error("Invalid token received from Speechly API");return d.access_token}))}(this.loginUrl,this.projectId,this.appId,this.deviceId),this.storage.set(W,this.authToken)}catch(e){throw this.setState(t.DecoderState.Failed),e}yield this.apiClient.initialize(this.apiUrl,this.authToken,this.sampleRate,this.debug),this.advanceState(t.DecoderState.Connected)})))()),yield this.connectPromise}))}close(){return o(this,void 0,void 0,(function*(){let e;try{yield this.apiClient.close()}catch(t){e=t.message}if(this.activeContexts.clear(),this.connectPromise=null,this.setState(t.DecoderState.Disconnected),void 0!==e)throw Error(e)}))}startContext(e){return o(this,void 0,void 0,(function*(){if(this.state===t.DecoderState.Failed)throw Error("[Decoder] startContext cannot be run in Failed state.");if(this.state<t.DecoderState.Connected)yield this.connect();else if(this.state>t.DecoderState.Connected)throw Error("[Decoder] Unable to complete startContext: Expected Connected state, but was in "+f(this.state)+".");let i;if(this.setState(t.DecoderState.Active),null!=this.projectId)i=yield this.apiClient.startContext(null==e?void 0:e.appId);else{if(null!=(null==e?void 0:e.appId)&&this.appId!==(null==e?void 0:e.appId))throw this.setState(t.DecoderState.Failed),n;i=yield this.apiClient.startContext()}if(this.state<t.DecoderState.Active)throw Error("[Decoder] Unable to complete startContext: Problem acquiring contextId");return this.activeContexts.set(i,new Map),this.cbs.forEach((t=>t.contextStartedCbs.forEach((t=>t(i))))),i}))}sendAudio(e){if(this.state!==t.DecoderState.Active)throw Error("[Decoder] Unable to complete startContext: Expected Active state, but was in "+f(this.state)+".");this.apiClient.sendAudio(e)}stopContext(){return o(this,void 0,void 0,(function*(){if(this.state===t.DecoderState.Failed)throw Error("[Decoder] stopContext cannot be run in unrecovable error state.");if(this.state!==t.DecoderState.Active)throw Error("[Decoder] Unable to complete stopContext: Expected Active state, but was in "+f(this.state)+".");this.setState(t.DecoderState.Connected),yield this.sleep(this.contextStopDelay);try{const t=yield this.apiClient.stopContext();return this.activeContexts.delete(t),this.cbs.forEach((e=>e.contextStoppedCbs.forEach((e=>e(t))))),t}catch(e){throw this.setState(t.DecoderState.Failed),e}}))}switchContext(e){return o(this,void 0,void 0,(function*(){if(this.state!==t.DecoderState.Active)throw Error("[Decoder] Unable to complete switchContext: Expected Active state, but was in "+f(this.state)+".");const n=yield this.apiClient.switchContext(e);this.activeContexts.set(n,new Map)}))}registerListener(t){this.cbs.push(t)}setSampleRate(t){return o(this,void 0,void 0,(function*(){this.sampleRate=t,yield this.apiClient.setSourceSampleRate(t)}))}useSharedArrayBuffers(t,e){this.apiClient.postMessage({type:"SET_SHARED_ARRAY_BUFFERS",controlSAB:t,dataSAB:e})}reconnect(){return o(this,void 0,void 0,(function*(){this.debug&&console.log("[Decoder]","Reconnecting...",this.connectAttempt),this.connectPromise=null,this.connectAttempt<this.maxReconnectAttemptCount?(yield this.sleep(this.getReconnectDelayMs(this.connectAttempt++)),yield this.connect()):console.error("[Decoder] Maximum reconnect count reached, giving up automatic reconnect.")}))}advanceState(t){this.state>=t||this.setState(t)}setState(t){this.state!==t&&(this.debug&&console.log("[Decoder]",f(this.state),"->",f(t)),this.state=t,this.cbs.forEach((e=>{var n;return null===(n=e.stateChangeCbs)||void 
0===n?void 0:n.forEach((e=>e(t)))})))}}t.BrowserClient=class{constructor(t){var e,n;this.debug=!1,this.initialized=!1,this.active=!1,this.stats={maxSignalEnergy:0,sentSamples:0};const i=window.navigator.mediaDevices.getSupportedConstraints();this.nativeResamplingSupported=!0===i.sampleRate,this.isMobileSafari=["iPad Simulator","iPhone Simulator","iPod Simulator","iPad","iPhone","iPod"].indexOf(navigator.platform)>=0||navigator.userAgent.includes("Mac")&&"ontouchend"in document,this.isSafari=this.isMobileSafari||void 0!==window.safari,this.useSAB=!this.isSafari,this.debug=null===(e=t.debug)||void 0===e||e,this.callbacks=new p,this.decoder=null!==(n=t.decoder)&&void 0!==n?n:new _(t),this.decoder.registerListener(this.callbacks)}initialize(t){var n,s;return o(this,void 0,void 0,(function*(){if(!this.initialized){this.initialized=!0,this.debug&&console.log("[BrowserClient]","initializing"),yield this.decoder.connect();try{const t={};if(this.nativeResamplingSupported&&(t.sampleRate=i),void 0!==window.webkitAudioContext)try{this.audioContext=new window.webkitAudioContext(t)}catch(t){this.debug&&console.log("[BrowserClient]","creating audioContext without samplerate conversion",t),this.audioContext=new window.webkitAudioContext}else this.audioContext=new window.AudioContext(t),void 0!==window.webkitAudioContext&&(yield this.audioContext.resume())}catch(t){throw e}if(this.isSafari||void 0===window.AudioWorkletNode){if(this.debug&&console.log("[BrowserClient]","using ScriptProcessorNode"),void 0!==window.webkitAudioContext){const t=this.audioContext.sampleRate/i,e=4096*Math.pow(2,Math.ceil(Math.log(t)/Math.log(2)));this.audioProcessor=this.audioContext.createScriptProcessor(e,1,1)}else this.audioProcessor=this.audioContext.createScriptProcessor(void 0,1,1);this.audioProcessor.connect(this.audioContext.destination),this.audioProcessor.addEventListener("audioprocess",(t=>{this.handleAudio(t.inputBuffer.getChannelData(0))}))}else{this.debug&&console.log("[BrowserClient]","using AudioWorkletNode");const t=new Blob(["\n// Indices for the Control SAB.\nconst CONTROL = {\n 'WRITE_INDEX': 0,\n 'FRAMES_AVAILABLE': 1,\n 'LOCK': 2,\n};\n\nclass SpeechlyProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n\n this._initialized = false;\n this.debug = false;\n this.port.onmessage = this._initialize.bind(this);\n }\n\n _initialize(event) {\n this.controlSAB = new Int32Array(event.data.controlSAB);\n this.dataSAB = new Float32Array(event.data.dataSAB);\n this.debug = event.data.debug;\n if (this.debug) {\n console.log('[BrowserClient AudioWorkletNode]', 'initializing audioworklet');\n }\n this.sharedBufferSize = this.dataSAB.length;\n this.buffer = new Float32Array(0);\n this._initialized = true;\n }\n\n _transferDataToSharedBuffer(data) {\n this.controlSAB[CONTROL.LOCK] = 1;\n let inputWriteIndex = this.controlSAB[CONTROL.WRITE_INDEX];\n if (this.controlSAB[CONTROL.FRAMES_AVAILABLE] > 0) {\n if (inputWriteIndex + data.length > this.sharedBufferSize) {\n // console.log('buffer overflow')\n inputWriteIndex = 0;\n }\n }\n this.dataSAB.set(data, inputWriteIndex);\n this.controlSAB[CONTROL.WRITE_INDEX] = inputWriteIndex + data.length;\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = inputWriteIndex + data.length;\n this.controlSAB[CONTROL.LOCK] = 0;\n }\n\n _pushData(data) {\n if (this.debug) {\n const signalEnergy = getStandardDeviation(data)\n this.port.postMessage({\n type: 'STATS',\n signalEnergy: signalEnergy,\n samples: data.length,\n });\n }\n\n if (this.buffer.length > this.sharedBufferSize) 
{\n const dataToTransfer = this.buffer.subarray(0, this.sharedBufferSize);\n this._transferDataToSharedBuffer(dataToTransfer);\n this.buffer = this.buffer.subarray(this.sharedBufferSize);\n }\n let concat = new Float32Array(this.buffer.length + data.length);\n concat.set(this.buffer);\n concat.set(data, this.buffer.length);\n this.buffer = concat;\n }\n\n process(inputs, outputs, parameters) {\n const inputChannelData = inputs[0][0];\n if (inputChannelData !== undefined) {\n if (this.controlSAB && this.dataSAB) {\n this._pushData(inputChannelData);\n } else {\n this.port.postMessage({\n type: 'DATA',\n frames: inputChannelData\n });\n }\n }\n\n return true;\n }\n}\n\nfunction getStandardDeviation(array) {\n const n = array.length\n const mean = array.reduce((a, b) => a + b) / n\n return Math.sqrt(array.map(x => Math.pow(x - mean, 2)).reduce((a, b) => a + b) / n)\n}\n\nregisterProcessor('speechly-worklet', SpeechlyProcessor);\n"],{type:"text/javascript"}),e=window.URL.createObjectURL(t);if(yield this.audioContext.audioWorklet.addModule(e),this.speechlyNode=new AudioWorkletNode(this.audioContext,"speechly-worklet"),this.speechlyNode.connect(this.audioContext.destination),this.useSAB&&void 0!==window.SharedArrayBuffer){this.debug&&console.log("[BrowserClient]","using SharedArrayBuffer");const t=new window.SharedArrayBuffer(4*Int32Array.BYTES_PER_ELEMENT),e=new window.SharedArrayBuffer(1024*Float32Array.BYTES_PER_ELEMENT);this.decoder.useSharedArrayBuffers(t,e),this.speechlyNode.port.postMessage({type:"SET_SHARED_ARRAY_BUFFERS",controlSAB:t,dataSAB:e,debug:this.debug})}else this.debug&&console.log("[BrowserClient]","can not use SharedArrayBuffer");this.speechlyNode.port.onmessage=t=>{switch(t.data.type){case"STATS":t.data.signalEnergy>this.stats.maxSignalEnergy&&(this.stats.maxSignalEnergy=t.data.signalEnergy),this.stats.sentSamples+=parseInt(t.data.samples);break;case"DATA":this.handleAudio(t.data.frames)}}}this.debug&&console.log("[BrowserClient]","audioContext sampleRate is",null===(n=this.audioContext)||void 0===n?void 0:n.sampleRate),yield this.decoder.setSampleRate(null===(s=this.audioContext)||void 0===s?void 0:s.sampleRate),(null==t?void 0:t.mediaStream)&&(yield this.attach(null==t?void 0:t.mediaStream))}}))}close(){var t,e,n;return o(this,void 0,void 0,(function*(){yield this.detach(),null!==this.speechlyNode&&(null===(t=this.speechlyNode)||void 0===t||t.port.close(),null===(e=this.speechlyNode)||void 0===e||e.disconnect()),void 0!==this.audioProcessor&&(null===(n=this.audioProcessor)||void 0===n||n.disconnect()),yield this.decoder.close(),this.initialized=!1}))}attach(t){var e,n,i,s,r,a;return o(this,void 0,void 0,(function*(){if(yield this.initialize(),yield this.detach(),this.stream=null===(e=this.audioContext)||void 0===e?void 0:e.createMediaStreamSource(t),"running"!==(null===(n=this.audioContext)||void 0===n?void 0:n.state)&&(this.debug&&console.log("[BrowserClient]","audioContext resume required, state is",null===(i=this.audioContext)||void 0===i?void 0:i.state),yield null===(s=this.audioContext)||void 0===s?void 0:s.resume()),this.speechlyNode)null===(r=this.stream)||void 0===r||r.connect(this.speechlyNode);else{if(!this.audioProcessor)throw Error("[BrowserClient] cannot attach to mediaStream, not initialized");null===(a=this.stream)||void 0===a||a.connect(this.audioProcessor)}}))}detach(){return o(this,void 0,void 0,(function*(){this.active&&(yield this.stop()),this.stream&&(this.stream.disconnect(),this.stream=void 0)}))}uploadAudioData(t,e){var n;return o(this,void 0,void 
0,(function*(){yield this.initialize();const i=yield null===(n=this.audioContext)||void 0===n?void 0:n.decodeAudioData(t);if(void 0===i)throw Error("Could not decode audioData");const s=i.getChannelData(0);if(i.numberOfChannels>1){const t=i.getChannelData(1);for(let e=0;e<s.length;e++)s[e]=(s[e]+t[e])/2}const o=yield this.start(e);let r;for(let t=0;t<s.length;t+=16e3){const e=t+16e3;r=e>s.length?s.slice(t):s.slice(t,e),this.handleAudio(r)}return yield this.stop(),o}))}start(t){return o(this,void 0,void 0,(function*(){yield this.initialize();const e=this.decoder.startContext(t);return this.active=!0,e}))}stop(){return o(this,void 0,void 0,(function*(){let t=null;try{t=yield this.decoder.stopContext(),0===this.stats.sentSamples&&console.warn("[BrowserClient]","audioContext contained no audio data")}catch(t){console.warn("[BrowserClient]","stop() failed",t)}finally{this.active=!1,this.stats.sentSamples=0}return t}))}handleAudio(t){this.active&&t.length>0&&(this.stats.sentSamples+=t.length,this.decoder.sendAudio(t))}isActive(){return this.active}onSegmentChange(t){this.callbacks.segmentChangeCbs.push(t)}onTranscript(t){this.callbacks.transcriptCbs.push(t)}onEntity(t){this.callbacks.entityCbs.push(t)}onIntent(t){this.callbacks.intentCbs.push(t)}onTentativeTranscript(t){this.callbacks.tentativeTranscriptCbs.push(t)}onTentativeEntities(t){this.callbacks.tentativeEntityCbs.push(t)}onTentativeIntent(t){this.callbacks.tentativeIntentCbs.push(t)}onStateChange(t){this.callbacks.stateChangeCbs.push(t)}},t.BrowserMicrophone=class{constructor(){this.muted=!1,this.initialized=!1;try{const t=window.navigator.mediaDevices.getSupportedConstraints();this.nativeResamplingSupported=!0===t.sampleRate,this.autoGainControl=!0===t.autoGainControl}catch(t){this.nativeResamplingSupported=!1,this.autoGainControl=!1}}initialize(){var t;return o(this,void 0,void 0,(function*(){if(this.initialized)return;if(void 0===(null===(t=window.navigator)||void 0===t?void 0:t.mediaDevices))throw e;const n={video:!1};this.nativeResamplingSupported||this.autoGainControl?n.audio={sampleRate:i,autoGainControl:this.autoGainControl}:n.audio=!0;try{this.mediaStream=yield window.navigator.mediaDevices.getUserMedia(n)}catch(t){throw console.error(t),c}this.initialized=!0,this.muted=!0}))}close(){return o(this,void 0,void 0,(function*(){if(!this.initialized)throw r;this.muted=!0;this.mediaStream.getTracks().forEach((t=>t.stop())),this.mediaStream=void 0,this.initialized=!1}))}isRecording(){return!this.muted}},t.CloudDecoder=_,t.DefaultSampleRate=i,t.ErrAlreadyInitialized=a,t.ErrAppIdChangeWithoutProjectLogin=n,t.ErrDeviceNotSupported=e,t.ErrKeyNotFound=h,t.ErrNoAudioConsent=c,t.ErrNoStorageSupport=l,t.ErrNotInitialized=r,t.EventCallbacks=p,t.SegmentState=s,t.stateToString=f,Object.defineProperty(t,"__esModule",{value:!0})}));
!function(t,e){!function(i){var n=e,s=t&&t.exports==n&&t,o="object"==typeof N&&N;o.global!==o&&o.window!==o||(i=o);var a=function(t){this.message=t};(a.prototype=new Error).name="InvalidCharacterError";var d=function(t){throw new a(t)},c="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",l=/[\t\n\f\r ]/g,r={encode:function(t){t=String(t),/[^\0-\xFF]/.test(t)&&d("The string to be encoded contains characters outside of the Latin1 range.");for(var e,i,n,s,o=t.length%3,a="",l=-1,r=t.length-o;++l<r;)e=t.charCodeAt(l)<<16,i=t.charCodeAt(++l)<<8,n=t.charCodeAt(++l),a+=c.charAt((s=e+i+n)>>18&63)+c.charAt(s>>12&63)+c.charAt(s>>6&63)+c.charAt(63&s);return 2==o?(e=t.charCodeAt(l)<<8,i=t.charCodeAt(++l),a+=c.charAt((s=e+i)>>10)+c.charAt(s>>4&63)+c.charAt(s<<2&63)+"="):1==o&&(s=t.charCodeAt(l),a+=c.charAt(s>>2)+c.charAt(s<<4&63)+"=="),a},decode:function(t){var e=(t=String(t).replace(l,"")).length;e%4==0&&(e=(t=t.replace(/==?$/,"")).length),(e%4==1||/[^+a-zA-Z0-9/]/.test(t))&&d("Invalid character: the string to be decoded is not correctly encoded.");for(var i,n,s=0,o="",a=-1;++a<e;)n=c.indexOf(t.charAt(a)),i=s%4?64*i+n:n,s++%4&&(o+=String.fromCharCode(255&i>>(-2*s&6)));return o},version:"0.1.0"};if(n&&!n.nodeType)if(s)s.exports=r;else for(var h in r)r.hasOwnProperty(h)&&(n[h]=r[h]);else i.base64=r}(N)}(Y,Y.exports);function T(t,e,i,n,s=Date.now){const o=function(t){const e=t.split(".")[1];let i;try{i=JSON.parse(Y.exports.decode(e))}catch(t){throw new Error("Error decoding Speechly token!")}return{appId:i.appId,projectId:i.projectId,deviceId:i.deviceId,configId:i.configId,scopes:i.scope.split(" "),issuer:i.iss,audience:i.aud,expiresAtMs:1e3*i.exp}}(t);return!(o.expiresAtMs-s()<36e5)&&(o.appId===i&&o.projectId===e&&o.deviceId===n)}class z{constructor(){this.startCbs=[],this.stopCbs=[],this.onResponseCb=()=>{},this.onCloseCb=()=>{},this.onWebsocketMessage=e=>{const i=e.data;switch(i.type){case t.WorkerSignal.Opened:null!=this.resolveInitialization&&this.resolveInitialization();break;case t.WorkerSignal.Closed:this.onCloseCb({code:e.data.code,reason:e.data.reason,wasClean:e.data.wasClean});break;case t.WorkerSignal.AudioProcessorReady:null!=this.resolveSourceSampleRateSet&&this.resolveSourceSampleRateSet();break;case t.WebsocketResponseType.Started:this.onResponseCb(i),this.startCbs.forEach((t=>{try{t(void 0,i.audio_context)}catch(t){console.error('[SpeechlyClient] Error while invoking "onStart" callback:',t)}})),this.startCbs.length=0;break;case t.WebsocketResponseType.Stopped:this.onResponseCb(i),this.stopCbs.forEach((t=>{try{t(void 0,i.audio_context)}catch(t){console.error('[SpeechlyClient] Error while invoking "onStop" callback:',t)}})),this.stopCbs.length=0;break;default:this.onResponseCb(i)}},this.worker=new"/* rollup-plugin-web-worker-loader */
var worker_code=function(){"use strict";class t{static downsample(t,e,s=0,i=-1,o=0,r=-1){if(i<0&&(i=t.length-s),r<0&&(r=e.length-o),r>i)throw new Error(`Can't downsample: source array length (${i}) is shorter than destination (${r})`);if(0===r)throw new Error(`Can't downsample: source array length (${i}) can't be downsampled to zero-length destination.`);if(0===i)throw new Error("Can't downsample: source range can't be zero length.");if(1===i)return void(e[0]=t[0]);let a=0;const n=(r-1)/(i-1);let h=0,l=0;const d=s+i;for(;s<d;s++){const i=.5-Math.abs(a);h+=t[s]*i,l+=i,a+=n,a>=.5&&(a-=1,e[o++]=h/l,h=0,l=0)}l>0&&(e[o++]=h/l)}static getEnergy(t,e=0,s=-1){if(s<0&&(s=t.length-e),s<=0)return 0;const i=e+s;let o=0;for(;e<i;e++)o+=t[e]*t[e];return Math.sqrt(o/s)}static getAudioPeak(t,e=0,s=-1){if(s<0&&(s=t.length-e),s<=0)return 0;const i=e+s;let o=0;for(;e<i;e++)t[e]>o&&(o=t[e]);return o}static convertInt16ToFloat(t,e,s=0,i=-1,o=0){i<0&&(i=t.length/2-s);const r=Math.min(t.length/2-s,e.length-o);if((i=Math.min(i,r))<=0)return 0;let a=2*s;const n=a+2*i;for(;a<n;)e[o++]=(t[a++]+(t[a++]<<8))/32767;return i}static convertFloatToInt16(t,e,s=0,i=-1,o=0){i<0&&(i=t.length-s);const r=s+i;for(;s<r;)e[o++]=~~(32767*t[s++])}static energyToDb(e){return 10*Math.log(e)/t.LOG_2_PLUS_LOG_5}static dbToEnergy(t){return Math.pow(10,t/10)}}t.LOG_2_PLUS_LOG_5=Math.log(2)+Math.log(5);class e{constructor(t,e,s){this.isActive=!1,this.streamSamplePos=0,this.samplesSent=0,this.utteranceSerial=-1,this.sendAudio=(t,e,s)=>{},this.onVadSignalLow=()=>{},this.onVadSignalHigh=()=>{},this.inputSampleRate=16e3,this.internalSampleRate=16e3,this.historyFrames=5,this.frameMillis=30,this.currentFrameNumber=0,this.frameSamplePos=0,this.streamFramePos=0,this.isSignalDetected=!1,this.inputSampleRate=t,this.internalSampleRate=e,this.historyFrames=s,this.frameSamples=~~(this.internalSampleRate*this.frameMillis/1e3),this.sampleRingBuffer=new Float32Array(this.frameSamples*this.historyFrames)}startContext(){this.isActive=!0,this.samplesSent=0,this.utteranceSerial++}stopContext(){this.flush(),this.isActive=!1}resetStream(){var t;this.streamFramePos=0,this.streamSamplePos=0,this.frameSamplePos=0,this.currentFrameNumber=0,this.utteranceSerial=-1,null===(t=this.vad)||void 0===t||t.resetVAD()}flush(){this.processAudio(this.sampleRingBuffer,0,this.frameSamplePos,!0)}processAudio(e,s=0,i=-1,o=!1){if(i<0&&(i=e.length),0===i)return;let r=s;const a=s+i;for(;r<a;){const s=this.currentFrameNumber*this.frameSamples;if(this.inputSampleRate===this.internalSampleRate){const t=Math.min(a-r,this.frameSamples-this.frameSamplePos),i=this.frameSamplePos+t;for(;this.frameSamplePos<i;)this.sampleRingBuffer[s+this.frameSamplePos++]=e[r++]}else{const i=1*this.inputSampleRate/this.internalSampleRate,o=Math.min(a-r,Math.round(i*(this.frameSamples-this.frameSamplePos))),n=Math.min(Math.round((a-r)/i),this.frameSamples-this.frameSamplePos);n>0&&t.downsample(e,this.sampleRingBuffer,r,o,s+this.frameSamplePos,n),r+=o,this.frameSamplePos+=n}if(this.frameSamplePos>this.frameSamples)throw new Error(`this.frameSamplePos (${this.frameSamplePos}) > this.frameSamples (${this.frameSamples})`);if(this.frameSamplePos===this.frameSamples||o){const t=o?this.frameSamplePos:this.frameSamples;if(o||this.processFrame(this.sampleRingBuffer,s,t),this.isActive){if(0===this.samplesSent){const t=Math.min(this.streamFramePos,this.historyFrames-1);let 
e=(this.currentFrameNumber+this.historyFrames-t)%this.historyFrames;for(;e!==this.currentFrameNumber;)this.sendAudio(this.sampleRingBuffer,e*this.frameSamples,this.frameSamples),this.samplesSent+=this.frameSamples,e=(e+1)%this.historyFrames}this.sendAudio(this.sampleRingBuffer,s,t),this.samplesSent+=t}this.frameSamplePos===this.frameSamples&&(this.frameSamplePos=0,this.streamFramePos+=1,this.streamSamplePos+=t,this.currentFrameNumber=(this.currentFrameNumber+1)%this.historyFrames)}}}processFrame(t,e=0,s=-1){this.analyzeAudioFrame(t,e,s),this.autoControlListening()}analyzeAudioFrame(t,e,s){var i;(null===(i=this.vad)||void 0===i?void 0:i.vadOptions.enabled)&&this.vad.processFrame(t,e,s)}autoControlListening(){var t;(null===(t=this.vad)||void 0===t?void 0:t.vadOptions.enabled)&&(!this.isSignalDetected&&this.vad.isSignalDetected&&(this.onVadSignalHigh(),this.isSignalDetected=!0),this.isSignalDetected&&!this.vad.isSignalDetected&&(this.onVadSignalLow(),this.isSignalDetected=!1))}}class s{constructor(t){this.isSignalDetected=!1,this.signalDb=-90,this.noiseLevelDb=-90,this.frameMillis=30,this.energy=0,this.baselineEnergy=-1,this.loudFrameBits=0,this.vadSustainMillisLeft=0,this.vadOptions=t}adjustVadOptions(t){this.vadOptions=Object.assign(Object.assign({},this.vadOptions),t)}resetVAD(){this.isSignalDetected=!1,this.loudFrameBits=0,this.energy=0,this.baselineEnergy=-1}processFrame(e,s=0,i=-1){if(!this.vadOptions.enabled)return void this.resetVAD();this.energy=t.getEnergy(e,s,i),this.baselineEnergy<0&&(this.baselineEnergy=this.energy);const o=this.energy>Math.max(t.dbToEnergy(this.vadOptions.noiseGateDb),this.baselineEnergy*t.dbToEnergy(this.vadOptions.signalToNoiseDb));this.pushFrameHistory(o),this.isSignalDetected=this.determineNewSignalState(this.isSignalDetected),this.adaptBackgroundNoise(),this.signalDb=t.energyToDb(this.energy/this.baselineEnergy),this.noiseLevelDb=t.energyToDb(this.baselineEnergy)}determineNewSignalState(t){this.vadSustainMillisLeft=Math.max(this.vadSustainMillisLeft-this.frameMillis,0);const e=this.countLoudFrames(this.vadOptions.signalSearchFrames),s=Math.round(this.vadOptions.signalActivation*this.vadOptions.signalSearchFrames),i=Math.round(this.vadOptions.signalRelease*this.vadOptions.signalSearchFrames);return e>=s?(this.vadSustainMillisLeft=this.vadOptions.signalSustainMillis,!0):!(e<=i&&0===this.vadSustainMillisLeft)&&t}adaptBackgroundNoise(){if(!this.isSignalDetected&&this.vadOptions.noiseLearnHalftimeMillis>0){var t=Math.pow(2,-this.frameMillis/this.vadOptions.noiseLearnHalftimeMillis);this.baselineEnergy=this.baselineEnergy*t+this.energy*(1-t)}}pushFrameHistory(t){this.loudFrameBits=(t?1:0)|this.loudFrameBits<<1}countLoudFrames(t){let e=0,s=this.loudFrameBits;for(;t>0;)1==(1&s)&&e++,s>>=1,t--;return e}}var 
i,o,r;!function(t){t.Started="started",t.Stopped="stopped",t.SegmentEnd="segment_end",t.Transcript="transcript",t.Entity="entity",t.Intent="intent",t.TentativeTranscript="tentative_transcript",t.TentativeEntities="tentative_entities",t.TentativeIntent="tentative_intent"}(i||(i={})),function(t){t.Opened="WEBSOCKET_OPEN",t.Closed="WEBSOCKET_CLOSED",t.AudioProcessorReady="SOURCE_SAMPLE_RATE_SET_SUCCESS",t.VadSignalHigh="VadSignalHigh",t.VadSignalLow="VadSignalLow"}(o||(o={})),function(t){t.connect="connect",t.initAudioProcessor="initAudioProcessor",t.adjustAudioProcessor="adjustAudioProcessor",t.SET_SHARED_ARRAY_BUFFERS="SET_SHARED_ARRAY_BUFFERS",t.CLOSE="CLOSE",t.START_CONTEXT="START_CONTEXT",t.SWITCH_CONTEXT="SWITCH_CONTEXT",t.STOP_CONTEXT="STOP_CONTEXT",t.AUDIO="AUDIO",t.startStream="startStream",t.stopStream="stopStream"}(r||(r={}));const a=0,n=1,h=2;class l{constructor(t){this.targetSampleRate=16e3,this.isContextStarted=!1,this.frameMillis=30,this.outputAudioFrame=new Int16Array(this.frameMillis*this.targetSampleRate/1e3),this.debug=!1,this.onWebsocketClose=t=>{if(!this.websocket)throw Error("WebSocket is undefined");this.debug&&console.log("[WebSocketClient]","onWebsocketClose"),this.websocket.removeEventListener("open",this.onWebsocketOpen),this.websocket.removeEventListener("message",this.onWebsocketMessage),this.websocket.removeEventListener("error",this.onWebsocketError),this.websocket.removeEventListener("close",this.onWebsocketClose),this.websocket=void 0,this.workerCtx.postMessage({type:o.Closed,code:t.code,reason:t.reason,wasClean:t.wasClean})},this.onWebsocketOpen=t=>{this.debug&&console.log("[WebSocketClient]","websocket opened"),this.workerCtx.postMessage({type:o.Opened})},this.onWebsocketError=t=>{this.debug&&console.log("[WebSocketClient]","websocket error")},this.onWebsocketMessage=t=>{let e;try{e=JSON.parse(t.data)}catch(t){return void console.error("[WebSocketClient]","error parsing response from the server:",t)}this.workerCtx.postMessage(e)},this.workerCtx=t}connect(t,e,s,i){this.debug=i,this.debug&&console.log("[WebSocketClient]","connecting to ",t),this.targetSampleRate=s,this.isContextStarted=!1,this.websocket=new WebSocket(t,e),this.websocket.addEventListener("open",this.onWebsocketOpen),this.websocket.addEventListener("message",this.onWebsocketMessage),this.websocket.addEventListener("error",this.onWebsocketError),this.websocket.addEventListener("close",this.onWebsocketClose)}initAudioProcessor(i,r){this.audioProcessor=new e(i,this.targetSampleRate,5),r&&(this.audioProcessor.vad=new s(r),this.audioProcessor.onVadSignalHigh=()=>{var t,e,s,i,r;const a=null===(e=null===(t=this.audioProcessor)||void 0===t?void 0:t.vad)||void 0===e?void 0:e.vadOptions;a&&((null===(s=this.defaultContextOptions)||void 0===s?void 0:s.immediate)?a.enabled&&a.controlListening&&this.startContext():(null===(r=null===(i=this.audioProcessor)||void 0===i?void 0:i.vad)||void 0===r?void 0:r.vadOptions.enabled)&&a.enabled&&a.controlListening&&this.workerCtx.postMessage({type:o.VadSignalHigh}))},this.audioProcessor.onVadSignalLow=()=>{var t,e,s;const i=null===(e=null===(t=this.audioProcessor)||void 0===t?void 0:t.vad)||void 0===e?void 0:e.vadOptions;i&&((null===(s=this.defaultContextOptions)||void 0===s?void 
0:s.immediate)?i.enabled&&i.controlListening&&this.stopContext():i.enabled&&i.controlListening&&this.workerCtx.postMessage({type:o.VadSignalLow}))}),this.audioProcessor.sendAudio=(e,s,i)=>{t.convertFloatToInt16(e,this.outputAudioFrame,s,i),this.send(this.outputAudioFrame)},this.workerCtx.postMessage({type:o.AudioProcessorReady})}adjustAudioProcessor(t){if(!this.audioProcessor)throw new Error("No AudioProcessor");if(t.vad){if(!this.audioProcessor.vad)throw new Error("No VAD in AudioProcessor. Did you define `vad` in BrowserClient constructor parameters?");this.audioProcessor.vad.adjustVadOptions(t.vad)}}setSharedArrayBuffers(t,e){this.controlSAB=new Int32Array(t),this.dataSAB=new Float32Array(e);const s=this.dataSAB.length/32;this.debug&&console.log("[WebSocketClient]","Audio handle interval",s,"ms"),setInterval(this.processAudioSAB.bind(this),s)}startStream(t){if(!this.audioProcessor)throw new Error("No AudioProcessor");this.defaultContextOptions=t,this.audioProcessor.resetStream()}stopStream(){if(!this.audioProcessor)throw new Error("No AudioProcessor");this.isContextStarted&&this.stopContext(),this.defaultContextOptions=void 0}processAudio(t){if(!this.audioProcessor)throw new Error("No AudioProcessor");this.audioProcessor.processAudio(t)}processAudioSAB(){if(!this.controlSAB||!this.dataSAB)throw new Error("No SharedArrayBuffers");const t=this.controlSAB[n];if(0===this.controlSAB[h]&&t>0){const e=this.dataSAB.subarray(0,t);this.controlSAB[n]=0,this.controlSAB[a]=0,e.length>0&&this.processAudio(e)}}startContext(t){if(!this.audioProcessor)throw Error("No AudioProcessor");this.isContextStarted?console.error("[WebSocketClient]","can't start context: active context exists"):(this.audioProcessor.startContext(),this.isContextStarted=!0,void 0!==t?this.send(JSON.stringify({event:"start",appId:t})):this.send(JSON.stringify({event:"start"})))}stopContext(){if(!this.audioProcessor)throw Error("No AudioProcessor");if(!this.isContextStarted)return void console.error("[WebSocketClient]","can't stop context: no active context");this.audioProcessor.stopContext(),this.isContextStarted=!1;const t=JSON.stringify({event:"stop"});this.send(t)}switchContext(t){if(!this.websocket)throw Error("WebSocket is undefined");if(!this.isContextStarted)return void console.error("[WebSocketClient]","can't switch context: no active context");if(void 0===t)return void console.error("[WebSocketClient]","can't switch context: new app id is undefined");const e=JSON.stringify({event:"stop"});this.send(e),this.send(JSON.stringify({event:"start",appId:t}))}closeWebsocket(t=1005,e="No Status Received"){if(this.debug&&console.log("[WebSocketClient]","Websocket closing"),!this.websocket)throw Error("WebSocket is undefined");this.websocket.close(t,e)}send(t){if(!this.websocket)throw new Error("No Websocket");if(this.websocket.readyState!==this.websocket.OPEN)throw new Error(`Expected OPEN Websocket state, but got ${this.websocket.readyState}`);try{this.websocket.send(t)}catch(t){console.log("[WebSocketClient]","server connection error",t)}}}const d=self,c=new l(d);return d.onmessage=function(t){switch(t.data.type){case r.connect:c.connect(t.data.apiUrl,t.data.authToken,t.data.targetSampleRate,t.data.debug);break;case r.initAudioProcessor:c.initAudioProcessor(t.data.sourceSampleRate,t.data.vadOptions);break;case r.adjustAudioProcessor:c.adjustAudioProcessor(t.data.params);break;case r.SET_SHARED_ARRAY_BUFFERS:c.setSharedArrayBuffers(t.data.controlSAB,t.data.dataSAB);break;case r.CLOSE:c.closeWebsocket(1e3,"Close requested by 
client");break;case r.startStream:c.startStream(t.data.options);break;case r.stopStream:c.stopStream();break;case r.START_CONTEXT:c.startContext(t.data.appId);break;case r.SWITCH_CONTEXT:c.switchContext(t.data.appId);break;case r.STOP_CONTEXT:c.stopContext();break;case r.AUDIO:c.processAudio(t.data.payload);break;default:console.log("WORKER",t)}},l}();

",this.worker.addEventListener("message",this.onWebsocketMessage)}onResponse(t){this.onResponseCb=t}onClose(t){this.onCloseCb=t}initialize(e,i,n,s){return o(this,void 0,void 0,(function*(){return this.worker.postMessage({type:t.ControllerSignal.connect,apiUrl:e,authToken:i,targetSampleRate:n,debug:s}),this.startCbs=[],this.stopCbs=[],new Promise((t=>{this.resolveInitialization=t}))}))}initAudioProcessor(e,i){return o(this,void 0,void 0,(function*(){return this.worker.postMessage({type:t.ControllerSignal.initAudioProcessor,sourceSampleRate:e,vadOptions:i}),new Promise((t=>{this.resolveSourceSampleRateSet=t}))}))}adjustAudioProcessor(e){this.worker.postMessage({type:t.ControllerSignal.adjustAudioProcessor,params:e})}close(){return o(this,void 0,void 0,(function*(){return new Promise(((e,i)=>{this.worker.postMessage({type:t.ControllerSignal.CLOSE,code:1e3,message:"Client has ended the session"}),e()}))}))}startStream(e){return o(this,void 0,void 0,(function*(){this.worker.postMessage({type:t.ControllerSignal.startStream,options:e})}))}stopStream(){return o(this,void 0,void 0,(function*(){this.worker.postMessage({type:t.ControllerSignal.stopStream})}))}startContext(e){return o(this,void 0,void 0,(function*(){return new Promise(((i,n)=>{this.startCbs.push(((t,e)=>{void 0!==t?n(t):i(e)})),null!=e?this.worker.postMessage({type:t.ControllerSignal.START_CONTEXT,appId:e}):this.worker.postMessage({type:t.ControllerSignal.START_CONTEXT})}))}))}stopContext(){return o(this,void 0,void 0,(function*(){return new Promise(((e,i)=>{this.stopCbs.push(((t,n)=>{void 0!==t?i(t):e(n)})),this.worker.postMessage({type:t.ControllerSignal.STOP_CONTEXT})}))}))}switchContext(e){return o(this,void 0,void 0,(function*(){return new Promise(((i,n)=>{this.startCbs.push(((t,e)=>{void 0!==t?n(t):i(e)})),this.worker.postMessage({type:t.ControllerSignal.SWITCH_CONTEXT,appId:e})}))}))}postMessage(t){this.worker.postMessage(t)}sendAudio(e){this.worker.postMessage({type:t.ControllerSignal.AUDIO,payload:e})}}class I{constructor(){this.storage=window.localStorage}get(t){return this.storage.getItem(t)}set(t,e){this.storage.setItem(t,e)}getOrSet(t,e){let i=this.storage.getItem(t);return null===i&&(i=e(),this.storage.setItem(t,i)),i}}function x(t,e){return{intent:t.intent,isFinal:e}}const f="speechly-auth-token";class k{constructor(e){var i,o,a,d,c,l,r,h;if(this.activeContexts=new Map,this.maxReconnectAttemptCount=10,this.contextStopDelay=250,this.connectAttempt=0,this.connectPromise=null,this.cbs=[],this.state=t.DecoderState.Disconnected,this.handleWebsocketResponse=e=>{switch(this.debug&&console.log("[Decoder]","Received response",e),e.type){case t.WorkerSignal.VadSignalHigh:this.cbs.forEach((t=>t.onVadStateChange.forEach((t=>t(!0)))));break;case t.WorkerSignal.VadSignalLow:this.cbs.forEach((t=>t.onVadStateChange.forEach((t=>t(!1)))));break;case t.WebsocketResponseType.Started:this.activeContexts.set(e.audio_context,new Map),this.cbs.forEach((t=>t.contextStartedCbs.forEach((t=>t(e.audio_context)))));break;case t.WebsocketResponseType.Stopped:this.activeContexts.delete(e.audio_context),this.cbs.forEach((t=>t.contextStoppedCbs.forEach((t=>t(e.audio_context)))));break;default:this.handleSegmentUpdate(e)}},this.handleSegmentUpdate=e=>{var i;const{audio_context:n,segment_id:o,type:a}=e;let{data:d}=e;const c=this.activeContexts.get(n);if(void 0===c)return void console.warn("[Decoder]","Received response for non-existent context",n);let l=null!==(i=c.get(o))&&void 0!==i?i:new s(n,o);switch(a){case 
t.WebsocketResponseType.TentativeTranscript:const e=function(t){return t.words.map((({word:t,index:e,start_timestamp:i,end_timestamp:n})=>({value:t,index:e,startTimestamp:i,endTimestamp:n,isFinal:!1})))}(d),i=d.transcript;this.cbs.forEach((t=>t.tentativeTranscriptCbs.forEach((t=>t(n,o,e,i))))),l=l.updateTranscript(e);break;case t.WebsocketResponseType.Transcript:const s=function(t){return{value:t.word,index:t.index,startTimestamp:t.start_timestamp,endTimestamp:t.end_timestamp,isFinal:!0}}(d);this.cbs.forEach((t=>t.transcriptCbs.forEach((t=>t(n,o,s))))),l=l.updateTranscript([s]);break;case t.WebsocketResponseType.TentativeEntities:const a=function(t){return t.entities.map((({entity:t,value:e,start_position:i,end_position:n})=>({type:t,value:e,startPosition:i,endPosition:n,isFinal:!1})))}(d);this.cbs.forEach((t=>t.tentativeEntityCbs.forEach((t=>t(n,o,a))))),l=l.updateEntities(a);break;case t.WebsocketResponseType.Entity:const c=function(t){return{type:t.entity,value:t.value,startPosition:t.start_position,endPosition:t.end_position,isFinal:!0}}(d);this.cbs.forEach((t=>t.entityCbs.forEach((t=>t(n,o,c))))),l=l.updateEntities([c]);break;case t.WebsocketResponseType.TentativeIntent:const r=x(d,!1);this.cbs.forEach((t=>t.tentativeIntentCbs.forEach((t=>t(n,o,r))))),l=l.updateIntent(r);break;case t.WebsocketResponseType.Intent:const h=x(d,!0);this.cbs.forEach((t=>t.intentCbs.forEach((t=>t(n,o,h))))),l=l.updateIntent(h);break;case t.WebsocketResponseType.SegmentEnd:l=l.finalize()}c.set(o,l),this.activeContexts.set(n,c),this.logSegments&&console.info(l.toString()),this.cbs.forEach((t=>t.segmentChangeCbs.forEach((t=>t(l.toSegment())))))},this.handleWebsocketClosure=e=>{if(1e3===e.code)this.debug&&console.log("[Decoder]","Websocket closed",e);else{if(console.error("[Decoder]","Websocket closed due to error",e),void 0===this.deviceId)return void this.setState(t.DecoderState.Failed);this.setState(t.DecoderState.Disconnected),this.reconnect()}},this.logSegments=null!==(i=e.logSegments)&&void 0!==i&&i,this.appId=null!==(o=e.appId)&&void 0!==o?o:void 0,this.projectId=null!==(a=e.projectId)&&void 0!==a?a:void 0,this.sampleRate=null!==(d=e.sampleRate)&&void 0!==d?d:n,this.debug=null!==(c=e.debug)&&void 0!==c&&c,void 0!==this.appId&&void 0!==this.projectId)throw Error("[Decoder] You cannot use both appId and projectId at the same time");if(void 0===this.appId&&void 0===this.projectId)throw Error("[Decoder] Either an appId or a projectId is required");const u=null!==(l=e.apiUrl)&&void 0!==l?l:"https://api.speechly.com";this.apiUrl=function(t,e){const i=new URLSearchParams;return i.append("sampleRate",e.toString()),`${t}?${i.toString()}`}(u.replace("http","ws")+"/ws/v1",this.sampleRate),this.loginUrl=`${u}/login`,this.storage=null!==(r=e.storage)&&void 0!==r?r:new I,this.deviceId=this.storage.getOrSet("speechly-device-id",g),this.apiClient=new z,this.apiClient.onResponse(this.handleWebsocketResponse),this.apiClient.onClose(this.handleWebsocketClosure),(null===(h=e.connect)||void 0===h||h)&&this.connect()}getReconnectDelayMs(t){return 100*Math.pow(2,t)}sleep(t){return o(this,void 0,void 0,(function*(){return new Promise((e=>setTimeout(e,t)))}))}connect(){return o(this,void 0,void 0,(function*(){null===this.connectPromise&&(this.connectPromise=(()=>o(this,void 0,void 0,(function*(){const e=this.storage.get(f);if(null!=e&&T(e,this.projectId,this.appId,this.deviceId))this.authToken=e;else try{this.authToken=yield function(t,e,i,n,s=fetch,a=Date.now){var d;return o(this,void 0,void 0,(function*(){let o;o=void 
0!==e?{projectId:e,deviceId:n}:{appId:i,deviceId:n};const c=yield s(t,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(o)}),l=yield c.json();if(200!==c.status)throw Error(null!==(d=l.error)&&void 0!==d?d:`Speechly API login request failed with ${c.status}`);if(void 0===l.access_token)throw Error("Invalid login response from Speechly API");if(!T(l.access_token,e,i,n,a))throw Error("Invalid token received from Speechly API");return l.access_token}))}(this.loginUrl,this.projectId,this.appId,this.deviceId),this.storage.set(f,this.authToken)}catch(e){throw this.setState(t.DecoderState.Failed),e}yield this.apiClient.initialize(this.apiUrl,this.authToken,this.sampleRate,this.debug),this.advanceState(t.DecoderState.Connected)})))()),yield this.connectPromise}))}adjustAudioProcessor(t){this.apiClient.adjustAudioProcessor(t)}close(){return o(this,void 0,void 0,(function*(){let e;try{yield this.apiClient.close()}catch(t){e=t.message}if(this.activeContexts.clear(),this.connectPromise=null,this.setState(t.DecoderState.Disconnected),void 0!==e)throw Error(e)}))}startStream(t){return o(this,void 0,void 0,(function*(){yield this.apiClient.startStream(t)}))}stopStream(){return o(this,void 0,void 0,(function*(){this.state===t.DecoderState.Active&&(yield this.stopContext()),yield this.apiClient.stopStream()}))}startContext(e){return o(this,void 0,void 0,(function*(){if(this.state===t.DecoderState.Failed)throw Error("[Decoder] startContext cannot be run in Failed state.");if(this.state<t.DecoderState.Connected)yield this.connect();else if(this.state>t.DecoderState.Connected)throw Error("[Decoder] Unable to complete startContext: Expected Connected state, but was in "+y(this.state)+".");let n;if(this.setState(t.DecoderState.Active),null!=this.projectId)n=yield this.apiClient.startContext(null==e?void 0:e.appId);else{if(null!=(null==e?void 0:e.appId)&&this.appId!==(null==e?void 0:e.appId))throw this.setState(t.DecoderState.Failed),i;n=yield this.apiClient.startContext()}if(this.state<t.DecoderState.Active)throw Error("[Decoder] Unable to complete startContext: Problem acquiring contextId");return n}))}sendAudio(t){this.apiClient.sendAudio(t)}stopContext(){return o(this,void 0,void 0,(function*(){if(this.state===t.DecoderState.Failed)throw Error("[Decoder] stopContext cannot be run in unrecovable error state.");if(this.state!==t.DecoderState.Active)throw Error("[Decoder] Unable to complete stopContext: Expected Active state, but was in "+y(this.state)+".");this.setState(t.DecoderState.Connected),yield this.sleep(this.contextStopDelay);try{return yield this.apiClient.stopContext()}catch(e){throw this.setState(t.DecoderState.Failed),e}}))}switchContext(e){return o(this,void 0,void 0,(function*(){if(this.state!==t.DecoderState.Active)throw Error("[Decoder] Unable to complete switchContext: Expected Active state, but was in "+y(this.state)+".");const i=yield this.apiClient.switchContext(e);this.activeContexts.set(i,new Map)}))}registerListener(t){this.cbs.push(t)}initAudioProcessor(t,e){return o(this,void 0,void 0,(function*(){this.sampleRate=t,yield this.apiClient.initAudioProcessor(t,e)}))}useSharedArrayBuffers(t,e){this.apiClient.postMessage({type:"SET_SHARED_ARRAY_BUFFERS",controlSAB:t,dataSAB:e})}reconnect(){return o(this,void 0,void 0,(function*(){this.debug&&console.log("[Decoder]","Reconnecting...",this.connectAttempt),this.connectPromise=null,this.connectAttempt<this.maxReconnectAttemptCount?(yield this.sleep(this.getReconnectDelayMs(this.connectAttempt++)),yield 
this.connect()):console.error("[Decoder] Maximum reconnect count reached, giving up automatic reconnect.")}))}advanceState(t){this.state>=t||this.setState(t)}setState(t){this.state!==t&&(this.debug&&console.log("[Decoder]",y(this.state),"->",y(t)),this.state=t,this.cbs.forEach((e=>{var i;return null===(i=e.stateChangeCbs)||void 0===i?void 0:i.forEach((e=>e(t)))})))}}t.BrowserClient=class{constructor(t){var e,i;this.debug=!1,this.initialized=!1,this.isStreaming=!1,this.isStreamAutoStarted=!1,this.active=!1,this.listeningPromise=null,this.stats={maxSignalEnergy:0,sentSamples:0};const n=window.navigator.mediaDevices.getSupportedConstraints();this.nativeResamplingSupported=!0===n.sampleRate,this.isMobileSafari=["iPad Simulator","iPhone Simulator","iPod Simulator","iPad","iPhone","iPod"].indexOf(navigator.platform)>=0||navigator.userAgent.includes("Mac")&&"ontouchend"in document,this.isSafari=this.isMobileSafari||void 0!==window.safari,this.useSAB=!this.isSafari,this.vadOptions=Object.assign(Object.assign({},b),t.vad),this.debug=null===(e=t.debug)||void 0===e||e,this.callbacks=new Z,this.callbacks.onVadStateChange.push(this.onVadStateChange.bind(this)),this.decoder=null!==(i=t.decoder)&&void 0!==i?i:new k(t),this.decoder.registerListener(this.callbacks)}onVadStateChange(t){var e;this.debug&&console.log("[BrowserClient]","onVadStateChange",t),(null===(e=this.vadOptions)||void 0===e?void 0:e.controlListening)&&(t?this.active||this.start():this.active&&this.stop())}initialize(t){var i,s;return o(this,void 0,void 0,(function*(){if(!this.initialized){this.initialized=!0,this.debug&&console.log("[BrowserClient]","initializing"),yield this.decoder.connect();try{const t={};if(this.nativeResamplingSupported&&(t.sampleRate=n),void 0!==window.webkitAudioContext)try{this.audioContext=new window.webkitAudioContext(t)}catch(t){this.debug&&console.log("[BrowserClient]","creating audioContext without samplerate conversion",t),this.audioContext=new window.webkitAudioContext}else this.audioContext=new window.AudioContext(t),void 0!==window.webkitAudioContext&&(yield this.audioContext.resume())}catch(t){throw e}if(this.isSafari||void 0===window.AudioWorkletNode){if(this.debug&&console.log("[BrowserClient]","using ScriptProcessorNode"),void 0!==window.webkitAudioContext){const t=this.audioContext.sampleRate/n,e=4096*Math.pow(2,Math.ceil(Math.log(t)/Math.log(2)));this.audioProcessor=this.audioContext.createScriptProcessor(e,1,1)}else this.audioProcessor=this.audioContext.createScriptProcessor(void 0,1,1);this.audioProcessor.connect(this.audioContext.destination),this.audioProcessor.addEventListener("audioprocess",(t=>{this.handleAudio(t.inputBuffer.getChannelData(0))}))}else{this.debug&&console.log("[BrowserClient]","using AudioWorkletNode");const t=new Blob(["\n// Indices for the Control SAB.\nconst CONTROL = {\n 'WRITE_INDEX': 0,\n 'FRAMES_AVAILABLE': 1,\n 'LOCK': 2,\n};\n\nclass SpeechlyProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n\n this._initialized = false;\n this.debug = false;\n this.port.onmessage = this._initialize.bind(this);\n }\n\n _initialize(event) {\n this.controlSAB = new Int32Array(event.data.controlSAB);\n this.dataSAB = new Float32Array(event.data.dataSAB);\n this.debug = event.data.debug;\n if (this.debug) {\n console.log('[BrowserClient AudioWorkletNode]', 'initializing audioworklet');\n }\n this.sharedBufferSize = this.dataSAB.length;\n this.buffer = new Float32Array(0);\n this._initialized = true;\n }\n\n _transferDataToSharedBuffer(data) {\n 
this.controlSAB[CONTROL.LOCK] = 1;\n let inputWriteIndex = this.controlSAB[CONTROL.WRITE_INDEX];\n if (this.controlSAB[CONTROL.FRAMES_AVAILABLE] > 0) {\n if (inputWriteIndex + data.length > this.sharedBufferSize) {\n // console.log('buffer overflow')\n inputWriteIndex = 0;\n }\n }\n this.dataSAB.set(data, inputWriteIndex);\n this.controlSAB[CONTROL.WRITE_INDEX] = inputWriteIndex + data.length;\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = inputWriteIndex + data.length;\n this.controlSAB[CONTROL.LOCK] = 0;\n }\n\n _pushData(data) {\n if (this.debug) {\n const signalEnergy = getStandardDeviation(data)\n this.port.postMessage({\n type: 'STATS',\n signalEnergy: signalEnergy,\n samples: data.length,\n });\n }\n\n if (this.buffer.length > this.sharedBufferSize) {\n const dataToTransfer = this.buffer.subarray(0, this.sharedBufferSize);\n this._transferDataToSharedBuffer(dataToTransfer);\n this.buffer = this.buffer.subarray(this.sharedBufferSize);\n }\n let concat = new Float32Array(this.buffer.length + data.length);\n concat.set(this.buffer);\n concat.set(data, this.buffer.length);\n this.buffer = concat;\n }\n\n process(inputs, outputs, parameters) {\n const inputChannelData = inputs[0][0];\n if (inputChannelData !== undefined) {\n if (this.controlSAB && this.dataSAB) {\n this._pushData(inputChannelData);\n } else {\n this.port.postMessage({\n type: 'DATA',\n frames: inputChannelData\n });\n }\n }\n\n return true;\n }\n}\n\nfunction getStandardDeviation(array) {\n const n = array.length\n const mean = array.reduce((a, b) => a + b) / n\n return Math.sqrt(array.map(x => Math.pow(x - mean, 2)).reduce((a, b) => a + b) / n)\n}\n\nregisterProcessor('speechly-worklet', SpeechlyProcessor);\n"],{type:"text/javascript"}),e=window.URL.createObjectURL(t);if(yield this.audioContext.audioWorklet.addModule(e),this.speechlyNode=new AudioWorkletNode(this.audioContext,"speechly-worklet"),this.speechlyNode.connect(this.audioContext.destination),this.useSAB&&void 0!==window.SharedArrayBuffer){this.debug&&console.log("[BrowserClient]","using SharedArrayBuffer");const t=new window.SharedArrayBuffer(4*Int32Array.BYTES_PER_ELEMENT),e=new window.SharedArrayBuffer(1024*Float32Array.BYTES_PER_ELEMENT);this.decoder.useSharedArrayBuffers(t,e),this.speechlyNode.port.postMessage({type:"SET_SHARED_ARRAY_BUFFERS",controlSAB:t,dataSAB:e,debug:this.debug})}else this.debug&&console.log("[BrowserClient]","can not use SharedArrayBuffer");this.speechlyNode.port.onmessage=t=>{switch(t.data.type){case"STATS":t.data.signalEnergy>this.stats.maxSignalEnergy&&(this.stats.maxSignalEnergy=t.data.signalEnergy),this.stats.sentSamples+=parseInt(t.data.samples);break;case"DATA":this.handleAudio(t.data.frames)}}}this.debug&&console.log("[BrowserClient]","audioContext sampleRate is",null===(i=this.audioContext)||void 0===i?void 0:i.sampleRate),yield this.decoder.initAudioProcessor(null===(s=this.audioContext)||void 0===s?void 0:s.sampleRate,this.vadOptions),this.vadOptions&&(yield this.startStream()),(null==t?void 0:t.mediaStream)&&(yield this.attach(null==t?void 0:t.mediaStream))}}))}adjustAudioProcessor(t){this.decoder.adjustAudioProcessor(t)}close(){var t,e,i;return o(this,void 0,void 0,(function*(){yield this.detach(),null!==this.speechlyNode&&(null===(t=this.speechlyNode)||void 0===t||t.port.close(),null===(e=this.speechlyNode)||void 0===e||e.disconnect()),void 0!==this.audioProcessor&&(null===(i=this.audioProcessor)||void 0===i||i.disconnect()),yield this.decoder.close(),this.initialized=!1}))}attach(t){var e,i,n,s,a,d;return o(this,void 
0,void 0,(function*(){if(yield this.initialize(),yield this.detach(),this.stream=null===(e=this.audioContext)||void 0===e?void 0:e.createMediaStreamSource(t),"running"!==(null===(i=this.audioContext)||void 0===i?void 0:i.state)&&(this.debug&&console.log("[BrowserClient]","audioContext resume required, state is",null===(n=this.audioContext)||void 0===n?void 0:n.state),yield null===(s=this.audioContext)||void 0===s?void 0:s.resume()),this.speechlyNode)null===(a=this.stream)||void 0===a||a.connect(this.speechlyNode);else{if(!this.audioProcessor)throw Error("[BrowserClient] cannot attach to mediaStream, not initialized");null===(d=this.stream)||void 0===d||d.connect(this.audioProcessor)}}))}detach(){return o(this,void 0,void 0,(function*(){this.active&&(yield this.stop()),this.stream&&(this.stream.disconnect(),this.stream=void 0)}))}uploadAudioData(t,e){var i,n,s;return o(this,void 0,void 0,(function*(){yield this.initialize();const o=yield null===(i=this.audioContext)||void 0===i?void 0:i.decodeAudioData(t);if(void 0===o)throw Error("Could not decode audioData");const a=o.getChannelData(0);if(o.numberOfChannels>1){const t=o.getChannelData(1);for(let e=0;e<a.length;e++)a[e]=(a[e]+t[e])/2}let d;yield this.startStream({immediate:!0});const c=(null===(n=this.vadOptions)||void 0===n?void 0:n.enabled)&&(null===(s=this.vadOptions)||void 0===s?void 0:s.controlListening);let l;d=c?"multiple context ids":yield this.start(e);for(let t=0;t<a.length;t+=16e3){const e=t+16e3;l=e>a.length?a.slice(t):a.slice(t,e),this.handleAudio(l)}return c||(yield this.stop()),yield this.stopStream(),d}))}startStream(t){return o(this,void 0,void 0,(function*(){yield this.decoder.startStream(t),this.isStreaming=!0}))}stopStream(){return o(this,void 0,void 0,(function*(){yield this.decoder.stopStream(),this.isStreaming=!1,this.isStreamAutoStarted=!1}))}queueTask(t){return o(this,void 0,void 0,(function*(){const e=this.listeningPromise;return this.listeningPromise=(()=>o(this,void 0,void 0,(function*(){return yield e,t()})))(),this.listeningPromise}))}start(t){return o(this,void 0,void 0,(function*(){return yield this.queueTask((()=>o(this,void 0,void 0,(function*(){yield this.initialize(),this.isStreaming||(yield this.startStream(),this.isStreamAutoStarted=!0);const e=this.decoder.startContext(t);return this.active=!0,e}))))}))}stop(){return o(this,void 0,void 0,(function*(){return yield this.queueTask((()=>o(this,void 0,void 0,(function*(){let t=null;try{t=yield this.decoder.stopContext(),this.isStreaming&&this.isStreamAutoStarted&&(yield this.stopStream()),0===this.stats.sentSamples&&console.warn("[BrowserClient]","audioContext contained no audio data")}catch(t){console.warn("[BrowserClient]","stop() failed",t)}finally{this.active=!1,this.stats.sentSamples=0}return t}))))}))}handleAudio(t){this.isStreaming&&(this.stats.sentSamples+=t.length,this.decoder.sendAudio(t),console.log("handleAudio"))}isActive(){return this.active}onSegmentChange(t){this.callbacks.segmentChangeCbs.push(t)}onTranscript(t){this.callbacks.transcriptCbs.push(t)}onEntity(t){this.callbacks.entityCbs.push(t)}onIntent(t){this.callbacks.intentCbs.push(t)}onTentativeTranscript(t){this.callbacks.tentativeTranscriptCbs.push(t)}onTentativeEntities(t){this.callbacks.tentativeEntityCbs.push(t)}onTentativeIntent(t){this.callbacks.tentativeIntentCbs.push(t)}onStateChange(t){this.callbacks.stateChangeCbs.push(t)}},t.BrowserMicrophone=class{constructor(){this.muted=!1,this.initialized=!1;try{const 
t=window.navigator.mediaDevices.getSupportedConstraints();this.nativeResamplingSupported=!0===t.sampleRate,this.autoGainControlSupported=!0===t.autoGainControl}catch(t){this.nativeResamplingSupported=!1,this.autoGainControlSupported=!1}}initialize(){var t;return o(this,void 0,void 0,(function*(){if(this.initialized)return;if(void 0===(null===(t=window.navigator)||void 0===t?void 0:t.mediaDevices))throw e;const i={video:!1};this.nativeResamplingSupported||this.autoGainControlSupported?i.audio={sampleRate:n,autoGainControl:this.autoGainControlSupported}:i.audio=!0;try{this.mediaStream=yield window.navigator.mediaDevices.getUserMedia(i)}catch(t){throw console.error(t),c}this.initialized=!0,this.muted=!0}))}close(){return o(this,void 0,void 0,(function*(){if(!this.initialized)throw a;this.muted=!0;this.mediaStream.getTracks().forEach((t=>t.stop())),this.mediaStream=void 0,this.initialized=!1}))}isRecording(){return!this.muted}},t.CloudDecoder=k,t.DefaultSampleRate=n,t.ErrAlreadyInitialized=d,t.ErrAppIdChangeWithoutProjectLogin=i,t.ErrDeviceNotSupported=e,t.ErrKeyNotFound=p,t.ErrNoAudioConsent=c,t.ErrNoStorageSupport=u,t.ErrNotInitialized=a,t.EventCallbacks=Z,t.SegmentState=s,t.VadDefaultOptions=b,t.stateToString=y,Object.defineProperty(t,"__esModule",{value:!0})}));
//# sourceMappingURL=speechly.umd.min.js.map

@@ -1,2 +0,2 @@

import { DecoderState, DecoderOptions, ContextOptions } from './types';
import { DecoderState, DecoderOptions, ContextOptions, AudioProcessorParameters } from './types';
import { Segment, Word, Entity, Intent } from '../speechly';

@@ -17,3 +17,6 @@ /**

private readonly callbacks;
private readonly vadOptions?;
private initialized;
private isStreaming;
private isStreamAutoStarted;
private active;

@@ -23,2 +26,3 @@ private speechlyNode?;

private stream?;
private listeningPromise;
private stats;

@@ -31,6 +35,5 @@ /**

constructor(options: DecoderOptions);
onVadStateChange(active: boolean): void;
/**
* Create an AudioContext for resampling audio.
*
 * @param options - shorthand for attaching to an existing mediaStream
*/

@@ -41,2 +44,7 @@ initialize(options?: {

/**
* Control audio processor parameters
* @param ap - Audio processor parameters to adjust
*/
adjustAudioProcessor(ap: AudioProcessorParameters): void;
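// A minimal usage sketch (values are hypothetical; assumes a constructed client):
//
//   // Make VAD activation stricter at runtime, e.g. in a noisy environment:
//   client.adjustAudioProcessor({ vad: { signalToNoiseDb: 6.0, noiseGateDb: -18.0 } })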
/**
* Closes the client, detaching from any audio source and disconnecting any audio

@@ -64,2 +72,14 @@ * processors.

/**
 * `startStream` should be called at the start of a continuous audio stream, i.e. whenever the application starts or resumes the flow of audio.
 * If you're using VAD to control starting and stopping audio contexts automatically, you can pass optional inference-time options.
 * Calling it resets the stream sample counters and history.
*/
startStream(defaultContextOptions?: ContextOptions): Promise<void>;
/**
 * `stopStream` should be called at the end of a continuous audio stream, i.e. whenever the application stops or pauses the flow of audio.
 * It ensures that all internal audio buffers are flushed for processing.
*/
stopStream(): Promise<void>;
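// A sketch of the manual stream lifecycle (microphone/mediaStream setup elided;
// with VAD-controlled listening, start()/stop() are invoked automatically instead):
//
//   await client.startStream()             // begin a continuous audio stream
//   const contextId = await client.start() // open an audio context
//   // ...audio from the attached mediaStream is forwarded while streaming...
//   await client.stop()                    // close the audio context
//   await client.stopStream()              // flush internal audio buffers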
private queueTask;
/**
 * Starts a new audio context, returning its id to use for matching received responses.

@@ -66,0 +86,0 @@ * If an active context already exists, an error is thrown.

@@ -1,2 +0,2 @@

import { DecoderOptions, DecoderState, EventCallbacks, ContextOptions } from './types';
import { DecoderOptions, DecoderState, EventCallbacks, ContextOptions, VadOptions, AudioProcessorParameters } from './types';
/**

@@ -23,3 +23,2 @@ * A client for Speechly Spoken Language Understanding (SLU) API. The client handles initializing the websocket

private connectPromise;
private listeningPromise;
private authToken?;

@@ -40,5 +39,12 @@ private readonly cbs;

/**
* Control audio processor parameters
* @param ap - Audio processor parameters to adjust
*/
adjustAudioProcessor(ap: AudioProcessorParameters): void;
/**
* Closes the client by closing the API connection and disabling the microphone.
*/
close(): Promise<void>;
startStream(defaultContextOptions?: ContextOptions): Promise<void>;
stopStream(): Promise<void>;
/**

@@ -64,5 +70,6 @@ * Starts a new SLU context by sending a start context event to the API.

registerListener(listener: EventCallbacks): void;
setSampleRate(sr: number): Promise<void>;
initAudioProcessor(sampleRate: number, vadOptions?: VadOptions): Promise<void>;
useSharedArrayBuffers(controlSAB: any, dataSAB: any): void;
private readonly handleWebsocketResponse;
private readonly handleSegmentUpdate;
private readonly handleWebsocketClosure;

@@ -69,0 +76,0 @@ private reconnect;

@@ -51,4 +51,66 @@ import { Segment, Word, Entity, Intent } from '../speechly';

storage?: Storage;
/**
* Enable voice activity detection (VAD) configuration overrides
*/
vad?: Partial<VadOptions>;
}
/**
* Options for voice activity detection (VAD)
* @public
*/
export interface VadOptions {
/**
* Run energy analysis
*/
enabled: boolean;
/**
 * Signal-to-noise energy ratio needed for a frame to be considered 'loud'.
* Default: 3.0 [dB].
*/
signalToNoiseDb: number;
/**
 * Energy threshold; frames below this level won't trigger activation.
 * Range: -90.0 to 0.0 [dB]. Default: -24 [dB].
*/
noiseGateDb: number;
/**
 * Rate of background noise learning, defined as the duration in which the background noise estimate moves halfway towards the current frame's energy.
 * Range: 0 to 5000 [ms]. Default: 400 [ms].
*/
noiseLearnHalftimeMillis: number;
/**
 * Number of past frames analyzed for energy threshold VAD. Must be less than or equal to HistoryFrames.
* Range: 1 to 32 [frames]. Default: 5 [frames].
*/
signalSearchFrames: number;
/**
 * Minimum 'signal' to 'silent' frame ratio in history to activate 'IsSignalDetected'.
* Range: 0.0 to 1.0. Default: 0.7.
*/
signalActivation: number;
/**
 * Maximum 'signal' to 'silent' frame ratio in history to deactivate 'IsSignalDetected'. Only evaluated when the sustain period is over.
* Range: 0.0 to 1.0. Default: 0.2.
*/
signalRelease: number;
/**
 * Duration to keep 'IsSignalDetected' active. Renewed as long as VADActivation holds true.
* Range: 0 to 8000 [ms]. Default: 3000 [ms].
*/
signalSustainMillis: number;
/**
 * Enable listening control if you want to use IsSignalDetected to control SLU start/stop.
* Default: true.
*/
controlListening: boolean;
}
export interface AudioProcessorParameters {
vad?: Partial<VadOptions>;
}
/**
* Default options for voice activity detection (VAD)
* @public
*/
export declare const VadDefaultOptions: VadOptions;
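// A hedged construction sketch: unset VAD fields fall back to VadDefaultOptions,
// and 'your-app-id' is a placeholder:
//
//   import { BrowserClient } from '@speechly/browser-client'
//
//   const client = new BrowserClient({
//     appId: 'your-app-id',
//     vad: { enabled: true, signalSustainMillis: 2000 },
//   })
//   // With controlListening at its default (true), VAD signal high/low
//   // automatically starts and stops listening.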
/**
* All possible states of a Speechly API client. Failed state is non-recoverable.

@@ -80,2 +142,3 @@ * It is also possible to use arithmetic for state comparison, e.g. `if (state < speechly.ClientState.Disconnected)`,

contextStoppedCbs: Array<(contextId: string) => void>;
onVadStateChange: Array<(active: boolean) => void>;
}

@@ -88,2 +151,7 @@ /**

appId?: string;
/**
 * BrowserClient.uploadAudioData uses this internally to set the audio worker
 * to ‘immediate audio processor’ mode, where it controls start/stop of contexts at its own pace.
*/
immediate?: boolean;
}
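// Illustrative only: callers normally never pass `immediate` themselves.
// uploadAudioData sets it when starting the stream so decoded file audio is
// processed at the worker's own pace rather than in real time ('speech.wav'
// is a placeholder):
//
//   const data = await (await fetch('speech.wav')).arrayBuffer()
//   const contextId = await client.uploadAudioData(data)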

@@ -10,3 +10,3 @@ /**

private readonly nativeResamplingSupported;
private readonly autoGainControl;
private readonly autoGainControlSupported;
mediaStream?: MediaStream;

@@ -13,0 +13,0 @@ constructor();

@@ -0,1 +1,2 @@

import { AudioProcessorParameters, ContextOptions, VadOptions } from '../client';
/**

@@ -9,3 +10,3 @@ * The interface for response returned by WebSocket client.

*/
type: WebsocketResponseType;
type: WebsocketResponseType | WorkerSignal;
/**

@@ -33,5 +34,2 @@ * Audio context ID.

export declare enum WebsocketResponseType {
Opened = "WEBSOCKET_OPEN",
Closed = "WEBSOCKET_CLOSED",
SourceSampleRateSetSuccess = "SOURCE_SAMPLE_RATE_SET_SUCCESS",
Started = "started",

@@ -48,2 +46,30 @@ Stopped = "stopped",

/**
* Messages from worker to controller
* @public
*/
export declare enum WorkerSignal {
Opened = "WEBSOCKET_OPEN",
Closed = "WEBSOCKET_CLOSED",
AudioProcessorReady = "SOURCE_SAMPLE_RATE_SET_SUCCESS",
VadSignalHigh = "VadSignalHigh",
VadSignalLow = "VadSignalLow"
}
/**
* Messages from controller to worker
* @public
*/
export declare enum ControllerSignal {
connect = "connect",
initAudioProcessor = "initAudioProcessor",
adjustAudioProcessor = "adjustAudioProcessor",
SET_SHARED_ARRAY_BUFFERS = "SET_SHARED_ARRAY_BUFFERS",
CLOSE = "CLOSE",
START_CONTEXT = "START_CONTEXT",
SWITCH_CONTEXT = "SWITCH_CONTEXT",
STOP_CONTEXT = "STOP_CONTEXT",
AUDIO = "AUDIO",
startStream = "startStream",
stopStream = "stopStream"
}
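// A rough sketch of the message protocol these enums encode; the payload
// fields follow the bundled worker's onmessage handler, and the identifiers
// below are assumed to be in scope:
//
//   worker.postMessage({ type: ControllerSignal.connect, apiUrl, authToken, targetSampleRate, debug })
//   worker.postMessage({ type: ControllerSignal.initAudioProcessor, sourceSampleRate, vadOptions })
//   worker.postMessage({ type: ControllerSignal.START_CONTEXT, appId })
//   // ...and the worker answers with WorkerSignal messages, e.g.
//   // { type: WorkerSignal.AudioProcessorReady } or { type: WorkerSignal.VadSignalHigh }.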
/**
* Transcript response payload.

@@ -178,4 +204,9 @@ * @public

*/
setSourceSampleRate(sourceSampleRate: number): Promise<void>;
initAudioProcessor(sourceSampleRate: number, vadOptions?: VadOptions): Promise<void>;
/**
* Control audio processor parameters
* @param ap - Audio processor parameters to adjust
*/
adjustAudioProcessor(ap: AudioProcessorParameters): void;
/**
* Closes the client.

@@ -215,2 +246,4 @@ *

postMessage(message: Object): void;
startStream(defaultContextOptions?: ContextOptions): Promise<void>;
stopStream(): Promise<void>;
}
import { APIClient, ResponseCallback, CloseCallback } from './types';
import { AudioProcessorParameters, ContextOptions, VadOptions } from '../client';
export declare class WebWorkerController implements APIClient {

@@ -14,4 +15,11 @@ private readonly worker;

initialize(apiUrl: string, authToken: string, targetSampleRate: number, debug: boolean): Promise<void>;
setSourceSampleRate(sourceSampleRate: number): Promise<void>;
initAudioProcessor(sourceSampleRate: number, vadOptions?: VadOptions): Promise<void>;
/**
* Control audio processor parameters
* @param ap - Audio processor parameters to adjust
*/
adjustAudioProcessor(ap: AudioProcessorParameters): void;
close(): Promise<void>;
startStream(defaultContextOptions?: ContextOptions): Promise<void>;
stopStream(): Promise<void>;
startContext(appId?: string): Promise<string>;

@@ -18,0 +26,0 @@ stopContext(): Promise<string>;

@@ -1,2 +0,41 @@

declare const _default: "/**\n * Known WebSocket response types.\n * @public\n */\nvar WebsocketResponseType;\n(function (WebsocketResponseType) {\n WebsocketResponseType[\"Opened\"] = \"WEBSOCKET_OPEN\";\n WebsocketResponseType[\"SourceSampleRateSetSuccess\"] = \"SOURCE_SAMPLE_RATE_SET_SUCCESS\";\n WebsocketResponseType[\"Started\"] = \"started\";\n WebsocketResponseType[\"Stopped\"] = \"stopped\";\n})(WebsocketResponseType || (WebsocketResponseType = {}));\nvar CONTROL = {\n WRITE_INDEX: 0,\n FRAMES_AVAILABLE: 1,\n LOCK: 2\n};\nvar WebsocketClient = /** @class */ (function () {\n function WebsocketClient(ctx) {\n var _this = this;\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n this.shouldResendLastFramesSent = false;\n this.buffer = new Float32Array(0);\n this.lastFramesSent = new Int16Array(0); // to re-send after switch context\n this.debug = false;\n this.initialized = false;\n // WebSocket's close handler, called e.g. when\n // - normal close (code 1000)\n // - network unreachable or unable to (re)connect (code 1006)\n // List of CloseEvent.code values: https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent/code\n this.onWebsocketClose = function (event) {\n if (_this.debug) {\n console.log('[WebSocketClient]', 'onWebsocketClose');\n }\n _this.websocket.removeEventListener('open', _this.onWebsocketOpen);\n _this.websocket.removeEventListener('message', _this.onWebsocketMessage);\n _this.websocket.removeEventListener('error', _this.onWebsocketError);\n _this.websocket.removeEventListener('close', _this.onWebsocketClose);\n _this.websocket = undefined;\n _this.workerCtx.postMessage({\n type: 'WEBSOCKET_CLOSED',\n code: event.code,\n reason: event.reason,\n wasClean: event.wasClean\n });\n };\n this.onWebsocketOpen = function (_event) {\n if (_this.debug) {\n console.log('[WebSocketClient]', 'websocket opened');\n }\n if (_this.isContextStarted && !_this.isStartContextConfirmed) {\n _this.send(_this.outbox);\n }\n _this.workerCtx.postMessage({ type: 'WEBSOCKET_OPEN' });\n };\n this.onWebsocketError = function (_event) {\n if (_this.debug) {\n console.log('[WebSocketClient]', 'websocket error');\n }\n };\n this.onWebsocketMessage = function (event) {\n var response;\n try {\n response = JSON.parse(event.data);\n }\n catch (e) {\n console.error('[WebSocketClient]', 'error parsing response from the server:', e);\n return;\n }\n if (response.type === WebsocketResponseType.Started) {\n _this.isStartContextConfirmed = true;\n if (_this.shouldResendLastFramesSent) {\n _this.resendLastFrames();\n _this.shouldResendLastFramesSent = false;\n }\n }\n _this.workerCtx.postMessage(response);\n };\n this.workerCtx = ctx;\n }\n WebsocketClient.prototype.init = function (apiUrl, authToken, targetSampleRate, debug) {\n this.debug = debug;\n if (this.debug) {\n console.log('[WebSocketClient]', 'initialize worker');\n }\n this.apiUrl = apiUrl;\n this.authToken = authToken;\n this.targetSampleRate = targetSampleRate;\n this.initialized = true;\n this.isContextStarted = false;\n this.connect(0);\n };\n WebsocketClient.prototype.setSourceSampleRate = function (sourceSampleRate) {\n this.sourceSampleRate = sourceSampleRate;\n this.resampleRatio = this.sourceSampleRate / this.targetSampleRate;\n if (this.debug) {\n console.log('[WebSocketClient]', 'resampleRatio', this.resampleRatio);\n }\n if (this.resampleRatio > 1) {\n this.filter = generateFilter(this.sourceSampleRate, this.targetSampleRate, 127);\n }\n this.workerCtx.postMessage({ type: 'SOURCE_SAMPLE_RATE_SET_SUCCESS' 
});\n if (isNaN(this.resampleRatio)) {\n throw Error(\"resampleRatio is NaN source rate is \".concat(this.sourceSampleRate, \" and target rate is \").concat(this.targetSampleRate));\n }\n };\n WebsocketClient.prototype.setSharedArrayBuffers = function (controlSAB, dataSAB) {\n this.controlSAB = new Int32Array(controlSAB);\n this.dataSAB = new Float32Array(dataSAB);\n var audioHandleInterval = this.dataSAB.length / 32; // ms\n if (this.debug) {\n console.log('[WebSocketClient]', 'Audio handle interval', audioHandleInterval, 'ms');\n }\n setInterval(this.sendAudioFromSAB.bind(this), audioHandleInterval);\n };\n WebsocketClient.prototype.connect = function (timeout) {\n if (timeout === void 0) { timeout = 1000; }\n if (this.debug) {\n console.log('[WebSocketClient]', 'connect in ', timeout / 1000, 'sec');\n }\n setTimeout(this.initializeWebsocket.bind(this), timeout);\n };\n WebsocketClient.prototype.initializeWebsocket = function () {\n if (this.debug) {\n console.log('[WebSocketClient]', 'connecting to ', this.apiUrl);\n }\n this.websocket = new WebSocket(this.apiUrl, this.authToken);\n this.websocket.addEventListener('open', this.onWebsocketOpen);\n this.websocket.addEventListener('message', this.onWebsocketMessage);\n this.websocket.addEventListener('error', this.onWebsocketError);\n this.websocket.addEventListener('close', this.onWebsocketClose);\n };\n WebsocketClient.prototype.isOpen = function () {\n return this.websocket !== undefined && this.websocket.readyState === this.websocket.OPEN;\n };\n WebsocketClient.prototype.resendLastFrames = function () {\n if (this.lastFramesSent.length > 0) {\n this.send(this.lastFramesSent);\n this.lastFramesSent = new Int16Array(0);\n }\n };\n WebsocketClient.prototype.sendAudio = function (audioChunk) {\n if (!this.isContextStarted) {\n return;\n }\n if (audioChunk.length > 0) {\n if (this.resampleRatio > 1) {\n // Downsampling\n this.send(this.downsample(audioChunk));\n }\n else {\n this.send(float32ToInt16(audioChunk));\n }\n }\n };\n WebsocketClient.prototype.sendAudioFromSAB = function () {\n if (!this.isContextStarted) {\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n return;\n }\n if (this.controlSAB == undefined) {\n return;\n }\n var framesAvailable = this.controlSAB[CONTROL.FRAMES_AVAILABLE];\n var lock = this.controlSAB[CONTROL.LOCK];\n if (lock == 0 && framesAvailable > 0) {\n var data = this.dataSAB.subarray(0, framesAvailable);\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;\n this.controlSAB[CONTROL.WRITE_INDEX] = 0;\n if (data.length > 0) {\n var frames_1;\n if (this.resampleRatio > 1) {\n frames_1 = this.downsample(data);\n }\n else {\n frames_1 = float32ToInt16(data);\n }\n this.send(frames_1);\n // 16000 per second, 1000 in 100 ms\n // save last 250 ms\n if (this.lastFramesSent.length > 1024 * 4) {\n this.lastFramesSent = frames_1;\n }\n else {\n var concat = new Int16Array(this.lastFramesSent.length + frames_1.length);\n concat.set(this.lastFramesSent);\n concat.set(frames_1, this.lastFramesSent.length);\n this.lastFramesSent = concat;\n }\n }\n }\n };\n WebsocketClient.prototype.startContext = function (appId) {\n if (this.isContextStarted) {\n console.error('[WebSocketClient]', \"can't start context: active context exists\");\n return;\n }\n this.isContextStarted = true;\n this.isStartContextConfirmed = false;\n if (appId !== undefined) {\n this.outbox = JSON.stringify({ event: 'start', appId: appId });\n }\n else {\n this.outbox = JSON.stringify({ event: 'start' });\n }\n 
this.send(this.outbox);\n };\n WebsocketClient.prototype.stopContext = function () {\n if (!this.websocket) {\n throw Error('WebSocket is undefined');\n }\n if (!this.isContextStarted) {\n console.error('[WebSocketClient]', \"can't stop context: no active context\");\n return;\n }\n this.isContextStarted = false;\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n };\n WebsocketClient.prototype.switchContext = function (newAppId) {\n if (!this.websocket) {\n throw Error('WebSocket is undefined');\n }\n if (!this.isContextStarted) {\n console.error('[WebSocketClient]', \"can't switch context: no active context\");\n return;\n }\n if (newAppId == undefined) {\n console.error('[WebSocketClient]', \"can't switch context: new app id is undefined\");\n return;\n }\n this.isStartContextConfirmed = false;\n var StopEventJSON = JSON.stringify({ event: 'stop' });\n this.send(StopEventJSON);\n this.shouldResendLastFramesSent = true;\n this.send(JSON.stringify({ event: 'start', appId: newAppId }));\n };\n WebsocketClient.prototype.closeWebsocket = function (websocketCode, reason) {\n if (websocketCode === void 0) { websocketCode = 1005; }\n if (reason === void 0) { reason = 'No Status Received'; }\n if (this.debug) {\n console.log('[WebSocketClient]', 'Websocket closing');\n }\n if (!this.websocket) {\n throw Error('WebSocket is undefined');\n }\n this.websocket.close(websocketCode, reason);\n };\n WebsocketClient.prototype.downsample = function (input) {\n var inputBuffer = new Float32Array(this.buffer.length + input.length);\n inputBuffer.set(this.buffer, 0);\n inputBuffer.set(input, this.buffer.length);\n var outputLength = Math.ceil((inputBuffer.length - this.filter.length) / this.resampleRatio);\n var outputBuffer = new Int16Array(outputLength);\n for (var i = 0; i < outputLength; i++) {\n var offset = Math.round(this.resampleRatio * i);\n var val = 0.0;\n for (var j = 0; j < this.filter.length; j++) {\n val += inputBuffer[offset + j] * this.filter[j];\n }\n outputBuffer[i] = val * (val < 0 ? 
0x8000 : 0x7fff);\n }\n var remainingOffset = Math.round(this.resampleRatio * outputLength);\n if (remainingOffset < inputBuffer.length) {\n this.buffer = inputBuffer.subarray(remainingOffset);\n }\n else {\n this.buffer = new Float32Array(0);\n }\n return outputBuffer;\n };\n WebsocketClient.prototype.send = function (data) {\n if (this.isOpen()) {\n try {\n this.websocket.send(data);\n }\n catch (error) {\n console.log('[WebSocketClient]', 'server connection error', error);\n }\n }\n };\n return WebsocketClient;\n}());\nvar ctx = self;\nvar websocketClient = new WebsocketClient(ctx);\nctx.onmessage = function (e) {\n switch (e.data.type) {\n case 'INIT':\n websocketClient.init(e.data.apiUrl, e.data.authToken, e.data.targetSampleRate, e.data.debug);\n break;\n case 'SET_SOURCE_SAMPLE_RATE':\n websocketClient.setSourceSampleRate(e.data.sourceSampleRate);\n break;\n case 'SET_SHARED_ARRAY_BUFFERS':\n websocketClient.setSharedArrayBuffers(e.data.controlSAB, e.data.dataSAB);\n break;\n case 'CLOSE':\n websocketClient.closeWebsocket(1000, 'Close requested by client');\n break;\n case 'START_CONTEXT':\n websocketClient.startContext(e.data.appId);\n break;\n case 'SWITCH_CONTEXT':\n websocketClient.switchContext(e.data.appId);\n break;\n case 'STOP_CONTEXT':\n websocketClient.stopContext();\n break;\n case 'AUDIO':\n websocketClient.sendAudio(e.data.payload);\n break;\n default:\n console.log('WORKER', e);\n }\n};\nfunction float32ToInt16(buffer) {\n var buf = new Int16Array(buffer.length);\n for (var l = 0; l < buffer.length; l++) {\n buf[l] = buffer[l] * (buffer[l] < 0 ? 0x8000 : 0x7fff);\n }\n return buf;\n}\nfunction generateFilter(sourceSampleRate, targetSampleRate, length) {\n if (length % 2 === 0) {\n throw Error('Filter length must be odd');\n }\n var cutoff = targetSampleRate / 2;\n var filter = new Float32Array(length);\n var sum = 0;\n for (var i = 0; i < length; i++) {\n var x = sinc(((2 * cutoff) / sourceSampleRate) * (i - (length - 1) / 2));\n sum += x;\n filter[i] = x;\n }\n for (var i = 0; i < length; i++) {\n filter[i] = filter[i] / sum;\n }\n return filter;\n}\nfunction sinc(x) {\n if (x === 0.0) {\n return 1.0;\n }\n var piX = Math.PI * x;\n return Math.sin(piX) / piX;\n}\n";
export default _default;
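// For reference, both worker generations convert [-1, 1] float samples to
// 16-bit PCM with the same scaling; a standalone sketch of that conversion:
//
//   function float32ToInt16(buffer: Float32Array): Int16Array {
//     const out = new Int16Array(buffer.length)
//     for (let i = 0; i < buffer.length; i++) {
//       // negative samples scale by 0x8000, positive by 0x7fff
//       out[i] = buffer[i] * (buffer[i] < 0 ? 0x8000 : 0x7fff)
//     }
//     return out
//   }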
import { AudioProcessorParameters, ContextOptions, VadOptions } from '../client';
declare class WebsocketClient {
private readonly workerCtx;
private targetSampleRate;
private isContextStarted;
private websocket?;
private audioProcessor?;
private controlSAB?;
private dataSAB?;
private readonly frameMillis;
private readonly outputAudioFrame;
private debug;
private defaultContextOptions?;
constructor(ctx: Worker);
connect(apiUrl: string, authToken: string, targetSampleRate: number, debug: boolean): void;
initAudioProcessor(sourceSampleRate: number, vadOptions?: VadOptions): void;
/**
* Control audio processor parameters
* @param ap - Audio processor parameters to adjust
*/
adjustAudioProcessor(ap: AudioProcessorParameters): void;
setSharedArrayBuffers(controlSAB: number, dataSAB: number): void;
startStream(defaultContextOptions?: ContextOptions): void;
stopStream(): void;
/**
* Processes and sends audio
* @param audioChunk - audio data to process
*/
processAudio(audioChunk: Float32Array): void;
processAudioSAB(): void;
startContext(appId?: string): void;
stopContext(): void;
switchContext(newAppId: string): void;
closeWebsocket(websocketCode?: number, reason?: string): void;
private readonly onWebsocketClose;
private readonly onWebsocketOpen;
private readonly onWebsocketError;
private readonly onWebsocketMessage;
send(data: string | Int16Array): void;
}
export default WebsocketClient;
{
"name": "@speechly/browser-client",
"version": "2.0.1",
"version": "2.1.0-beta.0",
"description": "Browser client for Speechly API",

@@ -24,13 +24,2 @@ "keywords": [

],
"scripts": {
"build": "pnpm run lint && pnpm run test 2>&1 && rm -rf ./dist/ && pnpm run buildworker && pnpx rollup -c --silent",
"build:watch": "rm -rf ./dist/ && pnpm run buildworker && pnpx rollup -c --silent",
"buildworker": "pnpx tsc ./worker/worker.ts && cat ./worker/templateOpen > ./src/websocket/worker.ts && cat ./worker/worker.js >> ./src/websocket/worker.ts && cat ./worker/templateEnd >> ./src/websocket/worker.ts",
"check": "pnpm run build && pnpx api-extractor run --verbose",
"docs": "rimraf docs && pnpx typedoc --readme none --excludeExternals --excludePrivate --excludeProtected --out ./docs/ --entryPointStrategy expand --sort required-first --disableSources ./src/",
"getdeps": "pnpm install --force --frozen-lockfile",
"lint": "pnpx eslint --cache --max-warnings 0 'src/**/*.{ts,tsx}'",
"precommit": "pnpx prettier --write src/**/*.ts && pnpm run build && pnpx api-extractor run --local && pnpm run docs",
"test": "pnpx jest --config ./config/jest.config.js"
},
"repository": {

@@ -44,3 +33,6 @@ "type": "git",

},
"files": ["core/**/*", "src/**/*"],
"files": [
"core/**/*",
"src/**/*"
],
"main": "./core/speechly.umd.min.js",

@@ -85,3 +77,4 @@ "module": "./core/speechly.es.js",

"typedoc-plugin-markdown": "^3.11.3",
"@rollup/plugin-typescript": "~8.3.2"
"@rollup/plugin-typescript": "~8.3.2",
"rollup-plugin-web-worker-loader": "~1.6.1"
},

@@ -93,3 +86,13 @@ "publishConfig": {

"**/optimist/minimist": "0.2.1"
},
"scripts": {
"build": "pnpm run lint && pnpm run test 2>&1 && rm -rf ./dist/ && pnpx rollup -c --silent",
"build:watch": "rm -rf ./dist/ && pnpx rollup -c --silent",
"check": "pnpm run build && pnpx api-extractor run --verbose",
"docs": "rimraf docs && pnpx typedoc --readme none --excludeExternals --excludePrivate --excludeProtected --out ./docs/ --entryPointStrategy expand --sort required-first --disableSources ./src/",
"getdeps": "pnpm install --force --frozen-lockfile",
"lint": "pnpx eslint --cache --max-warnings 0 'src/**/*.{ts,tsx}'",
"precommit": "pnpx prettier --write src/**/*.ts && pnpm run build && pnpx api-extractor run --local && pnpm run docs",
"test": "pnpx jest --config ./config/jest.config.js"
}
}
}

@@ -1,2 +0,2 @@

import { DecoderState, EventCallbacks, DecoderOptions, ContextOptions } from './types'
import { DecoderState, EventCallbacks, DecoderOptions, ContextOptions, VadOptions, VadDefaultOptions, AudioProcessorParameters } from './types'
import { CloudDecoder } from './decoder'

@@ -22,3 +22,6 @@ import { ErrDeviceNotSupported, DefaultSampleRate, Segment, Word, Entity, Intent } from '../speechly'

private readonly vadOptions?: VadOptions
private initialized: boolean = false
private isStreaming: boolean = false
private isStreamAutoStarted: boolean = false
private active: boolean = false

@@ -28,2 +31,3 @@ private speechlyNode?: AudioWorkletNode

private stream?: MediaStreamAudioSourceNode
private listeningPromise: Promise<any> | null = null

@@ -48,5 +52,7 @@ private stats = {

this.useSAB = !this.isSafari
this.vadOptions = { ...VadDefaultOptions, ...options.vad }
this.debug = options.debug ?? true
this.callbacks = new EventCallbacks()
this.callbacks.onVadStateChange.push(this.onVadStateChange.bind(this))
this.decoder = options.decoder ?? new CloudDecoder(options)

@@ -56,6 +62,19 @@ this.decoder.registerListener(this.callbacks)

onVadStateChange(active: boolean): void {
if (this.debug) {
console.log('[BrowserClient]', 'onVadStateChange', active)
}
if (this.vadOptions?.controlListening) {
if (active) {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
if (!this.active) this.start()
} else {
// eslint-disable-next-line @typescript-eslint/no-floating-promises
if (this.active) this.stop()
}
}
}
/**
* Create an AudioContext for resampling audio.
*
 * @param options - shorthand for attaching to an existing mediaStream
*/

@@ -177,3 +196,9 @@ async initialize(options?: { mediaStream?: MediaStream }): Promise<void> {

}
await this.decoder.setSampleRate(this.audioContext?.sampleRate)
await this.decoder.initAudioProcessor(this.audioContext?.sampleRate, this.vadOptions)
// Auto-start stream if VAD is defined
if (this.vadOptions) {
await this.startStream()
}
if (options?.mediaStream) {

@@ -185,2 +210,10 @@ await this.attach(options?.mediaStream)

/**
* Control audio processor parameters
* @param ap - Audio processor parameters to adjust
*/
adjustAudioProcessor(ap: AudioProcessorParameters): void {
this.decoder.adjustAudioProcessor(ap)
}
/**
* Closes the client, detaching from any audio source and disconnecting any audio

@@ -263,4 +296,12 @@ * processors.

const contextId = await this.start(options)
await this.startStream({ immediate: true })
let contextId: string
const vadActive = this.vadOptions?.enabled && this.vadOptions?.controlListening
if (!vadActive) {
contextId = await this.start(options)
} else {
contextId = 'multiple context ids'
}
let sendBuffer: Float32Array

@@ -277,3 +318,8 @@ for (let b = 0; b < samples.length; b += 16000) {

await this.stop()
if (!vadActive) {
await this.stop()
}
await this.stopStream()
return contextId

@@ -283,2 +329,31 @@ }

/**
 * `startStream` should be called at the start of a continuous audio stream, i.e. whenever the application starts or resumes the flow of audio.
 * If you're using VAD to control starting and stopping audio contexts automatically, you can pass optional inference-time options.
 * Calling it resets the stream sample counters and history.
*/
async startStream(defaultContextOptions?: ContextOptions): Promise<void> {
await this.decoder.startStream(defaultContextOptions)
this.isStreaming = true
}
/**
 * `stopStream` should be called at the end of a continuous audio stream, i.e. whenever the application stops or pauses the flow of audio.
 * It ensures that all internal audio buffers are flushed for processing.
*/
async stopStream(): Promise<void> {
await this.decoder.stopStream()
this.isStreaming = false
this.isStreamAutoStarted = false
}
private async queueTask(task: () => Promise<any>): Promise<any> {
const prevTask = this.listeningPromise
this.listeningPromise = (async () => {
await prevTask
return task()
})()
return this.listeningPromise
}
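// queueTask serializes start()/stop() by chaining each task onto the previous
// listeningPromise, so overlapping calls run strictly in order. The same
// pattern in isolation (sketch):
//
//   let chain: Promise<any> = Promise.resolve()
//   function enqueue<T>(task: () => Promise<T>): Promise<T> {
//     chain = chain.then(() => task())
//     return chain
//   }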
/**
 * Starts a new audio context, returning its id to use for matching received responses.

@@ -291,6 +366,14 @@ * If an active context already exists, an error is thrown.

async start(options?: ContextOptions): Promise<string> {
await this.initialize()
const startPromise = this.decoder.startContext(options)
this.active = true
return startPromise
const promise = await this.queueTask(async () => {
await this.initialize()
if (!this.isStreaming) {
// Automatically control streaming for backwards compatibility
await this.startStream()
this.isStreamAutoStarted = true
}
const startPromise = this.decoder.startContext(options)
this.active = true
return startPromise
})
return promise
}

@@ -305,14 +388,22 @@

async stop(): Promise<string | null> {
let contextId = null
try {
contextId = await this.decoder.stopContext()
if (this.stats.sentSamples === 0) {
console.warn('[BrowserClient]', 'audioContext contained no audio data')
const contextId = await this.queueTask(async () => {
let contextId = null
try {
contextId = await this.decoder.stopContext()
if (this.isStreaming && this.isStreamAutoStarted) {
// Automatically control streaming for backwards compatibility
await this.stopStream()
}
if (this.stats.sentSamples === 0) {
console.warn('[BrowserClient]', 'audioContext contained no audio data')
}
} catch (err) {
console.warn('[BrowserClient]', 'stop() failed', err)
} finally {
this.active = false
this.stats.sentSamples = 0
}
} catch (err) {
console.warn('[BrowserClient]', 'stop() failed', err)
} finally {
this.active = false
this.stats.sentSamples = 0
}
return contextId
})
return contextId

@@ -322,8 +413,6 @@ }

private handleAudio(array: Float32Array): void {
if (!this.active) {
return
}
if (array.length > 0) {
if (this.isStreaming) {
this.stats.sentSamples += array.length
this.decoder.sendAudio(array)
console.log('handleAudio')
}

@@ -330,0 +419,0 @@ }

@@ -17,2 +17,3 @@ import { v4 as uuidv4 } from 'uuid'

IntentResponse,
WorkerSignal,
} from '../websocket'

@@ -22,3 +23,3 @@

import { DecoderOptions, DecoderState, EventCallbacks, ContextOptions } from './types'
import { DecoderOptions, DecoderState, EventCallbacks, ContextOptions, VadOptions, AudioProcessorParameters } from './types'
import { stateToString } from './state'

@@ -55,3 +56,2 @@

private connectPromise: Promise<void> | null = null
private listeningPromise: Promise<any> | null = null

@@ -135,2 +135,10 @@ private authToken?: string

/**
* Control audio processor parameters
* @param ap - Audio processor parameters to adjust
*/
adjustAudioProcessor(ap: AudioProcessorParameters): void {
this.apiClient.adjustAudioProcessor(ap)
}
/**
* Closes the client by closing the API connection and disabling the microphone.

@@ -157,2 +165,13 @@ */

async startStream(defaultContextOptions?: ContextOptions): Promise<void> {
await this.apiClient.startStream(defaultContextOptions)
}
async stopStream(): Promise<void> {
if (this.state === DecoderState.Active) {
await this.stopContext()
}
await this.apiClient.stopStream()
}
/**

@@ -192,4 +211,2 @@ * Starts a new SLU context by sending a start context event to the API.

this.activeContexts.set(contextId, new Map<number, SegmentState>())
this.cbs.forEach(cb => cb.contextStartedCbs.forEach(f => f(contextId)))
return contextId

@@ -202,9 +219,2 @@ }

sendAudio(audio: Float32Array): void {
if (this.state !== DecoderState.Active) {
throw Error(
'[Decoder] Unable to complete startContext: Expected Active state, but was in ' +
stateToString(this.state) +
'.',
)
}
this.apiClient.sendAudio(audio)

@@ -232,4 +242,2 @@ }

const contextId = await this.apiClient.stopContext()
this.activeContexts.delete(contextId)
this.cbs.forEach(cb => cb.contextStoppedCbs.forEach(f => f(contextId)))
return contextId

@@ -263,5 +271,5 @@ } catch (err) {

async setSampleRate(sr: number): Promise<void> {
this.sampleRate = sr
await this.apiClient.setSourceSampleRate(sr)
async initAudioProcessor(sampleRate: number, vadOptions?: VadOptions): Promise<void> {
this.sampleRate = sampleRate
await this.apiClient.initAudioProcessor(sampleRate, vadOptions)
}

@@ -282,2 +290,26 @@

switch (response.type) {
case WorkerSignal.VadSignalHigh:
this.cbs.forEach(cb => cb.onVadStateChange.forEach(f => f(true)))
break
case WorkerSignal.VadSignalLow:
this.cbs.forEach(cb => cb.onVadStateChange.forEach(f => f(false)))
break
case WebsocketResponseType.Started: {
this.activeContexts.set(response.audio_context, new Map<number, SegmentState>())
this.cbs.forEach(cb => cb.contextStartedCbs.forEach(f => f(response.audio_context)))
break
}
case WebsocketResponseType.Stopped: {
this.activeContexts.delete(response.audio_context)
this.cbs.forEach(cb => cb.contextStoppedCbs.forEach(f => f(response.audio_context)))
break
}
default:
this.handleSegmentUpdate(response)
break
}
}
private readonly handleSegmentUpdate = (response: WebsocketResponse): void => {
const { audio_context, segment_id, type } = response

@@ -369,5 +401,2 @@ let { data } = response

// Reset
this.listeningPromise = null
this.setState(DecoderState.Disconnected)

@@ -374,0 +403,0 @@ // eslint-disable-next-line @typescript-eslint/no-floating-promises

@@ -61,5 +61,89 @@ import { Segment, Word, Entity, Intent } from '../speechly'

storage?: Storage
/**
* Enable voice activity detection (VAD) configuration overrides
*/
vad?: Partial<VadOptions>
}
/**
* Options for voice activity detection (VAD)
* @public
*/
export interface VadOptions {
/**
* Run energy analysis
*/
enabled: boolean
/**
 * Signal-to-noise energy ratio needed for a frame to be considered 'loud'.
* Default: 3.0 [dB].
*/
signalToNoiseDb: number
/**
 * Energy threshold; frames below this level won't trigger activation.
 * Range: -90.0 to 0.0 [dB]. Default: -24 [dB].
*/
noiseGateDb: number
/**
 * Rate of background noise learning, defined as the duration in which the background noise estimate moves halfway towards the current frame's energy.
 * Range: 0 to 5000 [ms]. Default: 400 [ms].
*/
noiseLearnHalftimeMillis: number
/**
 * Number of past frames analyzed for energy threshold VAD. Must be less than or equal to HistoryFrames.
* Range: 1 to 32 [frames]. Default: 5 [frames].
*/
signalSearchFrames: number
/**
 * Minimum 'signal' to 'silent' frame ratio in history to activate 'IsSignalDetected'.
* Range: 0.0 to 1.0. Default: 0.7.
*/
signalActivation: number
/**
 * Maximum 'signal' to 'silent' frame ratio in history to deactivate 'IsSignalDetected'. Only evaluated when the sustain period is over.
* Range: 0.0 to 1.0. Default: 0.2.
*/
signalRelease: number
/**
* Duration to keep 'IsSignalDetected' active. Renewed as long as VADActivation is holds true.
* Range: 0 to 8000 [ms]. Default: 3000 [ms].
*/
signalSustainMillis: number
/**
* Enable listening control if you want to use IsSignalDetected to control SLU start / stop.
* Default: true.
*/
controlListening: boolean
}
export interface AudioProcessorParameters {
vad?: Partial<VadOptions>
}
/**
* Default options for voice activity detection (VAD)
* @public
*/
export const VadDefaultOptions: VadOptions = {
enabled: false,
controlListening: true,
signalToNoiseDb: 3.0,
noiseGateDb: -24.0,
noiseLearnHalftimeMillis: 400,
signalSearchFrames: 5,
signalActivation: 0.7,
signalRelease: 0.2,
signalSustainMillis: 3000,
}
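Because the client option vad is a Partial&lt;VadOptions&gt;, callers only override the fields they care about and the rest fall back to VadDefaultOptions. A sketch (the surrounding client construction is omitted):

  const vad: Partial<VadOptions> = {
    enabled: true,          // turn energy-threshold VAD on
    signalToNoiseDb: 6.0,   // demand a louder signal than the 3.0 dB default
  }
  // passed among the client options, e.g. { appId, vad }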
/**
* All possible states of a Speechly API client. Failed state is non-recoverable.

@@ -93,2 +177,3 @@ * It is also possible to use arithmetics for state comparison, e.g. `if (state < speechly.ClientState.Disconnected)`,

contextStoppedCbs: Array<(contextId: string) => void> = []
onVadStateChange: Array<(active: boolean) => void> = []
}

@@ -102,2 +187,8 @@

appId?: string
/**
 * BrowserClient.uploadAudioData uses this internally to put the audio worker into
 * 'immediate audio processor' mode, in which the worker controls context start/stop at its own pace.
*/
immediate?: boolean
}
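In practice, setting immediate: true in the default context options is what lets uploads run faster than real time: the worker opens and closes contexts at its own pace instead of waiting for UI-driven start/stop. A sketch against the APIClient surface in this diff:

  await apiClient.startStream({ immediate: true })
  // ...feed pre-recorded audio through the processor...
  await apiClient.stopStream()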
interface Window {
webkitAudioContext: typeof AudioContext
}
declare module 'web-worker:*' {
const WorkerFactory: new () => Worker
export default WorkerFactory
}
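This ambient declaration is what lets the controller import the worker source as a constructor, with a bundler worker-loader plugin (build configuration assumed here, not shown in this diff) resolving the web-worker: prefix:

  import WebsocketClient from 'web-worker:./worker'
  const worker: Worker = new WebsocketClient()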

@@ -14,3 +14,3 @@ import { ErrDeviceNotSupported, DefaultSampleRate } from '../speechly'

private readonly nativeResamplingSupported: boolean
private readonly autoGainControl: boolean
private readonly autoGainControlSupported: boolean

@@ -24,6 +24,6 @@ // The media stream and audio track are initialized during `initialize()` call.

this.nativeResamplingSupported = constraints.sampleRate === true
this.autoGainControl = constraints.autoGainControl === true
this.autoGainControlSupported = constraints.autoGainControl === true
} catch {
this.nativeResamplingSupported = false
this.autoGainControl = false
this.autoGainControlSupported = false
}

@@ -51,7 +51,7 @@ }

if (this.nativeResamplingSupported || this.autoGainControl) {
if (this.nativeResamplingSupported || this.autoGainControlSupported) {
mediaStreamConstraints.audio = {
sampleRate: DefaultSampleRate,
// @ts-ignore
autoGainControl: this.autoGainControl,
autoGainControl: this.autoGainControlSupported,
}

@@ -58,0 +58,0 @@ } else {

@@ -0,1 +1,3 @@

import { AudioProcessorParameters, ContextOptions, VadOptions } from '../client'
/**

@@ -9,3 +11,3 @@ * The interface for response returned by WebSocket client.

*/
type: WebsocketResponseType
type: WebsocketResponseType | WorkerSignal

@@ -37,5 +39,2 @@ /**

export enum WebsocketResponseType {
Opened = 'WEBSOCKET_OPEN',
Closed = 'WEBSOCKET_CLOSED',
SourceSampleRateSetSuccess = 'SOURCE_SAMPLE_RATE_SET_SUCCESS',
Started = 'started',

@@ -53,2 +52,32 @@ Stopped = 'stopped',

/**
* Messages from worker to controller
* @public
*/
export enum WorkerSignal {
Opened = 'WEBSOCKET_OPEN',
Closed = 'WEBSOCKET_CLOSED',
AudioProcessorReady = 'SOURCE_SAMPLE_RATE_SET_SUCCESS',
VadSignalHigh = 'VadSignalHigh',
VadSignalLow = 'VadSignalLow',
}
/**
* Messages from controller to worker
* @public
*/
export enum ControllerSignal {
connect = 'connect',
initAudioProcessor = 'initAudioProcessor',
adjustAudioProcessor = 'adjustAudioProcessor',
SET_SHARED_ARRAY_BUFFERS = 'SET_SHARED_ARRAY_BUFFERS',
CLOSE = 'CLOSE',
START_CONTEXT = 'START_CONTEXT',
SWITCH_CONTEXT = 'SWITCH_CONTEXT',
STOP_CONTEXT = 'STOP_CONTEXT',
AUDIO = 'AUDIO',
startStream = 'startStream',
stopStream = 'stopStream',
}
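Together these enums spell out the message protocol across the worker boundary: the controller posts ControllerSignal messages and the worker replies with WorkerSignal or WebsocketResponseType messages. One round trip, with message shapes taken from this diff:

  // controller -> worker
  worker.postMessage({ type: ControllerSignal.initAudioProcessor, sourceSampleRate: 44100 })
  // worker -> controller, once the processor is ready:
  // { type: WorkerSignal.AudioProcessorReady }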
/**
* Transcript response payload.

@@ -197,5 +226,11 @@ * @public

*/
setSourceSampleRate(sourceSampleRate: number): Promise<void>
initAudioProcessor(sourceSampleRate: number, vadOptions?: VadOptions): Promise<void>
/**
* Control audio processor parameters
* @param ap - Audio processor parameters to adjust
*/
adjustAudioProcessor(ap: AudioProcessorParameters): void
/**
* Closes the client.

@@ -240,2 +275,7 @@ *

postMessage(message: Object): void
startStream(defaultContextOptions?: ContextOptions): Promise<void>
stopStream(): Promise<void>
}

@@ -1,3 +0,4 @@

import { APIClient, ResponseCallback, CloseCallback, WebsocketResponse, WebsocketResponseType } from './types'
import worker from './worker'
import { APIClient, ResponseCallback, CloseCallback, WebsocketResponse, WebsocketResponseType, WorkerSignal, ControllerSignal } from './types'
import WebsocketClient from 'web-worker:./worker'
import { AudioProcessorParameters, ContextOptions, VadOptions } from '../client'

@@ -25,5 +26,5 @@ type ContextCallback = (err?: Error, contextId?: string) => void

constructor() {
const blob = new Blob([worker], { type: 'text/javascript' })
const blobURL = window.URL.createObjectURL(blob)
this.worker = new Worker(blobURL)
// const blob = new Blob([worker], { type: 'text/javascript' })
// const blobURL = window.URL.createObjectURL(blob)
this.worker = new WebsocketClient()
this.worker.addEventListener('message', this.onWebsocketMessage)

@@ -34,3 +35,3 @@ }

this.worker.postMessage({
type: 'INIT',
type: ControllerSignal.connect,
apiUrl,

@@ -51,6 +52,7 @@ authToken,

async setSourceSampleRate(sourceSampleRate: number): Promise<void> {
async initAudioProcessor(sourceSampleRate: number, vadOptions?: VadOptions): Promise<void> {
this.worker.postMessage({
type: 'SET_SOURCE_SAMPLE_RATE',
sourceSampleRate,
type: ControllerSignal.initAudioProcessor,
sourceSampleRate: sourceSampleRate,
vadOptions: vadOptions,
})

@@ -63,6 +65,17 @@

/**
* Control audio processor parameters
* @param ap - Audio processor parameters to adjust
*/
adjustAudioProcessor(ap: AudioProcessorParameters): void {
this.worker.postMessage({
type: ControllerSignal.adjustAudioProcessor,
params: ap,
})
}
async close(): Promise<void> {
return new Promise((resolve, reject) => {
this.worker.postMessage({
type: 'CLOSE',
type: ControllerSignal.CLOSE,
code: 1000,

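The adjustAudioProcessor method above gives callers a fire-and-forget way to retune VAD while audio is flowing, for example to make activation stricter at runtime (values illustrative):

  apiClient.adjustAudioProcessor({ vad: { signalToNoiseDb: 9.0, signalActivation: 0.8 } })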
@@ -75,2 +88,10 @@ message: 'Client has ended the session',

async startStream(defaultContextOptions?: ContextOptions): Promise<void> {
this.worker.postMessage({ type: ControllerSignal.startStream, options: defaultContextOptions })
}
async stopStream(): Promise<void> {
this.worker.postMessage({ type: ControllerSignal.stopStream })
}
async startContext(appId?: string): Promise<string> {

@@ -86,5 +107,5 @@ return new Promise((resolve, reject) => {

if (appId != null) {
this.worker.postMessage({ type: 'START_CONTEXT', appId })
this.worker.postMessage({ type: ControllerSignal.START_CONTEXT, appId })
} else {
this.worker.postMessage({ type: 'START_CONTEXT' })
this.worker.postMessage({ type: ControllerSignal.START_CONTEXT })
}

@@ -104,3 +125,3 @@ })

this.worker.postMessage({ type: 'STOP_CONTEXT' })
this.worker.postMessage({ type: ControllerSignal.STOP_CONTEXT })
})

@@ -118,3 +139,3 @@ }

})
this.worker.postMessage({ type: 'SWITCH_CONTEXT', appId })
this.worker.postMessage({ type: ControllerSignal.SWITCH_CONTEXT, appId })
})

@@ -128,3 +149,3 @@ }

sendAudio(audioChunk: Float32Array): void {
this.worker.postMessage({ type: 'AUDIO', payload: audioChunk })
this.worker.postMessage({ type: ControllerSignal.AUDIO, payload: audioChunk })
}

@@ -135,3 +156,3 @@

switch (response.type) {
case WebsocketResponseType.Opened:
case WorkerSignal.Opened:
if (this.resolveInitialization != null) {

@@ -141,3 +162,3 @@ this.resolveInitialization()

break
case WebsocketResponseType.Closed:
case WorkerSignal.Closed:
this.onCloseCb({

@@ -149,3 +170,3 @@ code: event.data.code,

break
case WebsocketResponseType.SourceSampleRateSetSuccess:
case WorkerSignal.AudioProcessorReady:
if (this.resolveSourceSampleRateSet != null) {

@@ -156,2 +177,3 @@ this.resolveSourceSampleRateSet()

case WebsocketResponseType.Started:
this.onResponseCb(response)
this.startCbs.forEach(cb => {

@@ -167,2 +189,3 @@ try {

case WebsocketResponseType.Stopped:
this.onResponseCb(response)
this.stopCbs.forEach(cb => {

@@ -169,0 +192,0 @@ try {

@@ -1,349 +0,383 @@

export default `/**
 * Known WebSocket response types.
 */
var WebsocketResponseType;
(function (WebsocketResponseType) {
    WebsocketResponseType["Opened"] = "WEBSOCKET_OPEN";
    WebsocketResponseType["SourceSampleRateSetSuccess"] = "SOURCE_SAMPLE_RATE_SET_SUCCESS";
    WebsocketResponseType["Started"] = "started";
    WebsocketResponseType["Stopped"] = "stopped";
})(WebsocketResponseType || (WebsocketResponseType = {}));
var CONTROL = {
    WRITE_INDEX: 0,
    FRAMES_AVAILABLE: 1,
    LOCK: 2
};
var WebsocketClient = /** @class */ (function () {
    function WebsocketClient(ctx) {
        var _this = this;
        this.isContextStarted = false;
        this.isStartContextConfirmed = false;
        this.shouldResendLastFramesSent = false;
        this.buffer = new Float32Array(0);
        this.lastFramesSent = new Int16Array(0); // to re-send after switch context
        this.debug = false;
        this.initialized = false;
        // WebSocket's close handler, called e.g. when
        // - normal close (code 1000)
        // - network unreachable or unable to (re)connect (code 1006)
        // List of CloseEvent.code values: https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent/code
        this.onWebsocketClose = function (event) {
            if (_this.debug) {
                console.log('[WebSocketClient]', 'onWebsocketClose');
            }
            _this.websocket.removeEventListener('open', _this.onWebsocketOpen);
            _this.websocket.removeEventListener('message', _this.onWebsocketMessage);
            _this.websocket.removeEventListener('error', _this.onWebsocketError);
            _this.websocket.removeEventListener('close', _this.onWebsocketClose);
            _this.websocket = undefined;
            _this.workerCtx.postMessage({
                type: 'WEBSOCKET_CLOSED',
                code: event.code,
                reason: event.reason,
                wasClean: event.wasClean
            });
        };
        this.onWebsocketOpen = function (_event) {
            if (_this.debug) {
                console.log('[WebSocketClient]', 'websocket opened');
            }
            if (_this.isContextStarted && !_this.isStartContextConfirmed) {
                _this.send(_this.outbox);
            }
            _this.workerCtx.postMessage({ type: 'WEBSOCKET_OPEN' });
        };
        this.onWebsocketError = function (_event) {
            if (_this.debug) {
                console.log('[WebSocketClient]', 'websocket error');
            }
        };
        this.onWebsocketMessage = function (event) {
            var response;
            try {
                response = JSON.parse(event.data);
            }
            catch (e) {
                console.error('[WebSocketClient]', 'error parsing response from the server:', e);
                return;
            }
            if (response.type === WebsocketResponseType.Started) {
                _this.isStartContextConfirmed = true;
                if (_this.shouldResendLastFramesSent) {
                    _this.resendLastFrames();
                    _this.shouldResendLastFramesSent = false;
                }
            }
            _this.workerCtx.postMessage(response);
        };
        this.workerCtx = ctx;
    }
    WebsocketClient.prototype.init = function (apiUrl, authToken, targetSampleRate, debug) {
        this.debug = debug;
        if (this.debug) {
            console.log('[WebSocketClient]', 'initialize worker');
        }
        this.apiUrl = apiUrl;
        this.authToken = authToken;
        this.targetSampleRate = targetSampleRate;
        this.initialized = true;
        this.isContextStarted = false;
        this.connect(0);
    };
    WebsocketClient.prototype.setSourceSampleRate = function (sourceSampleRate) {
        this.sourceSampleRate = sourceSampleRate;
        this.resampleRatio = this.sourceSampleRate / this.targetSampleRate;
        if (this.debug) {
            console.log('[WebSocketClient]', 'resampleRatio', this.resampleRatio);
        }
        if (this.resampleRatio > 1) {
            this.filter = generateFilter(this.sourceSampleRate, this.targetSampleRate, 127);
        }
        this.workerCtx.postMessage({ type: 'SOURCE_SAMPLE_RATE_SET_SUCCESS' });
        if (isNaN(this.resampleRatio)) {
            throw Error("resampleRatio is NaN source rate is ".concat(this.sourceSampleRate, " and target rate is ").concat(this.targetSampleRate));
        }
    };
    WebsocketClient.prototype.setSharedArrayBuffers = function (controlSAB, dataSAB) {
        this.controlSAB = new Int32Array(controlSAB);
        this.dataSAB = new Float32Array(dataSAB);
        var audioHandleInterval = this.dataSAB.length / 32; // ms
        if (this.debug) {
            console.log('[WebSocketClient]', 'Audio handle interval', audioHandleInterval, 'ms');
        }
        setInterval(this.sendAudioFromSAB.bind(this), audioHandleInterval);
    };
    WebsocketClient.prototype.connect = function (timeout) {
        if (timeout === void 0) { timeout = 1000; }
        if (this.debug) {
            console.log('[WebSocketClient]', 'connect in ', timeout / 1000, 'sec');
        }
        setTimeout(this.initializeWebsocket.bind(this), timeout);
    };
    WebsocketClient.prototype.initializeWebsocket = function () {
        if (this.debug) {
            console.log('[WebSocketClient]', 'connecting to ', this.apiUrl);
        }
        this.websocket = new WebSocket(this.apiUrl, this.authToken);
        this.websocket.addEventListener('open', this.onWebsocketOpen);
        this.websocket.addEventListener('message', this.onWebsocketMessage);
        this.websocket.addEventListener('error', this.onWebsocketError);
        this.websocket.addEventListener('close', this.onWebsocketClose);
    };
    WebsocketClient.prototype.isOpen = function () {
        return this.websocket !== undefined && this.websocket.readyState === this.websocket.OPEN;
    };
    WebsocketClient.prototype.resendLastFrames = function () {
        if (this.lastFramesSent.length > 0) {
            this.send(this.lastFramesSent);
            this.lastFramesSent = new Int16Array(0);
        }
    };
    WebsocketClient.prototype.sendAudio = function (audioChunk) {
        if (!this.isContextStarted) {
            return;
        }
        if (audioChunk.length > 0) {
            if (this.resampleRatio > 1) {
                // Downsampling
                this.send(this.downsample(audioChunk));
            }
            else {
                this.send(float32ToInt16(audioChunk));
            }
        }
    };
    WebsocketClient.prototype.sendAudioFromSAB = function () {
        if (!this.isContextStarted) {
            this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;
            this.controlSAB[CONTROL.WRITE_INDEX] = 0;
            return;
        }
        if (this.controlSAB == undefined) {
            return;
        }
        var framesAvailable = this.controlSAB[CONTROL.FRAMES_AVAILABLE];
        var lock = this.controlSAB[CONTROL.LOCK];
        if (lock == 0 && framesAvailable > 0) {
            var data = this.dataSAB.subarray(0, framesAvailable);
            this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0;
            this.controlSAB[CONTROL.WRITE_INDEX] = 0;
            if (data.length > 0) {
                var frames_1;
                if (this.resampleRatio > 1) {
                    frames_1 = this.downsample(data);
                }
                else {
                    frames_1 = float32ToInt16(data);
                }
                this.send(frames_1);
                // 16000 per second, 1000 in 100 ms
                // save last 250 ms
                if (this.lastFramesSent.length > 1024 * 4) {
                    this.lastFramesSent = frames_1;
                }
                else {
                    var concat = new Int16Array(this.lastFramesSent.length + frames_1.length);
                    concat.set(this.lastFramesSent);
                    concat.set(frames_1, this.lastFramesSent.length);
                    this.lastFramesSent = concat;
                }
            }
        }
    };
    WebsocketClient.prototype.startContext = function (appId) {
        if (this.isContextStarted) {
            console.error('[WebSocketClient]', "can't start context: active context exists");
            return;
        }
        this.isContextStarted = true;
        this.isStartContextConfirmed = false;
        if (appId !== undefined) {
            this.outbox = JSON.stringify({ event: 'start', appId: appId });
        }
        else {
            this.outbox = JSON.stringify({ event: 'start' });
        }
        this.send(this.outbox);
    };
    WebsocketClient.prototype.stopContext = function () {
        if (!this.websocket) {
            throw Error('WebSocket is undefined');
        }
        if (!this.isContextStarted) {
            console.error('[WebSocketClient]', "can't stop context: no active context");
            return;
        }
        this.isContextStarted = false;
        this.isStartContextConfirmed = false;
        var StopEventJSON = JSON.stringify({ event: 'stop' });
        this.send(StopEventJSON);
    };
    WebsocketClient.prototype.switchContext = function (newAppId) {
        if (!this.websocket) {
            throw Error('WebSocket is undefined');
        }
        if (!this.isContextStarted) {
            console.error('[WebSocketClient]', "can't switch context: no active context");
            return;
        }
        if (newAppId == undefined) {
            console.error('[WebSocketClient]', "can't switch context: new app id is undefined");
            return;
        }
        this.isStartContextConfirmed = false;
        var StopEventJSON = JSON.stringify({ event: 'stop' });
        this.send(StopEventJSON);
        this.shouldResendLastFramesSent = true;
        this.send(JSON.stringify({ event: 'start', appId: newAppId }));
    };
    WebsocketClient.prototype.closeWebsocket = function (websocketCode, reason) {
        if (websocketCode === void 0) { websocketCode = 1005; }
        if (reason === void 0) { reason = 'No Status Received'; }
        if (this.debug) {
            console.log('[WebSocketClient]', 'Websocket closing');
        }
        if (!this.websocket) {
            throw Error('WebSocket is undefined');
        }
        this.websocket.close(websocketCode, reason);
    };
    WebsocketClient.prototype.downsample = function (input) {
        var inputBuffer = new Float32Array(this.buffer.length + input.length);
        inputBuffer.set(this.buffer, 0);
        inputBuffer.set(input, this.buffer.length);
        var outputLength = Math.ceil((inputBuffer.length - this.filter.length) / this.resampleRatio);
        var outputBuffer = new Int16Array(outputLength);
        for (var i = 0; i < outputLength; i++) {
            var offset = Math.round(this.resampleRatio * i);
            var val = 0.0;
            for (var j = 0; j < this.filter.length; j++) {
                val += inputBuffer[offset + j] * this.filter[j];
            }
            outputBuffer[i] = val * (val < 0 ? 0x8000 : 0x7fff);
        }
        var remainingOffset = Math.round(this.resampleRatio * outputLength);
        if (remainingOffset < inputBuffer.length) {
            this.buffer = inputBuffer.subarray(remainingOffset);
        }
        else {
            this.buffer = new Float32Array(0);
        }
        return outputBuffer;
    };
    WebsocketClient.prototype.send = function (data) {
        if (this.isOpen()) {
            try {
                this.websocket.send(data);
            }
            catch (error) {
                console.log('[WebSocketClient]', 'server connection error', error);
            }
        }
    };
    return WebsocketClient;
}());
var ctx = self;
var websocketClient = new WebsocketClient(ctx);
ctx.onmessage = function (e) {
    switch (e.data.type) {
        case 'INIT':
            websocketClient.init(e.data.apiUrl, e.data.authToken, e.data.targetSampleRate, e.data.debug);
            break;
        case 'SET_SOURCE_SAMPLE_RATE':
            websocketClient.setSourceSampleRate(e.data.sourceSampleRate);
            break;
        case 'SET_SHARED_ARRAY_BUFFERS':
            websocketClient.setSharedArrayBuffers(e.data.controlSAB, e.data.dataSAB);
            break;
        case 'CLOSE':
            websocketClient.closeWebsocket(1000, 'Close requested by client');
            break;
        case 'START_CONTEXT':
            websocketClient.startContext(e.data.appId);
            break;
        case 'SWITCH_CONTEXT':
            websocketClient.switchContext(e.data.appId);
            break;
        case 'STOP_CONTEXT':
            websocketClient.stopContext();
            break;
        case 'AUDIO':
            websocketClient.sendAudio(e.data.payload);
            break;
        default:
            console.log('WORKER', e);
    }
};
function float32ToInt16(buffer) {
    var buf = new Int16Array(buffer.length);
    for (var l = 0; l < buffer.length; l++) {
        buf[l] = buffer[l] * (buffer[l] < 0 ? 0x8000 : 0x7fff);
    }
    return buf;
}
function generateFilter(sourceSampleRate, targetSampleRate, length) {
    if (length % 2 === 0) {
        throw Error('Filter length must be odd');
    }
    var cutoff = targetSampleRate / 2;
    var filter = new Float32Array(length);
    var sum = 0;
    for (var i = 0; i < length; i++) {
        var x = sinc(((2 * cutoff) / sourceSampleRate) * (i - (length - 1) / 2));
        sum += x;
        filter[i] = x;
    }
    for (var i = 0; i < length; i++) {
        filter[i] = filter[i] / sum;
    }
    return filter;
}
function sinc(x) {
    if (x === 0.0) {
        return 1.0;
    }
    var piX = Math.PI * x;
    return Math.sin(piX) / piX;
}
`
import AudioProcessor from '../audioprocessing/AudioProcessor'
import EnergyTresholdVAD from '../audioprocessing/EnergyTresholdVAD'
import AudioTools from '../audioprocessing/AudioTools'
import { ControllerSignal, WebsocketResponseType, WorkerSignal } from './types'
import { AudioProcessorParameters, ContextOptions, VadOptions } from '../client'
/**
 * The interface for response returned by WebSocket client.
 * @public
 */
interface WebsocketResponse {
  /**
   * Response type.
   */
  type: WebsocketResponseType
  /**
   * Audio context ID.
   */
  audio_context: string
  /**
   * Segment ID.
   */
  segment_id: number
  /**
   * Response payload.
   *
   * The payload value should match the response type (i.e. TranscriptResponse should have Transcript type).
   * Not all response types have payloads - Started, Stopped and SegmentEnd don't have payloads.
   * TentativeIntent and Intent share the same payload interface (IntentResponse).
   */
  data: any
}
const CONTROL = {
  WRITE_INDEX: 0,
  FRAMES_AVAILABLE: 1,
  LOCK: 2,
}
class WebsocketClient {
  private readonly workerCtx: Worker
  private targetSampleRate: number = 16000
  private isContextStarted: boolean = false
  private websocket?: WebSocket
  private audioProcessor?: AudioProcessor
  private controlSAB?: Int32Array
  private dataSAB?: Float32Array
  private readonly frameMillis = 30
  private readonly outputAudioFrame: Int16Array = new Int16Array(this.frameMillis * this.targetSampleRate / 1000)
  private debug: boolean = false
  private defaultContextOptions?: ContextOptions
  constructor(ctx: Worker) {
    this.workerCtx = ctx
  }
  connect(apiUrl: string, authToken: string, targetSampleRate: number, debug: boolean): void {
    this.debug = debug
    if (this.debug) {
      console.log('[WebSocketClient]', 'connecting to ', apiUrl)
    }
    this.targetSampleRate = targetSampleRate
    this.isContextStarted = false
    this.websocket = new WebSocket(apiUrl, authToken)
    this.websocket.addEventListener('open', this.onWebsocketOpen)
    this.websocket.addEventListener('message', this.onWebsocketMessage)
    this.websocket.addEventListener('error', this.onWebsocketError)
    this.websocket.addEventListener('close', this.onWebsocketClose)
  }
  initAudioProcessor(sourceSampleRate: number, vadOptions?: VadOptions): void {
    this.audioProcessor = new AudioProcessor(sourceSampleRate, this.targetSampleRate, 5)
    if (vadOptions) {
      this.audioProcessor.vad = new EnergyTresholdVAD(vadOptions)
      this.audioProcessor.onVadSignalHigh = () => {
        const currentVadOptions = this.audioProcessor?.vad?.vadOptions
        if (currentVadOptions) {
          if (this.defaultContextOptions?.immediate) {
            if (currentVadOptions.enabled && currentVadOptions.controlListening) {
              this.startContext()
            }
          } else {
            if (currentVadOptions.enabled && currentVadOptions.controlListening) {
              this.workerCtx.postMessage({ type: WorkerSignal.VadSignalHigh })
            }
          }
        }
      }
      this.audioProcessor.onVadSignalLow = () => {
        const currentVadOptions = this.audioProcessor?.vad?.vadOptions
        if (currentVadOptions) {
          if (this.defaultContextOptions?.immediate) {
            if (currentVadOptions.enabled && currentVadOptions.controlListening) {
              this.stopContext()
            }
          } else {
            if (currentVadOptions.enabled && currentVadOptions.controlListening) {
              this.workerCtx.postMessage({ type: WorkerSignal.VadSignalLow })
            }
          }
        }
      }
    }
    this.audioProcessor.sendAudio = (floats: Float32Array, startIndex: number, length: number) => {
      AudioTools.convertFloatToInt16(floats, this.outputAudioFrame, startIndex, length)
      this.send(this.outputAudioFrame)
    }
    this.workerCtx.postMessage({ type: WorkerSignal.AudioProcessorReady })
  }
  /**
   * Control audio processor parameters
   * @param ap - Audio processor parameters to adjust
   */
  adjustAudioProcessor(ap: AudioProcessorParameters): void {
    if (!this.audioProcessor) {
      throw new Error('No AudioProcessor')
    }
    if (ap.vad) {
      if (!this.audioProcessor.vad) {
        throw new Error('No VAD in AudioProcessor. Did you define `vad` in BrowserClient constructor parameters?')
      }
      this.audioProcessor.vad.adjustVadOptions(ap.vad)
    }
  }
  setSharedArrayBuffers(controlSAB: number, dataSAB: number): void {
    this.controlSAB = new Int32Array(controlSAB)
    this.dataSAB = new Float32Array(dataSAB)
    const audioHandleInterval = this.dataSAB.length / 32 // ms
    if (this.debug) {
      console.log('[WebSocketClient]', 'Audio handle interval', audioHandleInterval, 'ms')
    }
    setInterval(this.processAudioSAB.bind(this), audioHandleInterval)
  }
  startStream(defaultContextOptions?: ContextOptions): void {
    if (!this.audioProcessor) {
      throw new Error('No AudioProcessor')
    }
    this.defaultContextOptions = defaultContextOptions
    this.audioProcessor.resetStream()
  }
stopStream(): void {
if (!this.audioProcessor) {
throw new Error('No AudioProcessor')
}
if (this.isContextStarted) {
// Ensure stopContext is called in immediate mode
this.stopContext()
}
this.defaultContextOptions = undefined
}
/**
* Processes and sends audio
* @param audioChunk - audio data to process
*/
processAudio(audioChunk: Float32Array): void {
if (!this.audioProcessor) {
throw new Error('No AudioProcessor')
}
this.audioProcessor.processAudio(audioChunk)
}
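/**
 * Polls the SharedArrayBuffers shared with the audio thread: controlSAB carries the
 * CONTROL indices (WRITE_INDEX, FRAMES_AVAILABLE, LOCK) and dataSAB carries samples.
 * When the lock is free and frames are available, the pending samples are consumed,
 * both counters are reset, and the audio is handed to processAudio above.
 */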
processAudioSAB(): void {
if (!this.controlSAB || !this.dataSAB) {
throw new Error('No SharedArrayBuffers')
}
const framesAvailable = this.controlSAB[CONTROL.FRAMES_AVAILABLE]
const lock = this.controlSAB[CONTROL.LOCK]
if (lock === 0 && framesAvailable > 0) {
const data = this.dataSAB.subarray(0, framesAvailable)
this.controlSAB[CONTROL.FRAMES_AVAILABLE] = 0
this.controlSAB[CONTROL.WRITE_INDEX] = 0
if (data.length > 0) {
this.processAudio(data)
}
}
}
startContext(appId?: string): void {
if (!this.audioProcessor) {
throw Error('No AudioProcessor')
}
if (this.isContextStarted) {
console.error('[WebSocketClient]', "can't start context: active context exists")
return
}
this.audioProcessor.startContext()
this.isContextStarted = true
if (appId !== undefined) {
this.send(JSON.stringify({ event: 'start', appId }))
} else {
this.send(JSON.stringify({ event: 'start' }))
}
}
stopContext(): void {
if (!this.audioProcessor) {
throw Error('No AudioProcessor')
}
if (!this.isContextStarted) {
console.error('[WebSocketClient]', "can't stop context: no active context")
return
}
this.audioProcessor.stopContext()
this.isContextStarted = false
const StopEventJSON = JSON.stringify({ event: 'stop' })
this.send(StopEventJSON)
}
switchContext(newAppId: string): void {
if (!this.websocket) {
throw Error('WebSocket is undefined')
}
if (!this.isContextStarted) {
console.error('[WebSocketClient]', "can't switch context: no active context")
return
}
if (newAppId === undefined) {
console.error('[WebSocketClient]', "can't switch context: new app id is undefined")
return
}
const StopEventJSON = JSON.stringify({ event: 'stop' })
this.send(StopEventJSON)
this.send(JSON.stringify({ event: 'start', appId: newAppId }))
}
closeWebsocket(websocketCode: number = 1005, reason: string = 'No Status Received'): void {
if (this.debug) {
console.log('[WebSocketClient]', 'Websocket closing')
}
if (!this.websocket) {
throw Error('WebSocket is undefined')
}
this.websocket.close(websocketCode, reason)
}
// WebSocket's close handler, called e.g. when
// - normal close (code 1000)
// - network unreachable or unable to (re)connect (code 1006)
// List of CloseEvent.code values: https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent/code
private readonly onWebsocketClose = (event: CloseEvent): void => {
if (!this.websocket) {
throw Error('WebSocket is undefined')
}
if (this.debug) {
console.log('[WebSocketClient]', 'onWebsocketClose')
}
this.websocket.removeEventListener('open', this.onWebsocketOpen)
this.websocket.removeEventListener('message', this.onWebsocketMessage)
this.websocket.removeEventListener('error', this.onWebsocketError)
this.websocket.removeEventListener('close', this.onWebsocketClose)
this.websocket = undefined
this.workerCtx.postMessage({
type: WorkerSignal.Closed,
code: event.code,
reason: event.reason,
wasClean: event.wasClean,
})
}
private readonly onWebsocketOpen = (_event: Event): void => {
if (this.debug) {
console.log('[WebSocketClient]', 'websocket opened')
}
this.workerCtx.postMessage({ type: WorkerSignal.Opened })
}
private readonly onWebsocketError = (_event: Event): void => {
if (this.debug) {
console.log('[WebSocketClient]', 'websocket error')
}
}
private readonly onWebsocketMessage = (event: MessageEvent): void => {
let response: WebsocketResponse
try {
response = JSON.parse(event.data)
} catch (e) {
console.error('[WebSocketClient]', 'error parsing response from the server:', e)
return
}
this.workerCtx.postMessage(response)
}
send(data: string | Int16Array): void {
if (!this.websocket) {
throw new Error('No Websocket')
}
if (this.websocket.readyState !== this.websocket.OPEN) {
throw new Error(`Expected OPEN Websocket state, but got ${this.websocket.readyState}`)
}
try {
this.websocket.send(data)
} catch (error) {
console.log('[WebSocketClient]', 'server connection error', error)
}
}
}
const ctx: Worker = self as any
const websocketClient = new WebsocketClient(ctx)
ctx.onmessage = function (e) {
switch (e.data.type) {
case ControllerSignal.connect:
websocketClient.connect(e.data.apiUrl, e.data.authToken, e.data.targetSampleRate, e.data.debug)
break
case ControllerSignal.initAudioProcessor:
websocketClient.initAudioProcessor(e.data.sourceSampleRate, e.data.vadOptions)
break
case ControllerSignal.adjustAudioProcessor:
websocketClient.adjustAudioProcessor(e.data.params)
break
case ControllerSignal.SET_SHARED_ARRAY_BUFFERS:
websocketClient.setSharedArrayBuffers(e.data.controlSAB, e.data.dataSAB)
break
case ControllerSignal.CLOSE:
websocketClient.closeWebsocket(1000, 'Close requested by client')
break
case ControllerSignal.startStream:
websocketClient.startStream(e.data.options)
break
case ControllerSignal.stopStream:
websocketClient.stopStream()
break
case ControllerSignal.START_CONTEXT:
websocketClient.startContext(e.data.appId)
break
case ControllerSignal.SWITCH_CONTEXT:
websocketClient.switchContext(e.data.appId)
break
case ControllerSignal.STOP_CONTEXT:
websocketClient.stopContext()
break
case ControllerSignal.AUDIO:
websocketClient.processAudio(e.data.payload)
break
default:
console.log('WORKER', e)
}
}
export default WebsocketClient
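The onmessage dispatcher above is the worker's single entry point, so the controller's call order matters: connect first, then initAudioProcessor, then startStream or START_CONTEXT before AUDIO frames arrive. A sketch of the expected sequence (message shapes from this diff):

  worker.postMessage({ type: ControllerSignal.connect, apiUrl, authToken, targetSampleRate: 16000, debug: false })
  worker.postMessage({ type: ControllerSignal.initAudioProcessor, sourceSampleRate: 44100 })
  worker.postMessage({ type: ControllerSignal.startStream })
  worker.postMessage({ type: ControllerSignal.AUDIO, payload: new Float32Array(512) })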

