@speechly/browser-client
Comparing version 1.0.15 to 1.0.16
@@ -100,2 +100,3 @@ | ||
private readonly nativeResamplingSupported; | ||
private readonly autoGainControl; | ||
private readonly activeContexts; | ||
@@ -203,2 +204,6 @@ private readonly reconnectAttemptCount; | ||
private setState; | ||
/** | ||
* print statistics to console | ||
*/ | ||
printStats(): void; | ||
} | ||
@@ -431,3 +436,3 @@ | ||
*/ | ||
initialize(audioContext: AudioContext, opts: MediaStreamConstraints): Promise<void>; | ||
initialize(audioContext: AudioContext, mediaStreamConstraints: MediaStreamConstraints): Promise<void>; | ||
/** | ||
@@ -449,2 +454,6 @@ * Closes the microphone, tearing down all the infrastructure. | ||
unmute(): void; | ||
/** | ||
* Print usage stats to console in debug mode. | ||
*/ | ||
printStats(): void; | ||
} | ||
@@ -451,0 +460,0 @@ |
@@ -1,2 +0,2 @@ | ||
declare const _default: "\n// Indices for the Control SAB.\nconst CONTROL = {\n 'WRITE_INDEX': 0,\n 'FRAMES_AVAILABLE': 1,\n 'LOCK': 2,\n};\n\nclass SpeechlyProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n\n this._initialized = false;\n this.port.onmessage = this._initialize.bind(this);\n }\n\n _initialize(event) {\n this.controlSAB = new Int32Array(event.data.controlSAB);\n this.dataSAB = new Float32Array(event.data.dataSAB);\n this.sharedBufferSize = this.dataSAB.length;\n this.buffer = new Float32Array(0);\n this._initialized = true;\n }\n\n _transferDataToSharedBuffer(data) {\n this.controlSAB[CONTROL.LOCK] = 1\n let inputWriteIndex = this.controlSAB[CONTROL.WRITE_INDEX]\n if (this.controlSAB[CONTROL.FRAMES_AVAILABLE] > 0) {\n if (inputWriteIndex + data.length > this.sharedBufferSize) {\n // console.log('buffer overflow')\n inputWriteIndex = 0\n }\n }\n this.dataSAB.set(data, inputWriteIndex)\n this.controlSAB[CONTROL.WRITE_INDEX] = inputWriteIndex + data.length\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = inputWriteIndex + data.length\n this.controlSAB[CONTROL.LOCK] = 0\n }\n\n _pushData(data) {\n if (this.buffer.length > this.sharedBufferSize) {\n const dataToTransfer = this.buffer.subarray(0, this.sharedBufferSize)\n this._transferDataToSharedBuffer(dataToTransfer)\n this.buffer = this.buffer.subarray(this.sharedBufferSize)\n }\n let concat = new Float32Array(this.buffer.length + data.length)\n concat.set(this.buffer)\n concat.set(data, this.buffer.length)\n this.buffer = concat\n }\n\n process(inputs, outputs, parameters) {\n const inputChannelData = inputs[0][0];\n if (inputChannelData !== undefined) {\n if (this.controlSAB && this.dataSAB) {\n this._pushData(inputChannelData);\n } else {\n this.port.postMessage(inputChannelData);\n }\n }\n \n return true;\n }\n}\n\nregisterProcessor('speechly-worklet', SpeechlyProcessor);\n"; | ||
declare const _default: "\n// Indices for the Control SAB.\nconst CONTROL = {\n 'WRITE_INDEX': 0,\n 'FRAMES_AVAILABLE': 1,\n 'LOCK': 2,\n};\n\nclass SpeechlyProcessor extends AudioWorkletProcessor {\n constructor() {\n super();\n\n this._initialized = false;\n this.debug = false;\n this.port.onmessage = this._initialize.bind(this);\n }\n\n _initialize(event) {\n this.controlSAB = new Int32Array(event.data.controlSAB);\n this.dataSAB = new Float32Array(event.data.dataSAB);\n this.debug = event.data.debug;\n this.sharedBufferSize = this.dataSAB.length;\n this.buffer = new Float32Array(0);\n this._initialized = true;\n }\n\n _transferDataToSharedBuffer(data) {\n this.controlSAB[CONTROL.LOCK] = 1\n let inputWriteIndex = this.controlSAB[CONTROL.WRITE_INDEX]\n if (this.controlSAB[CONTROL.FRAMES_AVAILABLE] > 0) {\n if (inputWriteIndex + data.length > this.sharedBufferSize) {\n // console.log('buffer overflow')\n inputWriteIndex = 0\n }\n }\n this.dataSAB.set(data, inputWriteIndex)\n this.controlSAB[CONTROL.WRITE_INDEX] = inputWriteIndex + data.length\n this.controlSAB[CONTROL.FRAMES_AVAILABLE] = inputWriteIndex + data.length\n this.controlSAB[CONTROL.LOCK] = 0\n }\n\n _pushData(data) {\n if (this.debug) {\n const signalEnergy = getStandardDeviation(data)\n this.port.postMessage({\n type: 'STATS',\n signalEnergy: signalEnergy\n });\n }\n\n if (this.buffer.length > this.sharedBufferSize) {\n const dataToTransfer = this.buffer.subarray(0, this.sharedBufferSize)\n this._transferDataToSharedBuffer(dataToTransfer)\n this.buffer = this.buffer.subarray(this.sharedBufferSize)\n }\n let concat = new Float32Array(this.buffer.length + data.length)\n concat.set(this.buffer)\n concat.set(data, this.buffer.length)\n this.buffer = concat\n }\n\n process(inputs, outputs, parameters) {\n const inputChannelData = inputs[0][0];\n if (inputChannelData !== undefined) {\n if (this.controlSAB && this.dataSAB) {\n this._pushData(inputChannelData);\n } else {\n this.port.postMessage({\n type: 'DATA',\n frames: inputChannelData\n });\n }\n }\n \n return true;\n }\n}\n\nfunction getStandardDeviation(array) {\n const n = array.length\n const mean = array.reduce((a, b) => a + b) / n\n return Math.sqrt(array.map(x => Math.pow(x - mean, 2)).reduce((a, b) => a + b) / n)\n}\n\nregisterProcessor('speechly-worklet', SpeechlyProcessor);\n"; | ||
export default _default; |
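With this change the worklet no longer posts bare Float32Array frames: messages sent over its port carry a `type` field, either `'STATS'` with a `signalEnergy` reading (only when debug is enabled) or `'DATA'` with the raw frames (only when SharedArrayBuffer is not in use). A minimal sketch of a consumer-side handler for that message shape; `WorkletMessage`, `handleWorkletMessage` and `onFrames` are illustrative names, not package exports.

```ts
// Hypothetical consumer-side handler mirroring the worklet's new message shape.
type WorkletMessage =
  | { type: 'STATS'; signalEnergy: number }
  | { type: 'DATA'; frames: Float32Array };

let maxSignalEnergy = 0;

function handleWorkletMessage(
  event: MessageEvent<WorkletMessage>,
  onFrames: (frames: Float32Array) => void,
): void {
  switch (event.data.type) {
    case 'STATS':
      // Track the loudest frame seen so far, as BrowserMicrophone does for printStats().
      maxSignalEnergy = Math.max(maxSignalEnergy, event.data.signalEnergy);
      break;
    case 'DATA':
      onFrames(event.data.frames);
      break;
  }
}
```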
@@ -16,2 +16,3 @@ "use strict"; | ||
this._initialized = false; | ||
this.debug = false; | ||
this.port.onmessage = this._initialize.bind(this); | ||
@@ -23,2 +24,3 @@ } | ||
this.dataSAB = new Float32Array(event.data.dataSAB); | ||
this.debug = event.data.debug; | ||
this.sharedBufferSize = this.dataSAB.length; | ||
@@ -45,2 +47,10 @@ this.buffer = new Float32Array(0); | ||
_pushData(data) { | ||
if (this.debug) { | ||
const signalEnergy = getStandardDeviation(data) | ||
this.port.postMessage({ | ||
type: 'STATS', | ||
signalEnergy: signalEnergy | ||
}); | ||
} | ||
if (this.buffer.length > this.sharedBufferSize) { | ||
@@ -63,3 +73,6 @@ const dataToTransfer = this.buffer.subarray(0, this.sharedBufferSize) | ||
} else { | ||
this.port.postMessage(inputChannelData); | ||
this.port.postMessage({ | ||
type: 'DATA', | ||
frames: inputChannelData | ||
}); | ||
} | ||
@@ -72,4 +85,10 @@ } | ||
function getStandardDeviation(array) { | ||
const n = array.length | ||
const mean = array.reduce((a, b) => a + b) / n | ||
return Math.sqrt(array.map(x => Math.pow(x - mean, 2)).reduce((a, b) => a + b) / n) | ||
} | ||
registerProcessor('speechly-worklet', SpeechlyProcessor); | ||
`; | ||
//# sourceMappingURL=audioworklet.js.map |
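The `signalEnergy` figure reported in the STATS messages is the population standard deviation of the samples in one audio frame, as computed by the new `getStandardDeviation` helper. A standalone TypeScript version of the same calculation (assuming a non-empty frame), with two sanity checks:

```ts
// Standalone version of the worklet's signal-energy helper: population standard
// deviation of the samples in a single audio frame.
function getStandardDeviation(samples: Float32Array): number {
  const n = samples.length;
  let mean = 0;
  for (const x of samples) mean += x;
  mean /= n;
  let variance = 0;
  for (const x of samples) variance += (x - mean) ** 2;
  return Math.sqrt(variance / n);
}

console.log(getStandardDeviation(new Float32Array(128)));             // silence -> 0
console.log(getStandardDeviation(Float32Array.from([1, -1, 1, -1]))); // full-scale square wave -> 1
```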
@@ -15,4 +15,5 @@ import { Microphone } from './types'; | ||
private audioProcessor?; | ||
private stats; | ||
constructor(isWebkit: boolean, sampleRate: number, apiClient: APIClient, debug?: boolean); | ||
initialize(audioContext: AudioContext, opts: MediaStreamConstraints): Promise<void>; | ||
initialize(audioContext: AudioContext, mediaStreamConstraints: MediaStreamConstraints): Promise<void>; | ||
close(): Promise<void>; | ||
@@ -22,2 +23,6 @@ mute(): void; | ||
private readonly handleAudio; | ||
/** | ||
* print statistics to console | ||
*/ | ||
printStats(): void; | ||
} |
@@ -23,2 +23,5 @@ "use strict"; | ||
this.muted = false; | ||
this.stats = { | ||
maxSignalEnergy: 0.0, | ||
}; | ||
this.handleAudio = (array) => { | ||
@@ -37,3 +40,3 @@ if (this.muted) { | ||
} | ||
initialize(audioContext, opts) { | ||
initialize(audioContext, mediaStreamConstraints) { | ||
var _a; | ||
@@ -47,3 +50,3 @@ return __awaiter(this, void 0, void 0, function* () { | ||
try { | ||
this.mediaStream = yield window.navigator.mediaDevices.getUserMedia(opts); | ||
this.mediaStream = yield window.navigator.mediaDevices.getUserMedia(mediaStreamConstraints); | ||
} | ||
@@ -84,13 +87,24 @@ catch (_b) { | ||
dataSAB, | ||
debug: this.debug, | ||
}); | ||
} | ||
else { | ||
// Opera, Chrome Android, Webview Android | ||
if (this.debug) { | ||
console.log('[SpeechlyClient]', 'can not use SharedArrayBuffer'); | ||
} | ||
// Opera, Chrome Android, Webview Android | ||
speechlyNode.port.onmessage = (event) => { | ||
this.handleAudio(event.data); | ||
}; | ||
} | ||
speechlyNode.port.onmessage = (event) => { | ||
switch (event.data.type) { | ||
case 'STATS': | ||
if (event.data.signalEnergy > this.stats.maxSignalEnergy) { | ||
this.stats.maxSignalEnergy = event.data.signalEnergy; | ||
} | ||
break; | ||
case 'DATA': | ||
this.handleAudio(event.data.frames); | ||
break; | ||
default: | ||
} | ||
}; | ||
} | ||
@@ -150,4 +164,17 @@ else { | ||
} | ||
/** | ||
* print statistics to console | ||
*/ | ||
printStats() { | ||
if (this.audioTrack != null) { | ||
const settings = this.audioTrack.getSettings(); | ||
console.log(this.audioTrack.label, this.audioTrack.readyState); | ||
console.log('channelCount', settings.channelCount); | ||
console.log('latency', settings.latency); | ||
console.log('autoGainControl', settings.autoGainControl); | ||
} | ||
console.log('maxSignalEnergy', this.stats.maxSignalEnergy); | ||
} | ||
} | ||
exports.BrowserMicrophone = BrowserMicrophone; | ||
//# sourceMappingURL=browser_microphone.js.map |
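The compiled microphone now forwards its `debug` flag in the worklet init message alongside the two SharedArrayBuffers, and routes both STATS and DATA messages through a single `onmessage` switch. A rough sketch of that handshake, assuming SharedArrayBuffer support and a worklet module already registered via `audioContext.audioWorklet.addModule()`; the buffer sizes below are illustrative, not the package's actual values.

```ts
// Sketch of the init handshake the microphone performs with the audio worklet.
// The 'speechly-worklet' name comes from the worklet source; sizes are examples only.
function setupWorkletNode(audioContext: AudioContext, debug: boolean): AudioWorkletNode {
  const node = new AudioWorkletNode(audioContext, 'speechly-worklet');
  // Control SAB: three Int32 slots for WRITE_INDEX, FRAMES_AVAILABLE and LOCK.
  const controlSAB = new SharedArrayBuffer(3 * Int32Array.BYTES_PER_ELEMENT);
  // Data SAB: raw Float32 audio frames; 4096 samples picked arbitrarily for this sketch.
  const dataSAB = new SharedArrayBuffer(4096 * Float32Array.BYTES_PER_ELEMENT);
  node.port.postMessage({ controlSAB, dataSAB, debug });
  return node;
}
```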
@@ -43,3 +43,3 @@ /** | ||
*/ | ||
initialize(audioContext: AudioContext, opts: MediaStreamConstraints): Promise<void>; | ||
initialize(audioContext: AudioContext, mediaStreamConstraints: MediaStreamConstraints): Promise<void>; | ||
/** | ||
@@ -61,2 +61,6 @@ * Closes the microphone, tearing down all the infrastructure. | ||
unmute(): void; | ||
/** | ||
* Print usage stats to console in debug mode. | ||
*/ | ||
printStats(): void; | ||
} |
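For callers, the visible changes in the interface above are the renamed second parameter of `initialize()` and the new `printStats()` method. A hypothetical usage sketch; whether `Microphone` is re-exported from the package root is an assumption here, and `mic` stands in for whichever implementation (e.g. `BrowserMicrophone`) the client wires up internally.

```ts
import type { Microphone } from '@speechly/browser-client';

// Hypothetical caller of the updated Microphone interface.
async function startAndReport(mic: Microphone): Promise<void> {
  const audioContext = new AudioContext();
  const mediaStreamConstraints: MediaStreamConstraints = { audio: true, video: false };
  await mic.initialize(audioContext, mediaStreamConstraints);
  // ...record for a while, then dump track settings and max signal energy (debug mode only).
  mic.printStats();
}
```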
{ | ||
"name": "@speechly/browser-client", | ||
"version": "1.0.15", | ||
"version": "1.0.16", | ||
"description": "Browser client for Speechly API", | ||
@@ -5,0 +5,0 @@ "private": false, |
@@ -5,3 +5,3 @@ <h1 align="center"> | ||
<h2 align="center"> | ||
Complete your touch user interface with voice | ||
Speechly is the Fast, Accurate, and Simple Voice Interface API for Web and Mobile Apps | ||
</h2> | ||
@@ -8,0 +8,0 @@ |
@@ -25,2 +25,3 @@ import { ClientOptions, StateChangeCallback, SegmentChangeCallback, TentativeTranscriptCallback, TranscriptCallback, TentativeEntitiesCallback, EntityCallback, IntentCallback } from './types'; | ||
private readonly nativeResamplingSupported; | ||
private readonly autoGainControl; | ||
private readonly activeContexts; | ||
@@ -128,2 +129,6 @@ private readonly reconnectAttemptCount; | ||
private setState; | ||
/** | ||
* print statistics to console | ||
*/ | ||
printStats(): void; | ||
} |
@@ -143,5 +143,7 @@ "use strict"; | ||
this.nativeResamplingSupported = constraints.sampleRate === true; | ||
this.autoGainControl = constraints.autoGainControl === true; | ||
} | ||
catch (_o) { | ||
this.nativeResamplingSupported = false; | ||
this.autoGainControl = false; | ||
} | ||
@@ -244,12 +246,13 @@ const language = (_b = options.language) !== null && _b !== void 0 ? _b : defaultLanguage; | ||
} | ||
const opts = { | ||
const mediaStreamConstraints = { | ||
video: false, | ||
}; | ||
if (this.nativeResamplingSupported) { | ||
opts.audio = { | ||
if (this.nativeResamplingSupported || this.autoGainControl) { | ||
mediaStreamConstraints.audio = { | ||
sampleRate: this.sampleRate, | ||
autoGainControl: this.autoGainControl, | ||
}; | ||
} | ||
else { | ||
opts.audio = true; | ||
mediaStreamConstraints.audio = true; | ||
} | ||
@@ -269,3 +272,3 @@ if (this.audioContext != null) { | ||
yield this.apiClient.setSourceSampleRate(this.audioContext.sampleRate); | ||
this.initializeMicrophonePromise = this.microphone.initialize(this.audioContext, opts); | ||
this.initializeMicrophonePromise = this.microphone.initialize(this.audioContext, mediaStreamConstraints); | ||
yield this.initializeMicrophonePromise; | ||
@@ -500,2 +503,8 @@ } | ||
} | ||
/** | ||
* print statistics to console | ||
*/ | ||
printStats() { | ||
this.microphone.printStats(); | ||
} | ||
} | ||
@@ -502,0 +511,0 @@ exports.Client = Client; |
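Putting the client-side changes together: feature detection now covers `autoGainControl` as well as native resampling, the detected flags decide whether `getUserMedia` receives a detailed audio constraint object or plain `audio: true`, and `Client.printStats()` simply delegates to the microphone. A minimal recreation of the constraint assembly; the wrapping function and variable names are illustrative, while the branching mirrors the diff.

```ts
// Rebuilds the getUserMedia constraints the way the 1.0.16 client does.
function buildMediaStreamConstraints(sampleRate: number): MediaStreamConstraints {
  const supported = navigator.mediaDevices.getSupportedConstraints();
  const nativeResamplingSupported = supported.sampleRate === true;
  const autoGainControl = supported.autoGainControl === true;

  const mediaStreamConstraints: MediaStreamConstraints = { video: false };
  if (nativeResamplingSupported || autoGainControl) {
    mediaStreamConstraints.audio = {
      sampleRate,
      autoGainControl,
    };
  } else {
    mediaStreamConstraints.audio = true;
  }
  return mediaStreamConstraints;
}
```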
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package