@scoopika/client - npm Package Compare versions

Comparing version 1.0.6 to 1.0.7

index.d.ts
import * as types from '@scoopika/types';
import { StoreSession, RunHistory, ServerRequest, ToolFunction, AudioRes, Base64Audio } from '@scoopika/types';
import { StoreSession, RunHistory, ServerRequest, ToolFunction, AudioStream, RunInputs } from '@scoopika/types';
import { JSONSchema } from 'json-schema-to-ts';

@@ -66,22 +66,20 @@ export { FromSchema, JSONSchema } from 'json-schema-to-ts';

declare class AudioPlayer {
private audio_queue;
private done_indexex;
declare class RunVoicePlayer {
private listeners;
private audioContext;
source?: AudioBufferSourceNode;
stopped: boolean;
private currentSource?;
private done_indexes;
paused: boolean;
started: boolean;
constructor();
queue(call: AudioRes): void;
read(call: AudioRes): Promise<void>;
queue(stream: AudioStream): Promise<void>;
private readAudio;
private play;
pause(): void;
resume(): void;
stop(): void;
unStop(play?: boolean): void;
reset(): void;
}
type OnTextCallback = (text: string) => void;
type State = "recording" | "stopped" | "paused";
declare class AudioRecorder {
declare class VoiceRecorder {
mediaRecorder: MediaRecorder | null;

@@ -101,4 +99,10 @@ private audioChunks;

onAudioProcess?: (dataArray: Uint8Array) => any;
onText?: OnTextCallback;
visualizer?: (dataArray: Uint8Array) => any;
private dataRequestInterval;
smoothDataArray: any[];
circleRadius: number;
recognition: any | null;
text: string;
isRecognitionFinished: boolean;
constructor(options?: {

@@ -108,2 +112,3 @@ onAudioChunk?: (chunk: Blob) => any;

onAudioProcess?: (dataArray: Uint8Array) => any;
onText?: OnTextCallback;
});

@@ -116,2 +121,5 @@ private changeState;

init(): Promise<boolean>;
private initSpeechRecognition;
private startSpeechRecognition;
private stopSpeechRecognition;
start(): this;

@@ -124,7 +132,8 @@ stop(): this;

toString(): string | null;
asRunInput(): Promise<Base64Audio | undefined>;
addVisualizer(element: HTMLCanvasElement | string): void;
visualize(element: HTMLCanvasElement | string, dataArray: Uint8Array): void;
asRunInput(): Promise<RunInputs | undefined>;
finish(): Promise<this>;
addVisualizer(element: HTMLCanvasElement | string, color: string): void;
visualize(element: HTMLCanvasElement | string, dataArray: Uint8Array, color: string): void;
}
export { Agent, AudioPlayer, AudioRecorder, Box, Client, Store, createActionSchema };
export { Agent, Box, Client, RunVoicePlayer as RunAudioPlayer, Store, VoiceRecorder, createActionSchema };
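The declarations above rename the browser helpers (AudioRecorder becomes VoiceRecorder, AudioPlayer becomes RunVoicePlayer, re-exported as RunAudioPlayer) and change asRunInput() to resolve with RunInputs instead of Base64Audio. A minimal recorder-side sketch of the new surface, assuming only what the .d.ts declares; the canvas element id and color value are illustrative placeholders, not part of the package API:

import { VoiceRecorder } from "@scoopika/client";
import type { RunInputs } from "@scoopika/types";

// onText receives the live browser speech-recognition transcript (new in 1.0.7).
const recorder = new VoiceRecorder({
  onText: (text) => console.log("transcript so far:", text),
});

// addVisualizer() now takes a color as its second argument.
// The canvas id is a placeholder for illustration.
recorder.addVisualizer("visualizer-canvas", "#4f46e5");

recorder.start();

// finish() (new) stops recording and waits for speech recognition to settle;
// asRunInput() then resolves with RunInputs (the transcribed text, or base64
// audio as a fallback) rather than the old Base64Audio shape.
async function collect(): Promise<RunInputs | undefined> {
  await recorder.finish();
  return recorder.asRunInput();
}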

@@ -25,7 +25,7 @@ "use strict";

Agent: () => agent_default,
AudioPlayer: () => audio_player_default,
AudioRecorder: () => audio_recorder_default,
Box: () => box_default,
Client: () => client_default,
RunAudioPlayer: () => audio_player_default,
Store: () => store_default,
VoiceRecorder: () => voice_recorder_default,
createActionSchema: () => createActionSchema

@@ -395,12 +395,12 @@ });

// src/audio_player.ts
var AudioPlayer = class {
audio_queue = [];
done_indexex = [];
var RunVoicePlayer = class {
listeners = {};
audioContext;
source;
stopped = false;
currentSource;
done_indexes = [];
paused = false;
started = false;
constructor() {
if (typeof window === void 0) {
throw new Error("The audio player runs only in browser envrionment");
throw new Error("The audio player runs only in browser environment");
}

@@ -410,62 +410,66 @@ const audioContext = new (window.AudioContext || window.webkitAudioContext)();

}
queue(call) {
this.read(call);
async queue(stream) {
this.started = true;
const audio_buffer = await this.readAudio(stream);
if (stream.index === 0) {
this.play(stream.index, audio_buffer);
return;
}
if (this.done_indexes.indexOf(stream.index - 1) !== -1) {
this.play(stream.index, audio_buffer);
return;
}
const play = this.play.bind(this);
this.listeners[stream.index - 1] = () => {
play(stream.index, audio_buffer);
};
}
async read(call) {
const res = await fetch(call.read);
const buffer = await res.arrayBuffer();
const audio_buffer = await this.audioContext.decodeAudioData(buffer);
this.audio_queue.push({ index: call.index, buffer: audio_buffer });
this.play(call.index, audio_buffer);
async readAudio(stream) {
const res = await fetch(stream.read);
const arr = await res.arrayBuffer();
const buffer = await this.audioContext.decodeAudioData(arr);
return buffer;
}
play(index, buffer) {
if (index !== 0 && this.done_indexex.indexOf(index - 1) === -1) {
return;
}
const source = this.audioContext.createBufferSource();
source.buffer = buffer;
source.connect(this.audioContext.destination);
this.source = source;
source.onended = () => {
this.done_indexex.push(index);
const next = this.audio_queue.filter((a) => a.index === index + 1)[0];
if (next)
this.play(next.index, next.buffer);
this.currentSource = this.audioContext.createBufferSource();
this.currentSource.buffer = buffer;
this.currentSource.connect(this.audioContext.destination);
this.currentSource.onended = () => {
const listener = this.listeners[index];
if (listener)
listener();
this.done_indexes.push(index);
};
if (!this.stopped && !this.paused) {
source.start();
if (!this.paused) {
this.currentSource.start();
}
}
pause() {
if (!this.source)
return;
this.source.stop();
if (this.currentSource)
this.currentSource.stop();
this.paused = true;
}
resume() {
if (!this.source)
if (!this.paused)
return;
this.source.start();
if (this.currentSource)
this.currentSource.start();
this.paused = false;
}
stop() {
if (this.source)
this.source.stop();
this.stopped = true;
}
unStop(play = true) {
if (this.source && play)
this.source.start();
this.stopped = false;
}
reset() {
if (this.source)
this.source.stop();
this.audio_queue = [];
}
};
var audio_player_default = AudioPlayer;
var audio_player_default = RunVoicePlayer;
// src/audio_recorder.ts
var AudioRecorder = class {
// src/lib/sleep.ts
function sleep(ms) {
if (typeof ms !== "number") {
ms = 0;
}
return new Promise((resolve) => {
setTimeout(resolve, ms);
});
}
var sleep_default = sleep;
// src/voice_recorder.ts
var VoiceRecorder = class {
mediaRecorder = null;

@@ -485,4 +489,10 @@ audioChunks = [];

onAudioProcess;
onText;
visualizer;
dataRequestInterval = null;
smoothDataArray = new Array(4).fill(0);
circleRadius = 0;
recognition = null;
text = "";
isRecognitionFinished = false;
constructor(options) {

@@ -492,2 +502,4 @@ this.onAudioChunk = options == null ? void 0 : options.onAudioChunk;

this.onAudioProcess = options == null ? void 0 : options.onAudioProcess;
this.onText = options == null ? void 0 : options.onText;
this.initSpeechRecognition();
}

@@ -581,5 +593,47 @@ changeState(state) {

}
initSpeechRecognition() {
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
if (!SpeechRecognition) {
console.warn("Speech recognition not supported in this browser.");
return;
}
this.recognition = new SpeechRecognition();
this.recognition.continuous = true;
this.recognition.interimResults = true;
this.recognition.lang = "en-US";
this.recognition.onresult = (event) => {
let fullTranscript = "";
for (let i = event.resultIndex; i < event.results.length; i++) {
const transcript = event.results[i][0].transcript;
fullTranscript += transcript;
if (event.results[i].isFinal) {
this.text += transcript;
}
}
this.text = fullTranscript;
if (this.onText)
this.onText(this.text);
};
this.recognition.onerror = (event) => {
console.error("Speech recognition error", event);
};
this.recognition.onend = () => {
this.isRecognitionFinished = true;
};
}
startSpeechRecognition() {
if (this.recognition && !this.isPaused) {
this.recognition.start();
}
}
stopSpeechRecognition() {
if (this.recognition) {
this.recognition.stop();
}
}
start() {
this.audioBlob = null;
this.audioChunks = [];
this.text = "";
this.isRecognitionFinished = false;
this.init().then(() => {

@@ -590,2 +644,3 @@ if (!this.isRecording && this.mediaRecorder) {

this.isPaused = false;
this.startSpeechRecognition();
}

@@ -599,2 +654,5 @@ });

this.mediaRecorder.stop();
this.stopSpeechRecognition();
if (this.visualizer)
this.visualizer([]);
return this;

@@ -607,2 +665,3 @@ }

this.isPaused = true;
this.stopSpeechRecognition();
}

@@ -616,2 +675,4 @@ return this;

this.isRecording = true;
this.isRecognitionFinished = false;
this.startSpeechRecognition();
}

@@ -636,13 +697,28 @@ }

}
asRunInput() {
async asRunInput() {
if (this.recognition) {
while (!this.isRecognitionFinished) {
await sleep_default(5);
}
if (this.text.length > 0) {
return {
message: this.text
};
}
}
return new Promise((resolve) => {
if (!this.audioBlob)
return void 0;
if (!this.audioBlob) {
resolve(void 0);
return;
}
const reader = new FileReader();
reader.onloadend = () => {
const dataUrl = reader.result;
const base64 = dataUrl.split(",")[1];
resolve({
type: "base64",
value: base64
audio: [
{
type: "base64",
value: dataUrl
}
]
});

@@ -653,10 +729,21 @@ };

}
addVisualizer(element) {
async finish() {
this.stop();
if (!this.recognition) {
return this;
}
while (!this.isRecognitionFinished) {
await sleep_default(5);
}
return this;
}
addVisualizer(element, color) {
const visualize = this.visualize.bind(this);
visualize(element, this.dataArray || [], color);
const visualizeFunction = (dataArray) => {
visualize(element, dataArray);
visualize(element, dataArray, color);
};
this.visualizer = visualizeFunction;
}
visualize(element, dataArray) {
visualize(element, dataArray, color) {
const elm = typeof element === "string" ? document.getElementById(element) : element;

@@ -673,26 +760,60 @@ if (!elm) {

const height = canvas.height;
const barCount = 4;
const centerX = width / 2;
const centerY = height / 2;
const barWidth = width / (barCount * 2);
const barSpacing = barWidth / 2;
const maxRadius = barWidth / 2;
const targetRadius = dataArray.length !== 0 && this.state === "recording" ? 0 : width;
this.circleRadius += (targetRadius - this.circleRadius) * 0.1;
canvasCtx.clearRect(0, 0, width, height);
canvasCtx.fillStyle = "rgb(200, 200, 200)";
canvasCtx.fillRect(0, 0, width, height);
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = "rgb(0, 0, 0)";
canvasCtx.beginPath();
const sliceWidth = width * 1 / dataArray.length;
let x = 0;
for (let i = 0; i < dataArray.length; i++) {
const v = dataArray[i] / 128;
const y = v * height / 2;
if (i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
if (this.circleRadius > 1) {
canvasCtx.fillStyle = color;
canvasCtx.beginPath();
canvasCtx.arc(centerX, centerY, this.circleRadius, 0, 2 * Math.PI);
canvasCtx.fill();
} else {
const totalWaveWidth = barCount * barWidth + (barCount - 1) * barSpacing;
const startX = (width - totalWaveWidth) / 2;
for (let i = 0; i < barCount; i++) {
const barHeight = dataArray[i] / 128 * centerY;
this.smoothDataArray[i] += (barHeight - this.smoothDataArray[i]) * 0.5;
}
x += sliceWidth;
this.smoothDataArray.forEach((barHeight, i) => {
const barX = startX + i * (barWidth + barSpacing);
const barY = centerY - barHeight / 2;
canvasCtx.fillStyle = color;
canvasCtx.beginPath();
canvasCtx.moveTo(barX + maxRadius, barY);
canvasCtx.lineTo(barX + barWidth - maxRadius, barY);
canvasCtx.quadraticCurveTo(
barX + barWidth,
barY,
barX + barWidth,
barY + maxRadius
);
canvasCtx.lineTo(barX + barWidth, barY + barHeight - maxRadius);
canvasCtx.quadraticCurveTo(
barX + barWidth,
barY + barHeight,
barX + barWidth - maxRadius,
barY + barHeight
);
canvasCtx.lineTo(barX + maxRadius, barY + barHeight);
canvasCtx.quadraticCurveTo(
barX,
barY + barHeight,
barX,
barY + barHeight - maxRadius
);
canvasCtx.lineTo(barX, barY + maxRadius);
canvasCtx.quadraticCurveTo(barX, barY, barX + maxRadius, barY);
canvasCtx.closePath();
canvasCtx.fill();
});
}
canvasCtx.lineTo(canvas.width, canvas.height / 1);
canvasCtx.stroke();
}
};
var audio_recorder_default = AudioRecorder;
var voice_recorder_default = VoiceRecorder;
return __toCommonJS(src_exports);
})();
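The RunVoicePlayer above enforces in-order playback: index 0 starts as soon as it is decoded, and every later chunk either starts immediately (when the previous index is already in done_indexes) or waits for the previous source's onended callback via the listeners map. A minimal consumer-side sketch, assuming an array of AudioStream chunks produced elsewhere (the producer is outside this diff); each chunk carries an index and a read URL, as used by readAudio():

import { RunAudioPlayer } from "@scoopika/client";
import type { AudioStream } from "@scoopika/types";

// Queue chunks as they arrive; the player buffers and sequences them by index.
async function playAll(streams: AudioStream[]) {
  const player = new RunAudioPlayer();
  for (const stream of streams) {
    // queue() fetches and decodes stream.read, then either plays it now or
    // registers a listener that fires once the previous chunk has ended.
    await player.queue(stream);
  }
  // The player also exposes pause() and resume() for the current source.
}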

@@ -24,4 +24,2 @@ "use strict";

Agent: () => agent_default,
AudioPlayer: () => audio_player_default,
AudioRecorder: () => audio_recorder_default,
Box: () => box_default,

@@ -31,3 +29,5 @@ Client: () => client_default,

JSONSchema: () => import_json_schema_to_ts.JSONSchema,
RunAudioPlayer: () => audio_player_default,
Store: () => store_default,
VoiceRecorder: () => voice_recorder_default,
createActionSchema: () => createActionSchema

@@ -401,12 +401,12 @@ });

// src/audio_player.ts
var AudioPlayer = class {
audio_queue = [];
done_indexex = [];
var RunVoicePlayer = class {
listeners = {};
audioContext;
source;
stopped = false;
currentSource;
done_indexes = [];
paused = false;
started = false;
constructor() {
if (typeof window === void 0) {
throw new Error("The audio player runs only in browser envrionment");
throw new Error("The audio player runs only in browser environment");
}

@@ -416,62 +416,66 @@ const audioContext = new (window.AudioContext || window.webkitAudioContext)();

}
queue(call) {
this.read(call);
async queue(stream) {
this.started = true;
const audio_buffer = await this.readAudio(stream);
if (stream.index === 0) {
this.play(stream.index, audio_buffer);
return;
}
if (this.done_indexes.indexOf(stream.index - 1) !== -1) {
this.play(stream.index, audio_buffer);
return;
}
const play = this.play.bind(this);
this.listeners[stream.index - 1] = () => {
play(stream.index, audio_buffer);
};
}
async read(call) {
const res = await fetch(call.read);
const buffer = await res.arrayBuffer();
const audio_buffer = await this.audioContext.decodeAudioData(buffer);
this.audio_queue.push({ index: call.index, buffer: audio_buffer });
this.play(call.index, audio_buffer);
async readAudio(stream) {
const res = await fetch(stream.read);
const arr = await res.arrayBuffer();
const buffer = await this.audioContext.decodeAudioData(arr);
return buffer;
}
play(index, buffer) {
if (index !== 0 && this.done_indexex.indexOf(index - 1) === -1) {
return;
}
const source = this.audioContext.createBufferSource();
source.buffer = buffer;
source.connect(this.audioContext.destination);
this.source = source;
source.onended = () => {
this.done_indexex.push(index);
const next = this.audio_queue.filter((a) => a.index === index + 1)[0];
if (next)
this.play(next.index, next.buffer);
this.currentSource = this.audioContext.createBufferSource();
this.currentSource.buffer = buffer;
this.currentSource.connect(this.audioContext.destination);
this.currentSource.onended = () => {
const listener = this.listeners[index];
if (listener)
listener();
this.done_indexes.push(index);
};
if (!this.stopped && !this.paused) {
source.start();
if (!this.paused) {
this.currentSource.start();
}
}
pause() {
if (!this.source)
return;
this.source.stop();
if (this.currentSource)
this.currentSource.stop();
this.paused = true;
}
resume() {
if (!this.source)
if (!this.paused)
return;
this.source.start();
if (this.currentSource)
this.currentSource.start();
this.paused = false;
}
stop() {
if (this.source)
this.source.stop();
this.stopped = true;
}
unStop(play = true) {
if (this.source && play)
this.source.start();
this.stopped = false;
}
reset() {
if (this.source)
this.source.stop();
this.audio_queue = [];
}
};
var audio_player_default = AudioPlayer;
var audio_player_default = RunVoicePlayer;
// src/audio_recorder.ts
var AudioRecorder = class {
// src/lib/sleep.ts
function sleep(ms) {
if (typeof ms !== "number") {
ms = 0;
}
return new Promise((resolve) => {
setTimeout(resolve, ms);
});
}
var sleep_default = sleep;
// src/voice_recorder.ts
var VoiceRecorder = class {
mediaRecorder = null;

@@ -491,4 +495,10 @@ audioChunks = [];

onAudioProcess;
onText;
visualizer;
dataRequestInterval = null;
smoothDataArray = new Array(4).fill(0);
circleRadius = 0;
recognition = null;
text = "";
isRecognitionFinished = false;
constructor(options) {

@@ -498,2 +508,4 @@ this.onAudioChunk = options == null ? void 0 : options.onAudioChunk;

this.onAudioProcess = options == null ? void 0 : options.onAudioProcess;
this.onText = options == null ? void 0 : options.onText;
this.initSpeechRecognition();
}

@@ -587,5 +599,47 @@ changeState(state) {

}
initSpeechRecognition() {
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
if (!SpeechRecognition) {
console.warn("Speech recognition not supported in this browser.");
return;
}
this.recognition = new SpeechRecognition();
this.recognition.continuous = true;
this.recognition.interimResults = true;
this.recognition.lang = "en-US";
this.recognition.onresult = (event) => {
let fullTranscript = "";
for (let i = event.resultIndex; i < event.results.length; i++) {
const transcript = event.results[i][0].transcript;
fullTranscript += transcript;
if (event.results[i].isFinal) {
this.text += transcript;
}
}
this.text = fullTranscript;
if (this.onText)
this.onText(this.text);
};
this.recognition.onerror = (event) => {
console.error("Speech recognition error", event);
};
this.recognition.onend = () => {
this.isRecognitionFinished = true;
};
}
startSpeechRecognition() {
if (this.recognition && !this.isPaused) {
this.recognition.start();
}
}
stopSpeechRecognition() {
if (this.recognition) {
this.recognition.stop();
}
}
start() {
this.audioBlob = null;
this.audioChunks = [];
this.text = "";
this.isRecognitionFinished = false;
this.init().then(() => {

@@ -596,2 +650,3 @@ if (!this.isRecording && this.mediaRecorder) {

this.isPaused = false;
this.startSpeechRecognition();
}

@@ -605,2 +660,5 @@ });

this.mediaRecorder.stop();
this.stopSpeechRecognition();
if (this.visualizer)
this.visualizer([]);
return this;

@@ -613,2 +671,3 @@ }

this.isPaused = true;
this.stopSpeechRecognition();
}

@@ -622,2 +681,4 @@ return this;

this.isRecording = true;
this.isRecognitionFinished = false;
this.startSpeechRecognition();
}

@@ -642,13 +703,28 @@ }

}
asRunInput() {
async asRunInput() {
if (this.recognition) {
while (!this.isRecognitionFinished) {
await sleep_default(5);
}
if (this.text.length > 0) {
return {
message: this.text
};
}
}
return new Promise((resolve) => {
if (!this.audioBlob)
return void 0;
if (!this.audioBlob) {
resolve(void 0);
return;
}
const reader = new FileReader();
reader.onloadend = () => {
const dataUrl = reader.result;
const base64 = dataUrl.split(",")[1];
resolve({
type: "base64",
value: base64
audio: [
{
type: "base64",
value: dataUrl
}
]
});

@@ -659,10 +735,21 @@ };

}
addVisualizer(element) {
async finish() {
this.stop();
if (!this.recognition) {
return this;
}
while (!this.isRecognitionFinished) {
await sleep_default(5);
}
return this;
}
addVisualizer(element, color) {
const visualize = this.visualize.bind(this);
visualize(element, this.dataArray || [], color);
const visualizeFunction = (dataArray) => {
visualize(element, dataArray);
visualize(element, dataArray, color);
};
this.visualizer = visualizeFunction;
}
visualize(element, dataArray) {
visualize(element, dataArray, color) {
const elm = typeof element === "string" ? document.getElementById(element) : element;

@@ -679,30 +766,62 @@ if (!elm) {

const height = canvas.height;
const barCount = 4;
const centerX = width / 2;
const centerY = height / 2;
const barWidth = width / (barCount * 2);
const barSpacing = barWidth / 2;
const maxRadius = barWidth / 2;
const targetRadius = dataArray.length !== 0 && this.state === "recording" ? 0 : width;
this.circleRadius += (targetRadius - this.circleRadius) * 0.1;
canvasCtx.clearRect(0, 0, width, height);
canvasCtx.fillStyle = "rgb(200, 200, 200)";
canvasCtx.fillRect(0, 0, width, height);
canvasCtx.lineWidth = 2;
canvasCtx.strokeStyle = "rgb(0, 0, 0)";
canvasCtx.beginPath();
const sliceWidth = width * 1 / dataArray.length;
let x = 0;
for (let i = 0; i < dataArray.length; i++) {
const v = dataArray[i] / 128;
const y = v * height / 2;
if (i === 0) {
canvasCtx.moveTo(x, y);
} else {
canvasCtx.lineTo(x, y);
if (this.circleRadius > 1) {
canvasCtx.fillStyle = color;
canvasCtx.beginPath();
canvasCtx.arc(centerX, centerY, this.circleRadius, 0, 2 * Math.PI);
canvasCtx.fill();
} else {
const totalWaveWidth = barCount * barWidth + (barCount - 1) * barSpacing;
const startX = (width - totalWaveWidth) / 2;
for (let i = 0; i < barCount; i++) {
const barHeight = dataArray[i] / 128 * centerY;
this.smoothDataArray[i] += (barHeight - this.smoothDataArray[i]) * 0.5;
}
x += sliceWidth;
this.smoothDataArray.forEach((barHeight, i) => {
const barX = startX + i * (barWidth + barSpacing);
const barY = centerY - barHeight / 2;
canvasCtx.fillStyle = color;
canvasCtx.beginPath();
canvasCtx.moveTo(barX + maxRadius, barY);
canvasCtx.lineTo(barX + barWidth - maxRadius, barY);
canvasCtx.quadraticCurveTo(
barX + barWidth,
barY,
barX + barWidth,
barY + maxRadius
);
canvasCtx.lineTo(barX + barWidth, barY + barHeight - maxRadius);
canvasCtx.quadraticCurveTo(
barX + barWidth,
barY + barHeight,
barX + barWidth - maxRadius,
barY + barHeight
);
canvasCtx.lineTo(barX + maxRadius, barY + barHeight);
canvasCtx.quadraticCurveTo(
barX,
barY + barHeight,
barX,
barY + barHeight - maxRadius
);
canvasCtx.lineTo(barX, barY + maxRadius);
canvasCtx.quadraticCurveTo(barX, barY, barX + maxRadius, barY);
canvasCtx.closePath();
canvasCtx.fill();
});
}
canvasCtx.lineTo(canvas.width, canvas.height / 1);
canvasCtx.stroke();
}
};
var audio_recorder_default = AudioRecorder;
var voice_recorder_default = VoiceRecorder;
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
Agent,
AudioPlayer,
AudioRecorder,
Box,

@@ -712,4 +831,6 @@ Client,

JSONSchema,
RunAudioPlayer,
Store,
VoiceRecorder,
createActionSchema
});
package.json
{
"name": "@scoopika/client",
"version": "1.0.6",
"version": "1.0.7",
"description": "Run AI agents & boxes on the client side with real-time streaming hooks and client-side actions",

@@ -12,3 +12,3 @@ "main": "./index.js",

"e2e-r": "playwright show-report",
"prettier": "prettier --write .",
"prettier": "prettier --write ./src/",
"testserver": "ts-node -r dotenv/config ./tests/server.ts",

@@ -43,7 +43,6 @@ "httpserver": "http-server"

"dependencies": {
"@scoopika/types": "^2.1.8",
"@scoopika/types": "^2.2.1",
"buffer": "^6.0.3",
"json-schema-to-ts": "^3.1.0",
"rfc4648": "^1.5.3"
"json-schema-to-ts": "^3.1.0"
}
}

