New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in | Demo | Install
Socket

window.ai

Package Overview
Dependencies
Maintainers
2
Versions
8
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

window.ai - npm Package Compare versions

Comparing version 0.1.1 to 0.2.0

56

dist/index.d.ts

@@ -13,8 +13,11 @@ type ChatMessage = {

type Input = PromptInput | MessagesInput;
declare function isPromptInput(input: Input): input is PromptInput;
declare function isMessagesInput(input: Input): input is MessagesInput;
type TextOutput = {
text: string;
isPartial?: boolean;
};
type MessageOutput = {
message: ChatMessage;
isPartial?: boolean;
};

@@ -24,9 +27,10 @@ type Output = TextOutput | MessageOutput;

declare function isMessageOutput(output: Output): output is MessageOutput;
interface CompletionOptions<TModel> {
onStreamResult?: (result: Output | null, error: string | null) => unknown;
type InferredOutput<TInput> = TInput extends MessagesInput ? MessageOutput : TInput extends PromptInput ? TextOutput : Output;
interface CompletionOptions<TModel, TInput extends Input = Input> {
onStreamResult?: (result: InferredOutput<TInput> | null, error: string | null) => unknown;
temperature?: number;
numOutputs?: number;
maxTokens?: number;
stopSequences?: string[];
model?: TModel;
numOutputs?: number;
}

@@ -46,4 +50,15 @@ declare enum ErrorCode {

type EventListenerHandler<T> = (event: EventType, data: T | ErrorCode) => void;
type ModelProviderOptions = {
baseUrl: string;
session?: {
email?: string;
expiresAt?: number;
};
shouldSetDefault?: boolean;
};
declare const VALID_DOMAIN: "https://windowai.io";
interface WindowAI<TModel = string> {
/**
* Metadata containing the domain and version of the extension API
*/
__window_ai_metadata__: {

@@ -53,5 +68,34 @@ domain: typeof VALID_DOMAIN;

};
getCompletion(input: Input, options?: CompletionOptions<TModel>): Promise<Output | Output[]>;
getCurrentModel(): Promise<TModel>;
/** Generate text completions from the specified (or preferred) model.
* @param input The input to use for the completion.
* @param options Options for the completion request.
* @returns A promise that resolves to an array of completion results.
*/
generateText<TInput extends Input = Input>(input: TInput, options?: CompletionOptions<TModel, TInput>): Promise<InferredOutput<TInput>[]>;
/** DEPRECATED: use generate instead
* Get or stream a completion from the specified (or preferred) model.
* @param input The input to use for the completion.
* @param options Options for the completion request.
* @returns A promise that resolves to an array of completion results.
*/
getCompletion<TInput extends Input = Input>(input: TInput, options?: CompletionOptions<TModel, TInput>): Promise<InferredOutput<TInput>[]>;
/** Get the user's current model.
* @returns A promise that resolves to the user's current model, or
* undefined if not available.
*/
getCurrentModel(): Promise<TModel | undefined>;
/**
* Add an event listener for all event types.
* @param handler The handler to call when any event is emitted.
* @returns A request ID that can be used to remove the event listener.
*/
addEventListener<T>(handler: EventListenerHandler<T>): RequestID;
/**
* Update the external model provider.
* @param options The options for the model provider.
* If metadata is undefined, logs out the user.
* @returns A promise that resolves to the user's current model, or
* undefined if not available.
*/
BETA_updateModelProvider(options: ModelProviderOptions): Promise<TModel | undefined>;
}

@@ -73,2 +117,2 @@ declare global {

export { ChatMessage, ChatRole, CompletionOptions, ErrorCode, EventListenerHandler, EventType, Input, MessageOutput, MessagesInput, Output, PromptInput, RequestID, TextOutput, VALID_DOMAIN, WindowAI, getWindowAI, hasWindowAI, isMessageOutput, isMessagesInput, isTextOutput, waitForWindowAI };
export { ChatMessage, ChatRole, CompletionOptions, ErrorCode, EventListenerHandler, EventType, InferredOutput, Input, MessageOutput, MessagesInput, ModelProviderOptions, Output, PromptInput, RequestID, TextOutput, VALID_DOMAIN, WindowAI, getWindowAI, hasWindowAI, isMessageOutput, isMessagesInput, isPromptInput, isTextOutput, waitForWindowAI };
// src/index.ts
function isPromptInput(input) {
return "prompt" in input;
}
function isMessagesInput(input) {

@@ -26,3 +29,3 @@ return "messages" in input;

function hasWindowAI() {
return typeof globalThis.window.ai?.getCompletion === "function";
return typeof globalThis.window.ai?.generateText === "function";
}

@@ -65,2 +68,3 @@ var DEFAULT_WAIT_OPTIONS = {

isMessagesInput,
isPromptInput,
isTextOutput,

@@ -67,0 +71,0 @@ waitForWindowAI

4

package.json
{
"name": "window.ai",
"public": true,
"version": "0.1.1",
"version": "0.2.0",
"files": [

@@ -9,4 +9,4 @@ "dist"

"type": "module",
"types": "./dist/index.d.ts",
"module": "./dist/index.js",
"types": "./dist/index.d.ts",
"exports": {

@@ -13,0 +13,0 @@ ".": {

@@ -22,4 +22,4 @@ # Official window.ai client library.

ai.getCurrentModel()
ai.getCompletion(...)
ai.generateText(...)
ai.addEventListener(...)
```

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket | Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc