Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@google/generative-ai

Package Overview
Dependencies
Maintainers
4
Versions
36
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@google/generative-ai — npm package: compare versions

Comparing version 0.15.0 to 0.16.0

test-integration/node/abort-signal.test.ts

6

CHANGELOG.md
# @google/generative-ai
## 0.16.0
### Minor Changes
- d2d42ca: Adds `SingleRequestOptions` with `AbortSignal` support to most of the asynchronous methods of `GenerativeModel`, `GoogleAIFileManager` and `ChatSession`.
## 0.15.0

@@ -4,0 +10,0 @@

75

dist/generative-ai.d.ts

@@ -83,7 +83,7 @@ /**

params?: StartChatParams;
requestOptions?: RequestOptions;
private _requestOptions;
private _apiKey;
private _history;
private _sendPromise;
constructor(apiKey: string, model: string, params?: StartChatParams, requestOptions?: RequestOptions);
constructor(apiKey: string, model: string, params?: StartChatParams, _requestOptions?: RequestOptions);
/**

@@ -97,5 +97,9 @@ * Gets the chat history so far. Blocked prompts are not added to history.

* Sends a chat message and receives a non-streaming
* {@link GenerateContentResult}
* {@link GenerateContentResult}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
sendMessage(request: string | Array<string | Part>): Promise<GenerateContentResult>;
sendMessage(request: string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;
/**

@@ -105,4 +109,8 @@ * Sends a chat message and receives the response as a

* and a response promise.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
sendMessageStream(request: string | Array<string | Part>): Promise<GenerateContentStreamResult>;
sendMessageStream(request: string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentStreamResult>;
}

@@ -651,2 +659,3 @@

apiKey: string;
private _requestOptions;
model: string;

@@ -660,15 +669,23 @@ generationConfig: GenerationConfig;

cachedContent: CachedContent;
constructor(apiKey: string, modelParams: ModelParams, requestOptions?: RequestOptions);
constructor(apiKey: string, modelParams: ModelParams, _requestOptions?: RequestOptions);
/**
* Makes a single non-streaming call to the model
* and returns an object containing a single {@link GenerateContentResponse}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
generateContent(request: GenerateContentRequest | string | Array<string | Part>): Promise<GenerateContentResult>;
generateContent(request: GenerateContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;
/**
* Makes a single streaming call to the model
* and returns an object containing an iterable stream that iterates
* over all chunks in the streaming response as well as
* a promise that returns the final aggregated response.
* Makes a single streaming call to the model and returns an object
* containing an iterable stream that iterates over all chunks in the
* streaming response as well as a promise that returns the final
* aggregated response.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
generateContentStream(request: GenerateContentRequest | string | Array<string | Part>): Promise<GenerateContentStreamResult>;
generateContentStream(request: GenerateContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentStreamResult>;
/**

@@ -681,12 +698,24 @@ * Gets a new {@link ChatSession} instance which can be used for

* Counts the tokens in the provided request.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
countTokens(request: CountTokensRequest | string | Array<string | Part>): Promise<CountTokensResponse>;
countTokens(request: CountTokensRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<CountTokensResponse>;
/**
* Embeds the provided content.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
embedContent(request: EmbedContentRequest | string | Array<string | Part>): Promise<EmbedContentResponse>;
embedContent(request: EmbedContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<EmbedContentResponse>;
/**
* Embeds an array of {@link EmbedContentRequest}s.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
batchEmbedContents(batchEmbedContentRequest: BatchEmbedContentsRequest): Promise<BatchEmbedContentsResponse>;
batchEmbedContents(batchEmbedContentRequest: BatchEmbedContentsRequest, requestOptions?: SingleRequestOptions): Promise<BatchEmbedContentsResponse>;
}

@@ -945,2 +974,18 @@

/**
* Params passed to atomic asynchronous operations.
* @public
*/
export declare interface SingleRequestOptions extends RequestOptions {
/**
* An object that may be used to abort asynchronous requests. The request may
* also be aborted due to the expiration of the timeout value, if provided.
*
* NOTE: AbortSignal is a client-only operation. Using it to cancel an
* operation will not cancel the request in the service. You will still
* be charged usage for any applicable operations.
*/
signal?: AbortSignal;
}
/**
* Params for {@link GenerativeModel.startChat}.

@@ -947,0 +992,0 @@ * @public

105

dist/index.js

@@ -286,3 +286,3 @@ 'use strict';

*/
const PACKAGE_VERSION = "0.15.0";
const PACKAGE_VERSION = "0.16.0";
const PACKAGE_LOG_HEADER = "genai-js";

@@ -362,3 +362,3 @@ var Task;

}
async function makeModelRequest(model, task, apiKey, stream, body, requestOptions,
async function makeModelRequest(model, task, apiKey, stream, body, requestOptions = {},
// Allows this to be stubbed for tests

@@ -414,7 +414,13 @@ fetchFn = fetch) {

const fetchOptions = {};
if ((requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeout) >= 0) {
const abortController = new AbortController();
const signal = abortController.signal;
setTimeout(() => abortController.abort(), requestOptions.timeout);
fetchOptions.signal = signal;
if ((requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.signal) !== undefined || (requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeout) >= 0) {
const controller = new AbortController();
if ((requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeout) >= 0) {
setTimeout(() => controller.abort(), requestOptions.timeout);
}
if (requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.signal) {
requestOptions.signal.addEventListener("abort", () => {
controller.abort();
});
}
fetchOptions.signal = controller.signal;
}

@@ -1061,6 +1067,6 @@ return fetchOptions;

class ChatSession {
constructor(apiKey, model, params, requestOptions) {
constructor(apiKey, model, params, _requestOptions = {}) {
this.model = model;
this.params = params;
this.requestOptions = requestOptions;
this._requestOptions = _requestOptions;
this._history = [];

@@ -1085,5 +1091,9 @@ this._sendPromise = Promise.resolve();

* Sends a chat message and receives a non-streaming
* {@link GenerateContentResult}
* {@link GenerateContentResult}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async sendMessage(request) {
async sendMessage(request, requestOptions = {}) {
var _a, _b, _c, _d, _e, _f;

@@ -1101,6 +1111,7 @@ await this._sendPromise;

};
const chatSessionRequestOptions = Object.assign(Object.assign({}, this._requestOptions), requestOptions);
let finalResult;
// Add onto the chain.
this._sendPromise = this._sendPromise
.then(() => generateContent(this._apiKey, this.model, generateContentRequest, this.requestOptions))
.then(() => generateContent(this._apiKey, this.model, generateContentRequest, chatSessionRequestOptions))
.then((result) => {

@@ -1131,4 +1142,8 @@ var _a;

* and a response promise.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async sendMessageStream(request) {
async sendMessageStream(request, requestOptions = {}) {
var _a, _b, _c, _d, _e, _f;

@@ -1146,3 +1161,4 @@ await this._sendPromise;

};
const streamPromise = generateContentStream(this._apiKey, this.model, generateContentRequest, this.requestOptions);
const chatSessionRequestOptions = Object.assign(Object.assign({}, this._requestOptions), requestOptions);
const streamPromise = generateContentStream(this._apiKey, this.model, generateContentRequest, chatSessionRequestOptions);
// Add onto the chain.

@@ -1204,4 +1220,4 @@ this._sendPromise = this._sendPromise

*/
async function countTokens(apiKey, model, params, requestOptions) {
const response = await makeModelRequest(model, Task.COUNT_TOKENS, apiKey, false, JSON.stringify(params), requestOptions);
async function countTokens(apiKey, model, params, singleRequestOptions) {
const response = await makeModelRequest(model, Task.COUNT_TOKENS, apiKey, false, JSON.stringify(params), singleRequestOptions);
return response.json();

@@ -1259,4 +1275,5 @@ }

class GenerativeModel {
constructor(apiKey, modelParams, requestOptions) {
constructor(apiKey, modelParams, _requestOptions = {}) {
this.apiKey = apiKey;
this._requestOptions = _requestOptions;
if (modelParams.model.includes("/")) {

@@ -1276,3 +1293,2 @@ // Models may be named "models/model-name" or "tunedModels/model-name"

this.cachedContent = modelParams.cachedContent;
this.requestOptions = requestOptions || {};
}

@@ -1282,18 +1298,28 @@ /**

* and returns an object containing a single {@link GenerateContentResponse}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async generateContent(request) {
async generateContent(request, requestOptions = {}) {
var _a;
const formattedParams = formatGenerateContentInput(request);
return generateContent(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction, cachedContent: (_a = this.cachedContent) === null || _a === void 0 ? void 0 : _a.name }, formattedParams), this.requestOptions);
const generativeModelRequestOptions = Object.assign(Object.assign({}, this._requestOptions), requestOptions);
return generateContent(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction, cachedContent: (_a = this.cachedContent) === null || _a === void 0 ? void 0 : _a.name }, formattedParams), generativeModelRequestOptions);
}
/**
* Makes a single streaming call to the model
* and returns an object containing an iterable stream that iterates
* over all chunks in the streaming response as well as
* a promise that returns the final aggregated response.
* Makes a single streaming call to the model and returns an object
* containing an iterable stream that iterates over all chunks in the
* streaming response as well as a promise that returns the final
* aggregated response.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async generateContentStream(request) {
async generateContentStream(request, requestOptions = {}) {
var _a;
const formattedParams = formatGenerateContentInput(request);
return generateContentStream(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction, cachedContent: (_a = this.cachedContent) === null || _a === void 0 ? void 0 : _a.name }, formattedParams), this.requestOptions);
const generativeModelRequestOptions = Object.assign(Object.assign({}, this._requestOptions), requestOptions);
return generateContentStream(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction, cachedContent: (_a = this.cachedContent) === null || _a === void 0 ? void 0 : _a.name }, formattedParams), generativeModelRequestOptions);
}

@@ -1310,4 +1336,8 @@ /**

* Counts the tokens in the provided request.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async countTokens(request) {
async countTokens(request, requestOptions = {}) {
const formattedParams = formatCountTokensInput(request, {

@@ -1322,16 +1352,27 @@ model: this.model,

});
return countTokens(this.apiKey, this.model, formattedParams, this.requestOptions);
const generativeModelRequestOptions = Object.assign(Object.assign({}, this._requestOptions), requestOptions);
return countTokens(this.apiKey, this.model, formattedParams, generativeModelRequestOptions);
}
/**
* Embeds the provided content.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async embedContent(request) {
async embedContent(request, requestOptions = {}) {
const formattedParams = formatEmbedContentInput(request);
return embedContent(this.apiKey, this.model, formattedParams, this.requestOptions);
const generativeModelRequestOptions = Object.assign(Object.assign({}, this._requestOptions), requestOptions);
return embedContent(this.apiKey, this.model, formattedParams, generativeModelRequestOptions);
}
/**
* Embeds an array of {@link EmbedContentRequest}s.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async batchEmbedContents(batchEmbedContentRequest) {
return batchEmbedContents(this.apiKey, this.model, batchEmbedContentRequest, this.requestOptions);
async batchEmbedContents(batchEmbedContentRequest, requestOptions = {}) {
const generativeModelRequestOptions = Object.assign(Object.assign({}, this._requestOptions), requestOptions);
return batchEmbedContents(this.apiKey, this.model, batchEmbedContentRequest, generativeModelRequestOptions);
}

@@ -1338,0 +1379,0 @@ }

@@ -72,3 +72,3 @@ 'use strict';

*/
const PACKAGE_VERSION = "0.15.0";
const PACKAGE_VERSION = "0.16.0";
const PACKAGE_LOG_HEADER = "genai-js";

@@ -251,10 +251,17 @@ var Task;

/**
* Get AbortSignal if timeout is specified
* Create an AbortSignal based on the timeout and signal in the
* RequestOptions.
*/
function getSignal(requestOptions) {
if ((requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeout) >= 0) {
const abortController = new AbortController();
const signal = abortController.signal;
setTimeout(() => abortController.abort(), requestOptions.timeout);
return signal;
if ((requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.signal) !== undefined || (requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeout) >= 0) {
const controller = new AbortController();
if ((requestOptions === null || requestOptions === void 0 ? void 0 : requestOptions.timeout) >= 0) {
setTimeout(() => controller.abort(), requestOptions.timeout);
}
if (requestOptions.signal) {
requestOptions.signal.addEventListener("abort", () => {
controller.abort();
});
}
return controller.signal;
}

@@ -284,3 +291,3 @@ }

class GoogleAIFileManager {
constructor(apiKey, _requestOptions) {
constructor(apiKey, _requestOptions = {}) {
this.apiKey = apiKey;

@@ -290,3 +297,3 @@ this._requestOptions = _requestOptions;

/**
* Upload a file
* Upload a file.
*/

@@ -320,6 +327,11 @@ async uploadFile(filePath, fileMetadata) {

/**
* List all uploaded files
* List all uploaded files.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
async listFiles(listParams) {
const url = new FilesRequestUrl(RpcTask.LIST, this.apiKey, this._requestOptions);
async listFiles(listParams, requestOptions = {}) {
const filesRequestOptions = Object.assign(Object.assign({}, this._requestOptions), requestOptions);
const url = new FilesRequestUrl(RpcTask.LIST, this.apiKey, filesRequestOptions);
if (listParams === null || listParams === void 0 ? void 0 : listParams.pageSize) {

@@ -336,6 +348,11 @@ url.appendParam("pageSize", listParams.pageSize.toString());

/**
* Get metadata for file with given ID
* Get metadata for file with given ID.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
async getFile(fileId) {
const url = new FilesRequestUrl(RpcTask.GET, this.apiKey, this._requestOptions);
async getFile(fileId, requestOptions = {}) {
const filesRequestOptions = Object.assign(Object.assign({}, this._requestOptions), requestOptions);
const url = new FilesRequestUrl(RpcTask.GET, this.apiKey, filesRequestOptions);
url.appendPath(parseFileId(fileId));

@@ -347,3 +364,3 @@ const uploadHeaders = getHeaders(url);

/**
* Delete file with given ID
* Delete file with given ID.
*/

@@ -350,0 +367,0 @@ async deleteFile(fileId) {

@@ -495,18 +495,26 @@ /**

apiKey: string;
private _requestOptions?;
private _requestOptions;
constructor(apiKey: string, _requestOptions?: RequestOptions);
/**
* Upload a file
* Upload a file.
*/
uploadFile(filePath: string, fileMetadata: FileMetadata): Promise<UploadFileResponse>;
/**
* List all uploaded files
* List all uploaded files.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
listFiles(listParams?: ListParams): Promise<ListFilesResponse>;
listFiles(listParams?: ListParams, requestOptions?: SingleRequestOptions): Promise<ListFilesResponse>;
/**
* Get metadata for file with given ID
* Get metadata for file with given ID.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
getFile(fileId: string): Promise<FileMetadataResponse>;
getFile(fileId: string, requestOptions?: SingleRequestOptions): Promise<FileMetadataResponse>;
/**
* Delete file with given ID
* Delete file with given ID.
*/

@@ -676,2 +684,18 @@ deleteFile(fileId: string): Promise<void>;

/**
* Params passed to atomic asynchronous operations.
* @public
*/
declare interface SingleRequestOptions extends RequestOptions {
/**
* An object that may be used to abort asynchronous requests. The request may
* also be aborted due to the expiration of the timeout value, if provided.
*
* NOTE: AbortSignal is a client-only operation. Using it to cancel an
* operation will not cancel the request in the service. You will still
* be charged usage for any applicable operations.
*/
signal?: AbortSignal;
}
/**
* Content part interface if the part represents a text string.

@@ -678,0 +702,0 @@ * @public

@@ -17,3 +17,3 @@ /**

*/
import { Content, GenerateContentResult, GenerateContentStreamResult, Part, RequestOptions, StartChatParams } from "../../types";
import { Content, GenerateContentResult, GenerateContentStreamResult, Part, RequestOptions, SingleRequestOptions, StartChatParams } from "../../types";
/**

@@ -28,7 +28,7 @@ * ChatSession class that enables sending chat messages and stores

params?: StartChatParams;
requestOptions?: RequestOptions;
private _requestOptions;
private _apiKey;
private _history;
private _sendPromise;
constructor(apiKey: string, model: string, params?: StartChatParams, requestOptions?: RequestOptions);
constructor(apiKey: string, model: string, params?: StartChatParams, _requestOptions?: RequestOptions);
/**

@@ -42,5 +42,9 @@ * Gets the chat history so far. Blocked prompts are not added to history.

* Sends a chat message and receives a non-streaming
* {@link GenerateContentResult}
* {@link GenerateContentResult}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
sendMessage(request: string | Array<string | Part>): Promise<GenerateContentResult>;
sendMessage(request: string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;
/**

@@ -50,4 +54,8 @@ * Sends a chat message and receives the response as a

* and a response promise.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
sendMessageStream(request: string | Array<string | Part>): Promise<GenerateContentStreamResult>;
sendMessageStream(request: string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentStreamResult>;
}

@@ -17,3 +17,3 @@ /**

*/
import { CountTokensRequest, CountTokensResponse, RequestOptions } from "../../types";
export declare function countTokens(apiKey: string, model: string, params: CountTokensRequest, requestOptions?: RequestOptions): Promise<CountTokensResponse>;
import { CountTokensRequest, CountTokensResponse, SingleRequestOptions } from "../../types";
export declare function countTokens(apiKey: string, model: string, params: CountTokensRequest, singleRequestOptions: SingleRequestOptions): Promise<CountTokensResponse>;

@@ -17,4 +17,4 @@ /**

*/
import { GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, RequestOptions } from "../../types";
export declare function generateContentStream(apiKey: string, model: string, params: GenerateContentRequest, requestOptions?: RequestOptions): Promise<GenerateContentStreamResult>;
export declare function generateContent(apiKey: string, model: string, params: GenerateContentRequest, requestOptions?: RequestOptions): Promise<GenerateContentResult>;
import { GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, SingleRequestOptions } from "../../types";
export declare function generateContentStream(apiKey: string, model: string, params: GenerateContentRequest, requestOptions: SingleRequestOptions): Promise<GenerateContentStreamResult>;
export declare function generateContent(apiKey: string, model: string, params: GenerateContentRequest, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;

@@ -17,3 +17,3 @@ /**

*/
import { BatchEmbedContentsRequest, BatchEmbedContentsResponse, CachedContent, Content, CountTokensRequest, CountTokensResponse, EmbedContentRequest, EmbedContentResponse, GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, GenerationConfig, ModelParams, Part, RequestOptions, SafetySetting, StartChatParams, Tool, ToolConfig } from "../../types";
import { BatchEmbedContentsRequest, BatchEmbedContentsResponse, CachedContent, Content, CountTokensRequest, CountTokensResponse, EmbedContentRequest, EmbedContentResponse, GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, GenerationConfig, ModelParams, Part, RequestOptions, SafetySetting, SingleRequestOptions, StartChatParams, Tool, ToolConfig } from "../../types";
import { ChatSession } from "../methods/chat-session";

@@ -26,2 +26,3 @@ /**

apiKey: string;
private _requestOptions;
model: string;

@@ -35,15 +36,23 @@ generationConfig: GenerationConfig;

cachedContent: CachedContent;
constructor(apiKey: string, modelParams: ModelParams, requestOptions?: RequestOptions);
constructor(apiKey: string, modelParams: ModelParams, _requestOptions?: RequestOptions);
/**
* Makes a single non-streaming call to the model
* and returns an object containing a single {@link GenerateContentResponse}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
generateContent(request: GenerateContentRequest | string | Array<string | Part>): Promise<GenerateContentResult>;
generateContent(request: GenerateContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;
/**
* Makes a single streaming call to the model
* and returns an object containing an iterable stream that iterates
* over all chunks in the streaming response as well as
* a promise that returns the final aggregated response.
* Makes a single streaming call to the model and returns an object
* containing an iterable stream that iterates over all chunks in the
* streaming response as well as a promise that returns the final
* aggregated response.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
generateContentStream(request: GenerateContentRequest | string | Array<string | Part>): Promise<GenerateContentStreamResult>;
generateContentStream(request: GenerateContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentStreamResult>;
/**

@@ -56,12 +65,24 @@ * Gets a new {@link ChatSession} instance which can be used for

* Counts the tokens in the provided request.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
countTokens(request: CountTokensRequest | string | Array<string | Part>): Promise<CountTokensResponse>;
countTokens(request: CountTokensRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<CountTokensResponse>;
/**
* Embeds the provided content.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
embedContent(request: EmbedContentRequest | string | Array<string | Part>): Promise<EmbedContentResponse>;
embedContent(request: EmbedContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<EmbedContentResponse>;
/**
* Embeds an array of {@link EmbedContentRequest}s.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
batchEmbedContents(batchEmbedContentRequest: BatchEmbedContentsRequest): Promise<BatchEmbedContentsResponse>;
batchEmbedContents(batchEmbedContentRequest: BatchEmbedContentsRequest, requestOptions?: SingleRequestOptions): Promise<BatchEmbedContentsResponse>;
}

@@ -17,3 +17,3 @@ /**

*/
import { RequestOptions } from "../../types";
import { RequestOptions, SingleRequestOptions } from "../../types";
export declare const DEFAULT_BASE_URL = "https://generativelanguage.googleapis.com";

@@ -42,7 +42,7 @@ export declare const DEFAULT_API_VERSION = "v1beta";

export declare function getHeaders(url: RequestUrl): Promise<Headers>;
export declare function constructModelRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions?: RequestOptions): Promise<{
export declare function constructModelRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions: SingleRequestOptions): Promise<{
url: string;
fetchOptions: RequestInit;
}>;
export declare function makeModelRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions?: RequestOptions, fetchFn?: typeof fetch): Promise<Response>;
export declare function makeModelRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions?: SingleRequestOptions, fetchFn?: typeof fetch): Promise<Response>;
export declare function makeRequest(url: string, fetchOptions: RequestInit, fetchFn?: typeof fetch): Promise<Response>;

@@ -17,3 +17,3 @@ /**

*/
import { RequestOptions } from "../../types";
import { RequestOptions, SingleRequestOptions } from "../../types";
import { FileMetadata, FileMetadataResponse, ListFilesResponse, ListParams, UploadFileResponse } from "../../types/server";

@@ -30,18 +30,26 @@ export interface UploadMetadata {

apiKey: string;
private _requestOptions?;
private _requestOptions;
constructor(apiKey: string, _requestOptions?: RequestOptions);
/**
* Upload a file
* Upload a file.
*/
uploadFile(filePath: string, fileMetadata: FileMetadata): Promise<UploadFileResponse>;
/**
* List all uploaded files
* List all uploaded files.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
listFiles(listParams?: ListParams): Promise<ListFilesResponse>;
listFiles(listParams?: ListParams, requestOptions?: SingleRequestOptions): Promise<ListFilesResponse>;
/**
* Get metadata for file with given ID
* Get metadata for file with given ID.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
getFile(fileId: string): Promise<FileMetadataResponse>;
getFile(fileId: string, requestOptions?: SingleRequestOptions): Promise<FileMetadataResponse>;
/**
* Delete file with given ID
* Delete file with given ID.
*/

@@ -48,0 +56,0 @@ deleteFile(fileId: string): Promise<void>;

@@ -17,3 +17,3 @@ /**

*/
import { RequestOptions } from "../../types";
import { RequestOptions, SingleRequestOptions } from "../../types";
import { RpcTask } from "./constants";

@@ -23,5 +23,5 @@ export declare class ServerRequestUrl {

apiKey: string;
requestOptions?: RequestOptions;
requestOptions?: SingleRequestOptions;
protected _url: URL;
constructor(task: RpcTask, apiKey: string, requestOptions?: RequestOptions);
constructor(task: RpcTask, apiKey: string, requestOptions?: SingleRequestOptions);
appendPath(path: string): void;

@@ -28,0 +28,0 @@ appendParam(key: string, value: string): void;

@@ -174,2 +174,17 @@ /**

/**
* Params passed to atomic asynchronous operations.
* @public
*/
export interface SingleRequestOptions extends RequestOptions {
/**
* An object that may be used to abort asynchronous requests. The request may
* also be aborted due to the expiration of the timeout value, if provided.
*
* NOTE: AbortSignal is a client-only operation. Using it to cancel an
* operation will not cancel the request in the service. You will still
* be charged usage for any applicable operations.
*/
signal?: AbortSignal;
}
/**
* Defines a tool that model can call to access external knowledge.

@@ -176,0 +191,0 @@ * @public

@@ -17,3 +17,3 @@ /**

*/
import { Content, GenerateContentResult, GenerateContentStreamResult, Part, RequestOptions, StartChatParams } from "../../types";
import { Content, GenerateContentResult, GenerateContentStreamResult, Part, RequestOptions, SingleRequestOptions, StartChatParams } from "../../types";
/**

@@ -28,7 +28,7 @@ * ChatSession class that enables sending chat messages and stores

params?: StartChatParams;
requestOptions?: RequestOptions;
private _requestOptions;
private _apiKey;
private _history;
private _sendPromise;
constructor(apiKey: string, model: string, params?: StartChatParams, requestOptions?: RequestOptions);
constructor(apiKey: string, model: string, params?: StartChatParams, _requestOptions?: RequestOptions);
/**

@@ -42,5 +42,9 @@ * Gets the chat history so far. Blocked prompts are not added to history.

* Sends a chat message and receives a non-streaming
* {@link GenerateContentResult}
* {@link GenerateContentResult}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
sendMessage(request: string | Array<string | Part>): Promise<GenerateContentResult>;
sendMessage(request: string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;
/**

@@ -50,4 +54,8 @@ * Sends a chat message and receives the response as a

* and a response promise.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
sendMessageStream(request: string | Array<string | Part>): Promise<GenerateContentStreamResult>;
sendMessageStream(request: string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentStreamResult>;
}

@@ -17,3 +17,3 @@ /**

*/
import { CountTokensRequest, CountTokensResponse, RequestOptions } from "../../types";
export declare function countTokens(apiKey: string, model: string, params: CountTokensRequest, requestOptions?: RequestOptions): Promise<CountTokensResponse>;
import { CountTokensRequest, CountTokensResponse, SingleRequestOptions } from "../../types";
export declare function countTokens(apiKey: string, model: string, params: CountTokensRequest, singleRequestOptions: SingleRequestOptions): Promise<CountTokensResponse>;

@@ -17,4 +17,4 @@ /**

*/
import { GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, RequestOptions } from "../../types";
export declare function generateContentStream(apiKey: string, model: string, params: GenerateContentRequest, requestOptions?: RequestOptions): Promise<GenerateContentStreamResult>;
export declare function generateContent(apiKey: string, model: string, params: GenerateContentRequest, requestOptions?: RequestOptions): Promise<GenerateContentResult>;
import { GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, SingleRequestOptions } from "../../types";
export declare function generateContentStream(apiKey: string, model: string, params: GenerateContentRequest, requestOptions: SingleRequestOptions): Promise<GenerateContentStreamResult>;
export declare function generateContent(apiKey: string, model: string, params: GenerateContentRequest, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;

@@ -17,3 +17,3 @@ /**

*/
import { BatchEmbedContentsRequest, BatchEmbedContentsResponse, CachedContent, Content, CountTokensRequest, CountTokensResponse, EmbedContentRequest, EmbedContentResponse, GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, GenerationConfig, ModelParams, Part, RequestOptions, SafetySetting, StartChatParams, Tool, ToolConfig } from "../../types";
import { BatchEmbedContentsRequest, BatchEmbedContentsResponse, CachedContent, Content, CountTokensRequest, CountTokensResponse, EmbedContentRequest, EmbedContentResponse, GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, GenerationConfig, ModelParams, Part, RequestOptions, SafetySetting, SingleRequestOptions, StartChatParams, Tool, ToolConfig } from "../../types";
import { ChatSession } from "../methods/chat-session";

@@ -26,2 +26,3 @@ /**

apiKey: string;
private _requestOptions;
model: string;

@@ -35,15 +36,23 @@ generationConfig: GenerationConfig;

cachedContent: CachedContent;
constructor(apiKey: string, modelParams: ModelParams, requestOptions?: RequestOptions);
constructor(apiKey: string, modelParams: ModelParams, _requestOptions?: RequestOptions);
/**
* Makes a single non-streaming call to the model
* and returns an object containing a single {@link GenerateContentResponse}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
generateContent(request: GenerateContentRequest | string | Array<string | Part>): Promise<GenerateContentResult>;
generateContent(request: GenerateContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;
/**
* Makes a single streaming call to the model
* and returns an object containing an iterable stream that iterates
* over all chunks in the streaming response as well as
* a promise that returns the final aggregated response.
* Makes a single streaming call to the model and returns an object
* containing an iterable stream that iterates over all chunks in the
* streaming response as well as a promise that returns the final
* aggregated response.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
generateContentStream(request: GenerateContentRequest | string | Array<string | Part>): Promise<GenerateContentStreamResult>;
generateContentStream(request: GenerateContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentStreamResult>;
/**

@@ -56,12 +65,24 @@ * Gets a new {@link ChatSession} instance which can be used for

* Counts the tokens in the provided request.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
countTokens(request: CountTokensRequest | string | Array<string | Part>): Promise<CountTokensResponse>;
countTokens(request: CountTokensRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<CountTokensResponse>;
/**
* Embeds the provided content.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
embedContent(request: EmbedContentRequest | string | Array<string | Part>): Promise<EmbedContentResponse>;
embedContent(request: EmbedContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<EmbedContentResponse>;
/**
* Embeds an array of {@link EmbedContentRequest}s.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
batchEmbedContents(batchEmbedContentRequest: BatchEmbedContentsRequest): Promise<BatchEmbedContentsResponse>;
batchEmbedContents(batchEmbedContentRequest: BatchEmbedContentsRequest, requestOptions?: SingleRequestOptions): Promise<BatchEmbedContentsResponse>;
}

@@ -17,3 +17,3 @@ /**

*/
import { RequestOptions } from "../../types";
import { RequestOptions, SingleRequestOptions } from "../../types";
export declare const DEFAULT_BASE_URL = "https://generativelanguage.googleapis.com";

@@ -42,7 +42,7 @@ export declare const DEFAULT_API_VERSION = "v1beta";

export declare function getHeaders(url: RequestUrl): Promise<Headers>;
export declare function constructModelRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions?: RequestOptions): Promise<{
export declare function constructModelRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions: SingleRequestOptions): Promise<{
url: string;
fetchOptions: RequestInit;
}>;
export declare function makeModelRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions?: RequestOptions, fetchFn?: typeof fetch): Promise<Response>;
export declare function makeModelRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions?: SingleRequestOptions, fetchFn?: typeof fetch): Promise<Response>;
export declare function makeRequest(url: string, fetchOptions: RequestInit, fetchFn?: typeof fetch): Promise<Response>;

@@ -17,3 +17,3 @@ /**

*/
import { RequestOptions } from "../../types";
import { RequestOptions, SingleRequestOptions } from "../../types";
import { FileMetadata, FileMetadataResponse, ListFilesResponse, ListParams, UploadFileResponse } from "../../types/server";

@@ -30,18 +30,26 @@ export interface UploadMetadata {

apiKey: string;
private _requestOptions?;
private _requestOptions;
constructor(apiKey: string, _requestOptions?: RequestOptions);
/**
* Upload a file
* Upload a file.
*/
uploadFile(filePath: string, fileMetadata: FileMetadata): Promise<UploadFileResponse>;
/**
* List all uploaded files
* List all uploaded files.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
listFiles(listParams?: ListParams): Promise<ListFilesResponse>;
listFiles(listParams?: ListParams, requestOptions?: SingleRequestOptions): Promise<ListFilesResponse>;
/**
* Get metadata for file with given ID
* Get metadata for file with given ID.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
getFile(fileId: string): Promise<FileMetadataResponse>;
getFile(fileId: string, requestOptions?: SingleRequestOptions): Promise<FileMetadataResponse>;
/**
* Delete file with given ID
* Delete file with given ID.
*/

@@ -48,0 +56,0 @@ deleteFile(fileId: string): Promise<void>;

@@ -17,3 +17,3 @@ /**

*/
import { RequestOptions } from "../../types";
import { RequestOptions, SingleRequestOptions } from "../../types";
import { RpcTask } from "./constants";

@@ -23,5 +23,5 @@ export declare class ServerRequestUrl {

apiKey: string;
requestOptions?: RequestOptions;
requestOptions?: SingleRequestOptions;
protected _url: URL;
constructor(task: RpcTask, apiKey: string, requestOptions?: RequestOptions);
constructor(task: RpcTask, apiKey: string, requestOptions?: SingleRequestOptions);
appendPath(path: string): void;

@@ -28,0 +28,0 @@ appendParam(key: string, value: string): void;

@@ -174,2 +174,17 @@ /**

/**
* Params passed to atomic asynchronous operations.
* @public
*/
export interface SingleRequestOptions extends RequestOptions {
/**
* An object that may be used to abort asynchronous requests. The request may
* also be aborted due to the expiration of the timeout value, if provided.
*
* NOTE: AbortSignal is a client-only operation. Using it to cancel an
* operation will not cancel the request in the service. You will still
* be charged usage for any applicable operations.
*/
signal?: AbortSignal;
}
/**
* Defines a tool that model can call to access external knowledge.

@@ -176,0 +191,0 @@ * @public

{
"name": "@google/generative-ai",
"version": "0.15.0",
"version": "0.16.0",
"description": "Google AI JavaScript SDK",

@@ -5,0 +5,0 @@ "main": "dist/index.js",

@@ -25,2 +25,3 @@ /**

RequestOptions,
SingleRequestOptions,
StartChatParams,

@@ -53,3 +54,3 @@ } from "../../types";

public params?: StartChatParams,
public requestOptions?: RequestOptions,
private _requestOptions: RequestOptions = {},
) {

@@ -75,6 +76,11 @@ this._apiKey = apiKey;

* Sends a chat message and receives a non-streaming
* {@link GenerateContentResult}
* {@link GenerateContentResult}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async sendMessage(
request: string | Array<string | Part>,
requestOptions: SingleRequestOptions = {},
): Promise<GenerateContentResult> {

@@ -92,2 +98,6 @@ await this._sendPromise;

};
const chatSessionRequestOptions: SingleRequestOptions = {
...this._requestOptions,
...requestOptions,
};
let finalResult;

@@ -101,3 +111,3 @@ // Add onto the chain.

generateContentRequest,
this.requestOptions,
chatSessionRequestOptions,
),

@@ -136,5 +146,10 @@ )

* and a response promise.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async sendMessageStream(
request: string | Array<string | Part>,
requestOptions: SingleRequestOptions = {},
): Promise<GenerateContentStreamResult> {

@@ -152,2 +167,6 @@ await this._sendPromise;

};
const chatSessionRequestOptions: SingleRequestOptions = {
...this._requestOptions,
...requestOptions,
};
const streamPromise = generateContentStream(

@@ -157,3 +176,3 @@ this._apiKey,

generateContentRequest,
this.requestOptions,
chatSessionRequestOptions,
);

@@ -160,0 +179,0 @@

@@ -21,3 +21,3 @@ /**

CountTokensResponse,
RequestOptions,
SingleRequestOptions,
} from "../../types";

@@ -30,3 +30,3 @@ import { Task, makeModelRequest } from "../requests/request";

params: CountTokensRequest,
requestOptions?: RequestOptions,
singleRequestOptions: SingleRequestOptions,
): Promise<CountTokensResponse> {

@@ -39,5 +39,5 @@ const response = await makeModelRequest(

JSON.stringify(params),
requestOptions,
singleRequestOptions,
);
return response.json();
}

@@ -23,3 +23,3 @@ /**

GenerateContentStreamResult,
RequestOptions,
SingleRequestOptions,
} from "../../types";

@@ -34,3 +34,3 @@ import { Task, makeModelRequest } from "../requests/request";

params: GenerateContentRequest,
requestOptions?: RequestOptions,
requestOptions: SingleRequestOptions,
): Promise<GenerateContentStreamResult> {

@@ -52,3 +52,3 @@ const response = await makeModelRequest(

params: GenerateContentRequest,
requestOptions?: RequestOptions,
requestOptions?: SingleRequestOptions,
): Promise<GenerateContentResult> {

@@ -55,0 +55,0 @@ const response = await makeModelRequest(

@@ -39,2 +39,3 @@ /**

SafetySetting,
SingleRequestOptions,
StartChatParams,

@@ -71,3 +72,3 @@ Tool,

modelParams: ModelParams,
requestOptions?: RequestOptions,
private _requestOptions: RequestOptions = {},
) {

@@ -89,3 +90,2 @@ if (modelParams.model.includes("/")) {

this.cachedContent = modelParams.cachedContent;
this.requestOptions = requestOptions || {};
}

@@ -96,7 +96,16 @@

* and returns an object containing a single {@link GenerateContentResponse}.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async generateContent(
request: GenerateContentRequest | string | Array<string | Part>,
requestOptions: SingleRequestOptions = {},
): Promise<GenerateContentResult> {
const formattedParams = formatGenerateContentInput(request);
const generativeModelRequestOptions: SingleRequestOptions = {
...this._requestOptions,
...requestOptions,
};
return generateContent(

@@ -114,3 +123,3 @@ this.apiKey,

},
this.requestOptions,
generativeModelRequestOptions,
);

@@ -120,11 +129,20 @@ }

/**
* Makes a single streaming call to the model
* and returns an object containing an iterable stream that iterates
* over all chunks in the streaming response as well as
* a promise that returns the final aggregated response.
* Makes a single streaming call to the model and returns an object
* containing an iterable stream that iterates over all chunks in the
* streaming response as well as a promise that returns the final
* aggregated response.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async generateContentStream(
request: GenerateContentRequest | string | Array<string | Part>,
requestOptions: SingleRequestOptions = {},
): Promise<GenerateContentStreamResult> {
const formattedParams = formatGenerateContentInput(request);
const generativeModelRequestOptions: SingleRequestOptions = {
...this._requestOptions,
...requestOptions,
};
return generateContentStream(

@@ -142,3 +160,3 @@ this.apiKey,

},
this.requestOptions,
generativeModelRequestOptions,
);

@@ -170,5 +188,10 @@ }

* Counts the tokens in the provided request.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async countTokens(
request: CountTokensRequest | string | Array<string | Part>,
requestOptions: SingleRequestOptions = {},
): Promise<CountTokensResponse> {

@@ -184,2 +207,6 @@ const formattedParams = formatCountTokensInput(request, {

});
const generativeModelRequestOptions: SingleRequestOptions = {
...this._requestOptions,
...requestOptions,
};
return countTokens(

@@ -189,3 +216,3 @@ this.apiKey,

formattedParams,
this.requestOptions,
generativeModelRequestOptions,
);

@@ -196,7 +223,16 @@ }

* Embeds the provided content.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async embedContent(
request: EmbedContentRequest | string | Array<string | Part>,
requestOptions: SingleRequestOptions = {},
): Promise<EmbedContentResponse> {
const formattedParams = formatEmbedContentInput(request);
const generativeModelRequestOptions: SingleRequestOptions = {
...this._requestOptions,
...requestOptions,
};
return embedContent(

@@ -206,3 +242,3 @@ this.apiKey,

formattedParams,
this.requestOptions,
generativeModelRequestOptions,
);

@@ -213,6 +249,15 @@ }

* Embeds an array of {@link EmbedContentRequest}s.
*
* Fields set in the optional {@link SingleRequestOptions} parameter will
* take precedence over the {@link RequestOptions} values provided at the
* time of the {@link GoogleAIFileManager} initialization.
*/
async batchEmbedContents(
batchEmbedContentRequest: BatchEmbedContentsRequest,
requestOptions: SingleRequestOptions = {},
): Promise<BatchEmbedContentsResponse> {
const generativeModelRequestOptions: SingleRequestOptions = {
...this._requestOptions,
...requestOptions,
};
return batchEmbedContents(

@@ -222,5 +267,5 @@ this.apiKey,

batchEmbedContentRequest,
this.requestOptions,
generativeModelRequestOptions,
);
}
}

@@ -18,3 +18,3 @@ /**

import { RequestOptions } from "../../types";
import { RequestOptions, SingleRequestOptions } from "../../types";
import {

@@ -120,3 +120,3 @@ GoogleGenerativeAIError,

body: string,
requestOptions?: RequestOptions,
requestOptions: SingleRequestOptions,
): Promise<{ url: string; fetchOptions: RequestInit }> {

@@ -141,3 +141,3 @@ const url = new RequestUrl(model, task, apiKey, stream, requestOptions);

body: string,
requestOptions?: RequestOptions,
requestOptions: SingleRequestOptions = {},
// Allows this to be stubbed for tests

@@ -223,11 +223,17 @@ fetchFn = fetch,

*/
function buildFetchOptions(requestOptions?: RequestOptions): RequestInit {
function buildFetchOptions(requestOptions?: SingleRequestOptions): RequestInit {
const fetchOptions = {} as RequestInit;
if (requestOptions?.timeout >= 0) {
const abortController = new AbortController();
const signal = abortController.signal;
setTimeout(() => abortController.abort(), requestOptions.timeout);
fetchOptions.signal = signal;
if (requestOptions?.signal !== undefined || requestOptions?.timeout >= 0) {
const controller = new AbortController();
if (requestOptions?.timeout >= 0) {
setTimeout(() => controller.abort(), requestOptions.timeout);
}
if (requestOptions?.signal) {
requestOptions.signal.addEventListener("abort", () => {
controller.abort();
});
}
fetchOptions.signal = controller.signal;
}
return fetchOptions;
}

@@ -18,3 +18,3 @@ /**

import { RequestOptions } from "../../types";
import { RequestOptions, SingleRequestOptions } from "../../types";
import { readFileSync } from "fs";

@@ -48,7 +48,7 @@ import { FilesRequestUrl, getHeaders, makeServerRequest } from "./request";

public apiKey: string,
private _requestOptions?: RequestOptions,
private _requestOptions: RequestOptions = {},
) {}
/**
* Upload a file
* Upload a file.
*/

@@ -98,9 +98,20 @@ async uploadFile(

/**
* List all uploaded files
* List all uploaded files.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
async listFiles(listParams?: ListParams): Promise<ListFilesResponse> {
async listFiles(
listParams?: ListParams,
requestOptions: SingleRequestOptions = {},
): Promise<ListFilesResponse> {
const filesRequestOptions: SingleRequestOptions = {
...this._requestOptions,
...requestOptions,
};
const url = new FilesRequestUrl(
RpcTask.LIST,
this.apiKey,
this._requestOptions,
filesRequestOptions,
);

@@ -119,9 +130,20 @@ if (listParams?.pageSize) {

/**
* Get metadata for file with given ID
* Get metadata for file with given ID.
*
* Any fields set in the optional {@link SingleRequestOptions} parameter will take
* precedence over the {@link RequestOptions} values provided at the time of the
* {@link GoogleAIFileManager} initialization.
*/
async getFile(fileId: string): Promise<FileMetadataResponse> {
async getFile(
fileId: string,
requestOptions: SingleRequestOptions = {},
): Promise<FileMetadataResponse> {
const filesRequestOptions: SingleRequestOptions = {
...this._requestOptions,
...requestOptions,
};
const url = new FilesRequestUrl(
RpcTask.GET,
this.apiKey,
this._requestOptions,
filesRequestOptions,
);

@@ -135,3 +157,3 @@ url.appendPath(parseFileId(fileId));

/**
* Delete file with given ID
* Delete file with given ID.
*/

@@ -138,0 +160,0 @@ async deleteFile(fileId: string): Promise<void> {

@@ -24,3 +24,3 @@ /**

} from "../requests/request";
import { RequestOptions } from "../../types";
import { RequestOptions, SingleRequestOptions } from "../../types";
import { RpcTask } from "./constants";

@@ -42,3 +42,3 @@

public apiKey: string,
public requestOptions?: RequestOptions,
public requestOptions?: SingleRequestOptions,
) {}

@@ -123,11 +123,18 @@

/**
* Get AbortSignal if timeout is specified
* Create an AbortSignal based on the timeout and signal in the
* RequestOptions.
*/
function getSignal(requestOptions?: RequestOptions): AbortSignal | null {
if (requestOptions?.timeout >= 0) {
const abortController = new AbortController();
const signal = abortController.signal;
setTimeout(() => abortController.abort(), requestOptions.timeout);
return signal;
function getSignal(requestOptions?: SingleRequestOptions): AbortSignal | null {
if (requestOptions?.signal !== undefined || requestOptions?.timeout >= 0) {
const controller = new AbortController();
if (requestOptions?.timeout >= 0) {
setTimeout(() => controller.abort(), requestOptions.timeout);
}
if (requestOptions.signal) {
requestOptions.signal.addEventListener("abort", () => {
controller.abort();
});
}
return controller.signal;
}
}

@@ -358,4 +358,5 @@ ## API Report File for "@google/generative-ai"

deleteFile(fileId: string): Promise<void>;
getFile(fileId: string): Promise<FileMetadataResponse>;
listFiles(listParams?: ListParams): Promise<ListFilesResponse>;
getFile(fileId: string, requestOptions?: SingleRequestOptions): Promise<FileMetadataResponse>;
// Warning: (ae-forgotten-export) The symbol "SingleRequestOptions" needs to be exported by the entry point index.d.ts
listFiles(listParams?: ListParams, requestOptions?: SingleRequestOptions): Promise<ListFilesResponse>;
uploadFile(filePath: string, fileMetadata: FileMetadata): Promise<UploadFileResponse>;

@@ -362,0 +363,0 @@ }

@@ -65,3 +65,3 @@ ## API Report File for "@google/generative-ai"

export class ChatSession {
constructor(apiKey: string, model: string, params?: StartChatParams, requestOptions?: RequestOptions);
constructor(apiKey: string, model: string, params?: StartChatParams, _requestOptions?: RequestOptions);
getHistory(): Promise<Content[]>;

@@ -72,6 +72,4 @@ // (undocumented)

params?: StartChatParams;
// (undocumented)
requestOptions?: RequestOptions;
sendMessage(request: string | Array<string | Part>): Promise<GenerateContentResult>;
sendMessageStream(request: string | Array<string | Part>): Promise<GenerateContentStreamResult>;
sendMessage(request: string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;
sendMessageStream(request: string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentStreamResult>;
}

@@ -467,12 +465,12 @@

export class GenerativeModel {
constructor(apiKey: string, modelParams: ModelParams, requestOptions?: RequestOptions);
constructor(apiKey: string, modelParams: ModelParams, _requestOptions?: RequestOptions);
// (undocumented)
apiKey: string;
batchEmbedContents(batchEmbedContentRequest: BatchEmbedContentsRequest): Promise<BatchEmbedContentsResponse>;
batchEmbedContents(batchEmbedContentRequest: BatchEmbedContentsRequest, requestOptions?: SingleRequestOptions): Promise<BatchEmbedContentsResponse>;
// (undocumented)
cachedContent: CachedContent;
countTokens(request: CountTokensRequest | string | Array<string | Part>): Promise<CountTokensResponse>;
embedContent(request: EmbedContentRequest | string | Array<string | Part>): Promise<EmbedContentResponse>;
generateContent(request: GenerateContentRequest | string | Array<string | Part>): Promise<GenerateContentResult>;
generateContentStream(request: GenerateContentRequest | string | Array<string | Part>): Promise<GenerateContentStreamResult>;
countTokens(request: CountTokensRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<CountTokensResponse>;
embedContent(request: EmbedContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<EmbedContentResponse>;
generateContent(request: GenerateContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentResult>;
generateContentStream(request: GenerateContentRequest | string | Array<string | Part>, requestOptions?: SingleRequestOptions): Promise<GenerateContentStreamResult>;
// (undocumented)

@@ -674,2 +672,7 @@ generationConfig: GenerationConfig;

// @public
export interface SingleRequestOptions extends RequestOptions {
signal?: AbortSignal;
}
// @public
export interface StartChatParams extends BaseParams {

@@ -676,0 +679,0 @@ cachedContent?: string;

@@ -193,2 +193,18 @@ /**

/**
* Params passed to atomic asynchronous operations.
* @public
*/
export interface SingleRequestOptions extends RequestOptions {
/**
* An object that may be used to abort asynchronous requests. The request may
* also be aborted due to the expiration of the timeout value, if provided.
*
* NOTE: AbortSignal is a client-only operation. Using it to cancel an
* operation will not cancel the request in the service. You will still
* be charged usage for any applicable operations.
*/
signal?: AbortSignal;
}
/**
* Defines a tool that model can call to access external knowledge.

@@ -195,0 +211,0 @@ * @public

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc