Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@google/generative-ai

Package Overview
Dependencies
Maintainers
4
Versions
36
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@google/generative-ai - npm Package Compare versions

Comparing version 0.5.0 to 0.6.0

36

dist/generative-ai.d.ts

@@ -205,2 +205,20 @@ /**

/**
* @public
*/
export declare interface FunctionCallingConfig {
mode?: FunctionCallingMode;
allowedFunctionNames?: string[];
}
/**
* @public
*/
export declare enum FunctionCallingMode {
MODE_UNSPECIFIED = "MODE_UNSPECIFIED",
AUTO = "AUTO",
ANY = "ANY",
NONE = "NONE"
}
/**
* Content part interface if the part represents FunctionResponse.

@@ -393,2 +411,4 @@ * @public

tools?: Tool[];
toolConfig?: ToolConfig;
systemInstruction?: Content;
}

@@ -466,2 +486,4 @@

tools?: Tool[];
toolConfig?: ToolConfig;
systemInstruction?: Content;
constructor(apiKey: string, modelParams: ModelParams, requestOptions?: RequestOptions);

@@ -566,2 +588,4 @@ /**

tools?: Tool[];
toolConfig?: ToolConfig;
systemInstruction?: Content;
}

@@ -579,3 +603,3 @@

*/
export declare const POSSIBLE_ROLES: readonly ["user", "model", "function"];
export declare const POSSIBLE_ROLES: readonly ["user", "model", "function", "system"];

@@ -643,2 +667,4 @@ /**

tools?: Tool[];
toolConfig?: ToolConfig;
systemInstruction?: Content;
}

@@ -676,2 +702,10 @@

/**
* Tool config. This config is shared for all tools provided in the request.
* @public
*/
export declare interface ToolConfig {
functionCallingConfig: FunctionCallingConfig;
}
export { }

95

dist/index.js

@@ -23,3 +23,3 @@ 'use strict';

*/
const POSSIBLE_ROLES = ["user", "model", "function"];
const POSSIBLE_ROLES = ["user", "model", "function", "system"];
/**

@@ -116,2 +116,21 @@ * Harm categories that would cause prompts or candidates to be blocked.

})(exports.TaskType || (exports.TaskType = {}));
/**
* @public
*/
exports.FunctionCallingMode = void 0;
(function (FunctionCallingMode) {
// Unspecified function calling mode. This value should not be used.
FunctionCallingMode["MODE_UNSPECIFIED"] = "MODE_UNSPECIFIED";
// Default model behavior, model decides to predict either a function call
// or a natural language response.
FunctionCallingMode["AUTO"] = "AUTO";
// Model is constrained to always predicting a function call only.
// If "allowed_function_names" are set, the predicted function call will be
// limited to any one of "allowed_function_names", else the predicted
// function call will be any one of the provided "function_declarations".
FunctionCallingMode["ANY"] = "ANY";
// Model will not predict any function call. Model behavior is same as when
// not passing any function declarations.
FunctionCallingMode["NONE"] = "NONE";
})(exports.FunctionCallingMode || (exports.FunctionCallingMode = {}));

@@ -205,3 +224,3 @@ /**

*/
const PACKAGE_VERSION = "0.5.0";
const PACKAGE_VERSION = "0.6.0";
const PACKAGE_LOG_HEADER = "genai-js";

@@ -246,10 +265,31 @@ var Task;

}
async function makeRequest(url, body, requestOptions) {
async function getHeaders(url) {
const headers = new Headers();
headers.append("Content-Type", "application/json");
headers.append("x-goog-api-client", getClientHeaders(url.requestOptions));
headers.append("x-goog-api-key", url.apiKey);
return headers;
}
async function constructRequest(model, task, apiKey, stream, body, requestOptions) {
const url = new RequestUrl(model, task, apiKey, stream, requestOptions);
return {
url: url.toString(),
fetchOptions: Object.assign(Object.assign({}, buildFetchOptions(requestOptions)), { method: "POST", headers: await getHeaders(url), body }),
};
}
/**
* Wrapper for _makeRequestInternal that automatically uses native fetch,
* allowing _makeRequestInternal to be tested with a mocked fetch function.
*/
async function makeRequest(model, task, apiKey, stream, body, requestOptions) {
return _makeRequestInternal(model, task, apiKey, stream, body, requestOptions, fetch);
}
async function _makeRequestInternal(model, task, apiKey, stream, body, requestOptions,
// Allows this to be stubbed for tests
fetchFn = fetch) {
const url = new RequestUrl(model, task, apiKey, stream, requestOptions);
let response;
try {
response = await fetch(url.toString(), Object.assign(Object.assign({}, buildFetchOptions(requestOptions)), { method: "POST", headers: {
"Content-Type": "application/json",
"x-goog-api-client": getClientHeaders(requestOptions),
"x-goog-api-key": url.apiKey,
}, body }));
const request = await constructRequest(model, task, apiKey, stream, body, requestOptions);
response = await fetchFn(request.url, request.fetchOptions);
if (!response.ok) {

@@ -651,11 +691,9 @@ let message = "";

async function generateContentStream(apiKey, model, params, requestOptions) {
const url = new RequestUrl(model, Task.STREAM_GENERATE_CONTENT, apiKey,
/* stream */ true, requestOptions);
const response = await makeRequest(url, JSON.stringify(params), requestOptions);
const response = await makeRequest(model, Task.STREAM_GENERATE_CONTENT, apiKey,
/* stream */ true, JSON.stringify(params), requestOptions);
return processStream(response);
}
async function generateContent(apiKey, model, params, requestOptions) {
const url = new RequestUrl(model, Task.GENERATE_CONTENT, apiKey,
/* stream */ false, requestOptions);
const response = await makeRequest(url, JSON.stringify(params), requestOptions);
const response = await makeRequest(model, Task.GENERATE_CONTENT, apiKey,
/* stream */ false, JSON.stringify(params), requestOptions);
const responseJson = await response.json();

@@ -779,2 +817,4 @@ const enhancedResponse = addHelpers(responseJson);

model: ["text", "functionCall"],
// System instructions shouldn't be in history anyway.
system: ["text"],
};

@@ -785,2 +825,4 @@ const VALID_PREVIOUS_CONTENT_ROLES = {

model: ["user", "function"],
// System instructions shouldn't be in history.
system: [],
};

@@ -885,3 +927,3 @@ function validateChatHistory(history) {

async sendMessage(request) {
var _a, _b, _c;
var _a, _b, _c, _d, _e;
await this._sendPromise;

@@ -893,2 +935,4 @@ const newContent = formatNewContent(request);

tools: (_c = this.params) === null || _c === void 0 ? void 0 : _c.tools,
toolConfig: (_d = this.params) === null || _d === void 0 ? void 0 : _d.toolConfig,
systemInstruction: (_e = this.params) === null || _e === void 0 ? void 0 : _e.systemInstruction,
contents: [...this._history, newContent],

@@ -927,3 +971,3 @@ };

async sendMessageStream(request) {
var _a, _b, _c;
var _a, _b, _c, _d, _e;
await this._sendPromise;

@@ -935,2 +979,4 @@ const newContent = formatNewContent(request);

tools: (_c = this.params) === null || _c === void 0 ? void 0 : _c.tools,
toolConfig: (_d = this.params) === null || _d === void 0 ? void 0 : _d.toolConfig,
systemInstruction: (_e = this.params) === null || _e === void 0 ? void 0 : _e.systemInstruction,
contents: [...this._history, newContent],

@@ -996,4 +1042,3 @@ };

async function countTokens(apiKey, model, params, requestOptions) {
const url = new RequestUrl(model, Task.COUNT_TOKENS, apiKey, false, requestOptions);
const response = await makeRequest(url, JSON.stringify(Object.assign(Object.assign({}, params), { model })), requestOptions);
const response = await makeRequest(model, Task.COUNT_TOKENS, apiKey, false, JSON.stringify(Object.assign(Object.assign({}, params), { model })), requestOptions);
return response.json();

@@ -1019,12 +1064,10 @@ }

async function embedContent(apiKey, model, params, requestOptions) {
const url = new RequestUrl(model, Task.EMBED_CONTENT, apiKey, false, requestOptions);
const response = await makeRequest(url, JSON.stringify(params), requestOptions);
const response = await makeRequest(model, Task.EMBED_CONTENT, apiKey, false, JSON.stringify(params), requestOptions);
return response.json();
}
async function batchEmbedContents(apiKey, model, params, requestOptions) {
const url = new RequestUrl(model, Task.BATCH_EMBED_CONTENTS, apiKey, false, requestOptions);
const requestsWithModel = params.requests.map((request) => {
return Object.assign(Object.assign({}, request), { model });
});
const response = await makeRequest(url, JSON.stringify({ requests: requestsWithModel }), requestOptions);
const response = await makeRequest(model, Task.BATCH_EMBED_CONTENTS, apiKey, false, JSON.stringify({ requests: requestsWithModel }), requestOptions);
return response.json();

@@ -1067,2 +1110,4 @@ }

this.tools = modelParams.tools;
this.toolConfig = modelParams.toolConfig;
this.systemInstruction = modelParams.systemInstruction;
this.requestOptions = requestOptions || {};

@@ -1076,3 +1121,3 @@ }

const formattedParams = formatGenerateContentInput(request);
return generateContent(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools }, formattedParams), this.requestOptions);
return generateContent(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction }, formattedParams), this.requestOptions);
}

@@ -1087,3 +1132,3 @@ /**

const formattedParams = formatGenerateContentInput(request);
return generateContentStream(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools }, formattedParams), this.requestOptions);
return generateContentStream(this.apiKey, this.model, Object.assign({ generationConfig: this.generationConfig, safetySettings: this.safetySettings, tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction }, formattedParams), this.requestOptions);
}

@@ -1095,3 +1140,3 @@ /**

startChat(startChatParams) {
return new ChatSession(this.apiKey, this.model, Object.assign({ tools: this.tools }, startChatParams), this.requestOptions);
return new ChatSession(this.apiKey, this.model, Object.assign({ tools: this.tools, toolConfig: this.toolConfig, systemInstruction: this.systemInstruction }, startChatParams), this.requestOptions);
}

@@ -1098,0 +1143,0 @@ /**

@@ -17,3 +17,3 @@ /**

*/
import { BatchEmbedContentsRequest, BatchEmbedContentsResponse, CountTokensRequest, CountTokensResponse, EmbedContentRequest, EmbedContentResponse, GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, GenerationConfig, ModelParams, Part, RequestOptions, SafetySetting, StartChatParams, Tool } from "../../types";
import { BatchEmbedContentsRequest, BatchEmbedContentsResponse, Content, CountTokensRequest, CountTokensResponse, EmbedContentRequest, EmbedContentResponse, GenerateContentRequest, GenerateContentResult, GenerateContentStreamResult, GenerationConfig, ModelParams, Part, RequestOptions, SafetySetting, StartChatParams, Tool, ToolConfig } from "../../types";
import { ChatSession } from "../methods/chat-session";

@@ -31,2 +31,4 @@ /**

tools?: Tool[];
toolConfig?: ToolConfig;
systemInstruction?: Content;
constructor(apiKey: string, modelParams: ModelParams, requestOptions?: RequestOptions);

@@ -33,0 +35,0 @@ /**

@@ -40,2 +40,12 @@ /**

export declare function getClientHeaders(requestOptions: RequestOptions): string;
export declare function makeRequest(url: RequestUrl, body: string, requestOptions?: RequestOptions): Promise<Response>;
export declare function getHeaders(url: RequestUrl): Promise<Headers>;
export declare function constructRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions?: RequestOptions): Promise<{
url: string;
fetchOptions: RequestInit;
}>;
/**
* Wrapper for _makeRequestInternal that automatically uses native fetch,
* allowing _makeRequestInternal to be tested with a mocked fetch function.
*/
export declare function makeRequest(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions?: RequestOptions): Promise<Response>;
export declare function _makeRequestInternal(model: string, task: Task, apiKey: string, stream: boolean, body: string, requestOptions?: RequestOptions, fetchFn?: typeof fetch): Promise<Response>;

@@ -21,3 +21,3 @@ /**

*/
export declare const POSSIBLE_ROLES: readonly ["user", "model", "function"];
export declare const POSSIBLE_ROLES: readonly ["user", "model", "function", "system"];
/**

@@ -89,1 +89,10 @@ * Harm categories that would cause prompts or candidates to be blocked.

}
/**
* @public
*/
export declare enum FunctionCallingMode {
MODE_UNSPECIFIED = "MODE_UNSPECIFIED",
AUTO = "AUTO",
ANY = "ANY",
NONE = "NONE"
}

@@ -18,3 +18,3 @@ /**

import { Content } from "./content";
import { HarmBlockThreshold, HarmCategory, TaskType } from "./enums";
import { FunctionCallingMode, HarmBlockThreshold, HarmCategory, TaskType } from "./enums";
/**

@@ -35,2 +35,4 @@ * Base parameters for a number of methods.

tools?: Tool[];
toolConfig?: ToolConfig;
systemInstruction?: Content;
}

@@ -44,2 +46,4 @@ /**

tools?: Tool[];
toolConfig?: ToolConfig;
systemInstruction?: Content;
}

@@ -73,2 +77,4 @@ /**

tools?: Tool[];
toolConfig?: ToolConfig;
systemInstruction?: Content;
}

@@ -254,1 +260,15 @@ /**

}
/**
* Tool config. This config is shared for all tools provided in the request.
* @public
*/
export interface ToolConfig {
functionCallingConfig: FunctionCallingConfig;
}
/**
* @public
*/
export interface FunctionCallingConfig {
mode?: FunctionCallingMode;
allowedFunctionNames?: string[];
}
{
"name": "@google/generative-ai",
"version": "0.5.0",
"version": "0.6.0",
"description": "Google AI JavaScript SDK",

@@ -5,0 +5,0 @@ "main": "dist/index.js",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc