Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@langchain/google-genai

Package Overview
Dependencies
Maintainers
4
Versions
32
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/google-genai - npm Package Compare versions

Comparing version 0.0.4 to 0.0.5

4

dist/chat_models.d.ts

@@ -128,4 +128,4 @@ import type { SafetySetting } from "@google/generative-ai";

_llmType(): string;
_generate(messages: BaseMessage[], options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
_streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
_generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
_streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
}

@@ -163,3 +163,3 @@ import { GoogleGenerativeAI as GenerativeAI, } from "@google/generative-ai";

}
async _generate(messages, options, _runManager) {
async _generate(messages, options, runManager) {
const prompt = convertBaseMessagesToContent(messages, this._isMultimodalModel);

@@ -183,5 +183,7 @@ const res = await this.caller.callWithOptions({ signal: options?.signal }, async () => {

});
return mapGenerateContentResultToChatResult(res.response);
const generationResult = mapGenerateContentResultToChatResult(res.response);
await runManager?.handleLLMNewToken(generationResult.generations[0].text ?? "");
return generationResult;
}
async *_streamResponseChunks(messages, options, _runManager) {
async *_streamResponseChunks(messages, options, runManager) {
const prompt = convertBaseMessagesToContent(messages, this._isMultimodalModel);

@@ -200,4 +202,5 @@ const stream = await this.caller.callWithOptions({ signal: options?.signal }, async () => {

yield chunk;
await runManager?.handleLLMNewToken(chunk.text ?? "");
}
}
}
{
"name": "@langchain/google-genai",
"version": "0.0.4",
"version": "0.0.5",
"description": "Sample integration for LangChain.js",

@@ -38,3 +38,3 @@ "type": "module",

"@google/generative-ai": "^0.1.0",
"@langchain/core": "~0.1.2"
"@langchain/core": "~0.1.3"
},

@@ -41,0 +41,0 @@ "devDependencies": {

@@ -24,10 +24,10 @@ # @langchain/google-genai

"resolutions": {
"@langchain/core": "0.1.1"
"@langchain/core": "0.1.2"
},
"overrides": {
"@langchain/core": "0.1.1"
"@langchain/core": "0.1.2"
},
"pnpm": {
"overrides": {
"@langchain/core": "0.1.1"
"@langchain/core": "0.1.2"
}

@@ -34,0 +34,0 @@ }

Sorry, the diff of this file is not supported yet

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc