langchain - npm Package Compare versions

Comparing version 0.0.21 to 0.0.22

dist/embeddings/cohere.d.ts (6 changes)

dist/chains/chat_vector_db_chain.d.ts

@@ -1,4 +1,4 @@
-import { BaseChain, ChainValues, SerializedBaseChain, SerializedLLMChain, LLMChain } from "./index.js";
+import { BaseChain, ChainValues, LLMChain, SerializedBaseChain, SerializedLLMChain } from "./index.js";
-import { BaseLLM } from "../llms/index.js";
 import { VectorStore } from "../vectorstores/base.js";
+import { BaseLLM } from "../llms/index.js";
 export type LoadValues = Record<string, any>;

@@ -29,2 +29,3 @@ export interface ChatVectorDBQAChainInput {
     questionGeneratorChain: LLMChain;
+    returnSourceDocuments: boolean;
     constructor(fields: {

@@ -37,2 +38,3 @@ vectorstore: VectorStore;
         k?: number;
+        returnSourceDocuments?: boolean;
     });

dist/chains/chat_vector_db_chain.js

@@ -1,2 +1,2 @@
-import { BaseChain, loadQAChain, LLMChain, } from "./index.js";
+import { BaseChain, LLMChain, loadQAChain, } from "./index.js";
 import { PromptTemplate } from "../prompts/index.js";

@@ -66,2 +66,8 @@ import { resolveConfigFromFile } from "../util/index.js";
         });
+        Object.defineProperty(this, "returnSourceDocuments", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
         this.vectorstore = fields.vectorstore;

@@ -73,2 +79,4 @@ this.combineDocumentsChain = fields.combineDocumentsChain;
         this.k = fields.k ?? this.k;
+        this.returnSourceDocuments =
+            fields.returnSourceDocuments ?? this.returnSourceDocuments;
     }

@@ -105,2 +113,8 @@ async _call(values) {
         const result = await this.combineDocumentsChain.call(inputs);
+        if (this.returnSourceDocuments) {
+            return {
+                ...result,
+                sourceDocuments: docs,
+            };
+        }
         return result;

@@ -26,2 +26,20 @@ import { test } from "@jest/globals";
 });
+test("Test ChatVectorDBQAChain with returnSourceDocuments", async () => {
+    const model = new OpenAI({ modelName: "text-ada-001" });
+    const prompt = PromptTemplate.fromTemplate("Print {question}, and ignore {chat_history}");
+    const vectorStore = await HNSWLib.fromTexts(["Hello world", "Bye bye", "hello nice world", "bye", "hi"], [{ id: 2 }, { id: 1 }, { id: 3 }, { id: 4 }, { id: 5 }], new OpenAIEmbeddings());
+    const llmChain = new LLMChain({ prompt, llm: model });
+    const combineDocsChain = new StuffDocumentsChain({
+        llmChain,
+        documentVariableName: "foo",
+    });
+    const chain = new ChatVectorDBQAChain({
+        combineDocumentsChain: combineDocsChain,
+        vectorstore: vectorStore,
+        questionGeneratorChain: llmChain,
+        returnSourceDocuments: true,
+    });
+    const res = await chain.call({ question: "foo", chat_history: "bar" });
+    console.log({ res });
+});
 test("Test ChatVectorDBQAChain from LLM", async () => {
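Taken together, these hunks give ChatVectorDBQAChain an opt-in returnSourceDocuments flag (default false): when set, _call spreads the combine-documents result and attaches the retrieved docs under sourceDocuments. A minimal consumer-side sketch, reusing the chain constructed in the test above; reading res.sourceDocuments is the only new part:

// Sketch only: `chain` as built in the test above, with returnSourceDocuments: true.
const res = await chain.call({ question: "foo", chat_history: "bar" });
// Alongside the usual output, the result now carries the k documents
// retrieved from the vector store.
console.log(res.sourceDocuments);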

dist/chains/vector_db_qa_chain.d.ts

@@ -25,2 +25,3 @@ import { BaseChain, ChainValues, SerializedBaseChain } from "./index.js";
     combineDocumentsChain: BaseChain;
+    returnSourceDocuments: boolean;
     constructor(fields: {

@@ -32,2 +33,3 @@ vectorstore: VectorStore;
         k?: number;
+        returnSourceDocuments?: boolean;
     });

dist/chains/vector_db_qa_chain.js

@@ -39,2 +39,8 @@ import { BaseChain, loadQAChain, } from "./index.js";
         });
+        Object.defineProperty(this, "returnSourceDocuments", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: false
+        });
         this.vectorstore = fields.vectorstore;

@@ -45,2 +51,4 @@ this.combineDocumentsChain = fields.combineDocumentsChain;
         this.k = fields.k ?? this.k;
+        this.returnSourceDocuments =
+            fields.returnSourceDocuments ?? this.returnSourceDocuments;
     }

@@ -55,2 +63,8 @@ async _call(values) {
         const result = await this.combineDocumentsChain.call(inputs);
+        if (this.returnSourceDocuments) {
+            return {
+                ...result,
+                sourceDocuments: docs,
+            };
+        }
         return result;
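VectorDBQAChain receives the identical change: the same defineProperty default, constructor override, and _call branch. A minimal sketch, assuming a vectorStore and combineDocsChain set up as in the chat-chain test above:

// Sketch only: vectorStore and combineDocsChain as in the chat-chain test above.
const qaChain = new VectorDBQAChain({
    combineDocumentsChain: combineDocsChain,
    vectorstore: vectorStore,
    returnSourceDocuments: true,
});
// "query" is VectorDBQAChain's input key in this release (assumed from the
// library, not shown in this diff).
const out = await qaChain.call({ query: "foo" });
console.log(out.sourceDocuments);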

dist/llms/hf.d.ts

@@ -1,2 +1,1 @@
-import type HuggingFaceT from "huggingface";
 import { LLM, LLMCallbackManager } from "./index.js";

@@ -6,5 +5,22 @@ interface HFInput {
     model: string;
+    /** Sampling temperature to use */
+    temperature?: number;
+    /**
+     * Maximum number of tokens to generate in the completion.
+     */
+    maxTokens?: number;
+    /** Total probability mass of tokens to consider at each step */
+    topP?: number;
+    /** Integer to define the top tokens considered within the sample operation to create new text. */
+    topK?: number;
+    /** Penalizes repeated tokens according to frequency */
+    frequencyPenalty?: number;
 }
 export declare class HuggingFaceInference extends LLM implements HFInput {
     model: string;
+    temperature: number | undefined;
+    maxTokens: number | undefined;
+    topP: number | undefined;
+    topK: number | undefined;
+    frequencyPenalty: number | undefined;
     constructor(fields?: Partial<HFInput> & {

@@ -19,5 +35,5 @@ callbackManager?: LLMCallbackManager;
     static imports(): Promise<{
-        HuggingFace: typeof HuggingFaceT;
+        HfInference: typeof import("@huggingface/inference").HfInference;
     }>;
 }
 export {};

dist/llms/hf.js

@@ -11,3 +11,38 @@ import { LLM } from "./index.js";
         });
+        Object.defineProperty(this, "temperature", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "maxTokens", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "topP", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "topK", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "frequencyPenalty", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
         this.model = fields?.model ?? this.model;
+        this.temperature = fields?.temperature ?? this.temperature;
+        this.maxTokens = fields?.maxTokens ?? this.maxTokens;
+        this.topP = fields?.topP ?? this.topP;
+        this.topK = fields?.topK ?? this.topK;
+        this.frequencyPenalty = fields?.frequencyPenalty ?? this.frequencyPenalty;
     }

@@ -21,6 +56,15 @@ _llmType() {
         }
-        const { HuggingFace } = await HuggingFaceInference.imports();
-        const hf = new HuggingFace(process.env.HUGGINGFACEHUB_API_KEY ?? "");
+        const { HfInference } = await HuggingFaceInference.imports();
+        const hf = new HfInference(process.env.HUGGINGFACEHUB_API_KEY ?? "");
         const res = await hf.textGeneration({
             model: this.model,
+            parameters: {
+                // make it behave similar to openai, returning only the generated text
+                return_full_text: false,
+                temperature: this.temperature,
+                max_new_tokens: this.maxTokens,
+                top_p: this.topP,
+                top_k: this.topK,
+                repetition_penalty: this.frequencyPenalty,
+            },
             inputs: prompt,

@@ -32,4 +76,4 @@ });
         try {
-            const { default: HuggingFace } = await import("huggingface");
-            return { HuggingFace };
+            const { HfInference } = await import("@huggingface/inference");
+            return { HfInference };
         }

 import { test } from "@jest/globals";
 import { HuggingFaceInference } from "../hf.js";
 test("Test HuggingFace", async () => {
-    const model = new HuggingFaceInference();
+    const model = new HuggingFaceInference({ temperature: 0.1, topP: 0.5 });
     const res = await model.call("1 + 1 =");
     console.log(res);
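Behind these hunks, HuggingFaceInference replaces the huggingface package with @huggingface/inference and forwards the new sampling knobs to textGeneration. A minimal sketch of the equivalent direct call; the model id "gpt2" is a placeholder, and the API key env var matches the one the implementation reads:

import { HfInference } from "@huggingface/inference";

const hf = new HfInference(process.env.HUGGINGFACEHUB_API_KEY ?? "");
const res = await hf.textGeneration({
    model: "gpt2", // placeholder model id for illustration
    inputs: "1 + 1 =",
    // mirror the wrapper's default: return only the generated text
    parameters: { return_full_text: false, temperature: 0.1, top_p: 0.5 },
});
console.log(res.generated_text);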

package.json

 {
   "name": "langchain",
-  "version": "0.0.21",
+  "version": "0.0.22",
   "description": "Typescript bindings for langchain",

@@ -63,2 +63,3 @@ "type": "module",
     "@dqbd/tiktoken": "^0.4.0",
+    "@huggingface/inference": "^1.5.1",
     "@jest/globals": "^29.4.2",

@@ -82,3 +83,2 @@ "@pinecone-database/pinecone": "^0.0.8",
     "hnswlib-node": "^1.3.0",
-    "huggingface": "^1.4.0",
     "husky": "^8.0.3",

@@ -98,2 +98,3 @@ "jest": "^29.4.2",
     "@dqbd/tiktoken": "^0.4.0",
+    "@huggingface/inference": "^1.5.1",
     "@pinecone-database/pinecone": "^0.0.8",

@@ -104,3 +105,2 @@ "cheerio": "^1.0.0-rc.12",
     "hnswlib-node": "^1.3.0",
-    "huggingface": "^1.4.0",
     "serpapi": "^1.1.1",

@@ -113,2 +113,5 @@ "srt-parser-2": "^1.2.2"
     },
+    "@huggingface/inference": {
+      "optional": true
+    },
     "@pinecone-database/pinecone": {

@@ -149,3 +152,2 @@ "optional": true
     "sqlite3": "^5.1.4",
-    "unfetch": "^5.0.0",
     "uuid": "^9.0.0",
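The dependency swap is mirrored in devDependencies, peerDependencies, and peerDependenciesMeta, where @huggingface/inference is marked optional (unfetch is dropped as well). Consumers only need it installed when they actually use HuggingFaceInference; a sketch of the lazy-import guard the package applies, with an illustrative error message:

// Lazy, optional import: mirrors HuggingFaceInference.imports() from the diff above.
async function importHf() {
    try {
        const { HfInference } = await import("@huggingface/inference");
        return { HfInference };
    }
    catch (e) {
        // illustrative message; install the optional peer dependency to use HF models
        throw new Error("Please install @huggingface/inference to use HuggingFaceInference");
    }
}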

Sorry, the diffs of the remaining five files are not supported yet.
