langchain - npm Package Compare versions

Comparing version 0.0.4 to 0.0.5

dist/cjs/agents/initialize.d.ts


dist/cjs/agents/agent.d.ts

@@ -7,3 +7,12 @@ import { ChainValues } from "../chains";

export interface StaticAgent {
/**
* Create a prompt for this class
*
* @param tools - List of tools the agent will have access to, used to format the prompt.
* @param fields - Additional fields used to format the prompt.
*
* @returns A PromptTemplate assembled from the given tools and fields.
* */
createPrompt(tools: Tool[], fields?: Record<string, any>): BasePromptTemplate;
/** Construct an agent from an LLM and a list of tools */
fromLLMAndTools(llm: BaseLLM, tools: Tool[], args?: Record<string, any>): Agent;

@@ -18,2 +27,9 @@ validateTools(_: Tool[]): void;

}
/**
* Class responsible for calling a language model and deciding an action.
*
* @remarks This is driven by an LLMChain. The prompt in the LLMChain *must*
* include a variable called "agent_scratchpad" where the agent can put its
* intermediary work.
*/
export declare abstract class Agent {

@@ -24,2 +40,5 @@ llmChain: LLMChain;

constructor(input: AgentInput);
/**
* Extract tool and tool input from LLM output.
*/
abstract extractToolAndInput(input: string): {

@@ -29,13 +48,48 @@ tool: string;

} | null;
/**
* Prefix to append the observation with.
*/
abstract observationPrefix(): string;
/**
* Prefix to append the LLM call with.
*/
abstract llmPrefix(): string;
/**
* Return the string type key uniquely identifying this class of agent.
*/
abstract _agentType(): string;
/**
* Prepare the agent for a new call, if needed
*/
prepareForNewCall(): void;
/**
* Validate that appropriate tools are passed in
*/
static validateTools(_: Tool[]): void;
_stop(): string[];
/**
* Name of tool to use to terminate the chain.
*/
finishToolName(): string;
/**
* Construct a scratchpad to let the agent continue its thought process
*/
private constructScratchPad;
private _plan;
/**
* Decide what to do given some input.
*
* @param steps - Steps the LLM has taken so far, along with observations from each.
* @param inputs - User inputs.
*
* @returns Action specifying what tool to use.
*/
plan(steps: AgentStep[], inputs: ChainValues): Promise<AgentAction | AgentFinish>;
/**
* Return response when agent has been stopped due to max iterations
*/
returnStoppedResponse(earlyStoppingMethod: StoppingMethod, steps: AgentStep[], inputs: ChainValues): Promise<AgentFinish>;
/**
* Load an agent from a json-like object describing it.
*/
static deserialize(data: SerializedAgent & {

@@ -42,0 +96,0 @@ llm?: BaseLLM;
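The `@remarks` note above (the prompt in the agent's LLMChain *must* contain an `agent_scratchpad` variable) can be made concrete with a minimal sketch, using the `PromptTemplate` constructor that appears later in this diff; the template text itself is illustrative:

```ts
import { PromptTemplate } from "langchain";

// The prompt driving an Agent's LLMChain must declare an "agent_scratchpad"
// input variable; the agent appends its intermediate tool calls and
// observations there between LLM calls.
const prompt = new PromptTemplate({
  template: "Answer the question.\n\nQuestion: {input}\n{agent_scratchpad}",
  inputVariables: ["input", "agent_scratchpad"],
});
```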


dist/cjs/agents/agent.js
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -28,2 +19,9 @@ exports.Agent = exports.staticImplements = void 0;

exports.staticImplements = staticImplements;
/**
* Class responsible for calling a language model and deciding an action.
*
* @remarks This is driven by an LLMChain. The prompt in the LLMChain *must*
* include a variable called "agent_scratchpad" where the agent can put its
* intermediary work.
*/
class Agent {

@@ -52,3 +50,9 @@ constructor(input) {

}
/**
* Prepare the agent for a new call, if needed
*/
prepareForNewCall() { }
/**
* Validate that appropriate tools are passed in
*/
// eslint-disable-next-line no-unused-vars

@@ -59,5 +63,11 @@ static validateTools(_) { }

}
/**
* Name of tool to use to terminate the chain.
*/
finishToolName() {
return "Final Answer";
}
/**
* Construct a scratchpad to let the agent continue its thought process
*/
constructScratchPad(steps) {

@@ -71,60 +81,72 @@ return steps.reduce((thoughts, { action, observation }) => thoughts +

}
_plan(steps, inputs, suffix) {
return __awaiter(this, void 0, void 0, function* () {
const thoughts = this.constructScratchPad(steps);
const newInputs = Object.assign(Object.assign({}, inputs), { agent_scratchpad: suffix ? `${thoughts}${suffix}` : thoughts, stop: this._stop() });
const output = yield this.llmChain.predict(newInputs);
const parsed = this.extractToolAndInput(output);
if (!parsed) {
throw new ParseError(`Invalid output: ${output}`, output);
}
const action = {
tool: parsed.tool,
toolInput: parsed.input,
log: output,
};
if (action.tool === this.finishToolName()) {
return { returnValues: { output: action.toolInput }, log: action.log };
}
return action;
});
async _plan(steps, inputs, suffix) {
const thoughts = this.constructScratchPad(steps);
const newInputs = {
...inputs,
agent_scratchpad: suffix ? `${thoughts}${suffix}` : thoughts,
stop: this._stop(),
};
const output = await this.llmChain.predict(newInputs);
const parsed = this.extractToolAndInput(output);
if (!parsed) {
throw new ParseError(`Invalid output: ${output}`, output);
}
const action = {
tool: parsed.tool,
toolInput: parsed.input,
log: output,
};
if (action.tool === this.finishToolName()) {
return { returnValues: { output: action.toolInput }, log: action.log };
}
return action;
}
/**
* Decide what to do given some input.
*
* @param steps - Steps the LLM has taken so far, along with observations from each.
* @param inputs - User inputs.
*
* @returns Action specifying what tool to use.
*/
plan(steps, inputs) {
return this._plan(steps, inputs);
}
returnStoppedResponse(earlyStoppingMethod, steps, inputs) {
return __awaiter(this, void 0, void 0, function* () {
if (earlyStoppingMethod === "force") {
return {
returnValues: { output: "Agent stopped due to max iterations." },
log: "",
};
}
if (earlyStoppingMethod === "generate") {
try {
const action = yield this._plan(steps, inputs, "\n\nI now need to return a final answer based on the previous steps:");
if ("returnValues" in action) {
return action;
}
return { returnValues: { output: action.log }, log: action.log };
}
catch (err) {
if (!(err instanceof ParseError)) {
throw err;
}
return { returnValues: { output: err.output }, log: err.output };
}
}
throw new Error(`Invalid stopping method: ${earlyStoppingMethod}`);
});
}
/**
* Return response when agent has been stopped due to max iterations
*/
async returnStoppedResponse(earlyStoppingMethod, steps, inputs) {
if (earlyStoppingMethod === "force") {
return {
returnValues: { output: "Agent stopped due to max iterations." },
log: "",
};
}
if (earlyStoppingMethod === "generate") {
try {
const action = await this._plan(steps, inputs, "\n\nI now need to return a final answer based on the previous steps:");
if ("returnValues" in action) {
return action;
}
return { returnValues: { output: action.log }, log: action.log };
}
catch (err) {
if (!(err instanceof ParseError)) {
throw err;
}
return { returnValues: { output: err.output }, log: err.output };
}
}
throw new Error(`Invalid stopping method: ${earlyStoppingMethod}`);
}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
switch (data._type) {
case "zero-shot-react-description":
return index_1.ZeroShotAgent.deserialize(data);
default:
throw new Error("Unknown agent type");
}
});
/**
* Load an agent from a json-like object describing it.
*/
static async deserialize(data) {
switch (data._type) {
case "zero-shot-react-description":
return index_1.ZeroShotAgent.deserialize(data);
default:
throw new Error("Unknown agent type");
}
}

@@ -131,0 +153,0 @@ }

@@ -11,2 +11,6 @@ import { ChainValues, BaseChain } from "../chains";

};
/**
* A chain managing an agent using tools.
* @augments BaseChain
*/
export declare class AgentExecutor extends BaseChain {

@@ -19,2 +23,3 @@ agent: Agent;

constructor(input: AgentExecutorInput);
/** Create from agent and a list of tools. */
static fromAgentAndTools(fields: {

@@ -21,0 +26,0 @@ agent: Agent;

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AgentExecutor = void 0;
const chains_1 = require("../chains");
/**
* A chain managing an agent using tools.
* @augments BaseChain
*/
class AgentExecutor extends chains_1.BaseChain {

@@ -56,2 +51,3 @@ constructor(input) {

}
/** Create from agent and a list of tools. */
static fromAgentAndTools(fields) {

@@ -63,36 +59,34 @@ return new AgentExecutor(fields);

}
_call(inputs) {
return __awaiter(this, void 0, void 0, function* () {
this.agent.prepareForNewCall();
const toolsByName = Object.fromEntries(this.tools.map((t) => [t.name, t]));
const steps = [];
let iterations = 0;
const getOutput = (finishStep) => {
const { returnValues } = finishStep;
if (this.returnIntermediateSteps) {
return Object.assign(Object.assign({}, returnValues), { intermediateSteps: steps });
}
return returnValues;
};
while (this.shouldContinue(iterations)) {
const action = yield this.agent.plan(steps, inputs);
if ("returnValues" in action) {
return getOutput(action);
}
const tool = toolsByName[action.tool];
const observation = tool
? yield tool.call(action.toolInput)
: `${action.tool} is not a valid tool, try another one.`;
steps.push({ action, observation });
if (tool === null || tool === void 0 ? void 0 : tool.returnDirect) {
return getOutput({
returnValues: { [this.agent.returnValues[0]]: observation },
log: "",
});
}
iterations += 1;
}
const finish = yield this.agent.returnStoppedResponse(this.earlyStoppingMethod, steps, inputs);
return getOutput(finish);
});
async _call(inputs) {
this.agent.prepareForNewCall();
const toolsByName = Object.fromEntries(this.tools.map((t) => [t.name.toLowerCase(), t]));
const steps = [];
let iterations = 0;
const getOutput = (finishStep) => {
const { returnValues } = finishStep;
if (this.returnIntermediateSteps) {
return { ...returnValues, intermediateSteps: steps };
}
return returnValues;
};
while (this.shouldContinue(iterations)) {
const action = await this.agent.plan(steps, inputs);
if ("returnValues" in action) {
return getOutput(action);
}
const tool = toolsByName[action.tool.toLowerCase()];
const observation = tool
? await tool.call(action.toolInput)
: `${action.tool} is not a valid tool, try another one.`;
steps.push({ action, observation });
if (tool === null || tool === void 0 ? void 0 : tool.returnDirect) {
return getOutput({
returnValues: { [this.agent.returnValues[0]]: observation },
log: "",
});
}
iterations += 1;
}
const finish = await this.agent.returnStoppedResponse(this.earlyStoppingMethod, steps, inputs);
return getOutput(finish);
}

@@ -99,0 +93,0 @@ _chainType() {
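Taken together with `fromAgentAndTools` above, the executor loop can be driven as in this minimal sketch (run inside an async context; the import paths for the agents and tools modules are assumptions based on the `dist/cjs` layout in this diff, and the input string is illustrative, following the tests further down):

```ts
import { AgentExecutor, ZeroShotAgent } from "langchain/agents";
import { SerpAPI, Calculator } from "langchain/agents/tools";
import { OpenAI } from "langchain";

const llm = new OpenAI({ temperature: 0 });
const tools = [new SerpAPI(), new Calculator()];
// Build the agent, then hand agent + tools to the executor loop.
const agent = ZeroShotAgent.fromLLMAndTools(llm, tools);
const executor = AgentExecutor.fromAgentAndTools({ agent, tools });
const res = await executor.call({ input: "What is 2 raised to the 10th power?" });
console.log(res.output);
```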

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -15,3 +6,3 @@ exports.deserializeHelper = void 0;

const util_1 = require("../util");
const deserializeHelper = (llm, tools, data, fromLLMAndTools, fromConstructor) => __awaiter(void 0, void 0, void 0, function* () {
const deserializeHelper = async (llm, tools, data, fromLLMAndTools, fromConstructor) => {
if (data.load_from_llm_and_tools) {

@@ -27,6 +18,6 @@ if (!llm) {

const serializedLLMChain = (0, util_1.resolveConfigFromFile)("llm_chain", data);
const llmChain = yield chains_1.LLMChain.deserialize(serializedLLMChain);
return fromConstructor(Object.assign(Object.assign({}, data), { llmChain }));
});
const llmChain = await chains_1.LLMChain.deserialize(serializedLLMChain);
return fromConstructor({ ...data, llmChain });
};
exports.deserializeHelper = deserializeHelper;
//# sourceMappingURL=helpers.js.map

@@ -6,2 +6,3 @@ export { AgentAction, AgentFinish, AgentStep, StoppingMethod, SerializedAgentT, } from "./types";

export { Tool } from "./tools";
export { initializeAgentExecutor } from "./initialize";
export { loadAgent } from "./load";
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadAgent = exports.ZeroShotAgent = exports.AgentExecutor = exports.staticImplements = exports.Agent = void 0;
exports.loadAgent = exports.initializeAgentExecutor = exports.Tool = exports.ZeroShotAgent = exports.AgentExecutor = exports.staticImplements = exports.Agent = void 0;
var agent_1 = require("./agent");

@@ -11,4 +11,8 @@ Object.defineProperty(exports, "Agent", { enumerable: true, get: function () { return agent_1.Agent; } });

Object.defineProperty(exports, "ZeroShotAgent", { enumerable: true, get: function () { return mrkl_1.ZeroShotAgent; } });
var tools_1 = require("./tools");
Object.defineProperty(exports, "Tool", { enumerable: true, get: function () { return tools_1.Tool; } });
var initialize_1 = require("./initialize");
Object.defineProperty(exports, "initializeAgentExecutor", { enumerable: true, get: function () { return initialize_1.initializeAgentExecutor; } });
var load_1 = require("./load");
Object.defineProperty(exports, "loadAgent", { enumerable: true, get: function () { return load_1.loadAgent; } });
//# sourceMappingURL=index.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -16,8 +7,8 @@ exports.loadAgent = void 0;

const util_1 = require("../util");
const loadAgentFromFile = (file, llmAndTools) => __awaiter(void 0, void 0, void 0, function* () {
const loadAgentFromFile = async (file, llmAndTools) => {
const serialized = (0, util_1.parseFileConfig)(file);
return _1.Agent.deserialize(Object.assign(Object.assign({}, serialized), llmAndTools));
});
return _1.Agent.deserialize({ ...serialized, ...llmAndTools });
};
const loadAgent = (uri, llmAndTools) => __awaiter(void 0, void 0, void 0, function* () {
const hubResult = yield (0, hub_1.loadFromHub)(uri, (u) => loadAgentFromFile(u, llmAndTools), "agents", new Set(["json", "yaml"]));
const loadAgent = async (uri, llmAndTools) => {
const hubResult = await (0, hub_1.loadFromHub)(uri, (u) => loadAgentFromFile(u, llmAndTools), "agents", new Set(["json", "yaml"]));
if (hubResult) {

@@ -27,4 +18,4 @@ return hubResult;

return loadAgentFromFile(uri, llmAndTools);
});
};
exports.loadAgent = loadAgent;
//# sourceMappingURL=load.js.map

@@ -11,7 +11,15 @@ import { BaseLLM } from "../../llms";

type CreatePromptArgs = {
/** String to put after the list of tools. */
suffix?: string;
/** String to put before the list of tools. */
prefix?: string;
/** List of input variables the final prompt will expect. */
inputVariables?: string[];
};
type ZeroShotAgentInput = AgentInput;
/**
* Agent for the MRKL chain.
* @augments Agent
* @augments StaticAgent
*/
export declare class ZeroShotAgent extends Agent {

@@ -23,2 +31,11 @@ constructor(input: ZeroShotAgentInput);

static validateTools(tools: Tool[]): void;
/**
* Create prompt in the style of the zero shot agent.
*
* @param tools - List of tools the agent will have access to, used to format the prompt.
* @param args - Arguments to create the prompt with.
* @param args.suffix - String to put after the list of tools.
* @param args.prefix - String to put before the list of tools.
* @param args.inputVariables - List of input variables the final prompt will expect.
*/
static createPrompt(tools: Tool[], args?: CreatePromptArgs): PromptTemplate;

@@ -25,0 +42,0 @@ static fromLLMAndTools(llm: BaseLLM, tools: Tool[], args?: CreatePromptArgs): ZeroShotAgent;
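The `createPrompt` arguments documented above can be exercised directly. A short sketch (the prefix text is taken from the `PREFIX` constant below; the suffix is illustrative, and the agents import path is an assumption):

```ts
import { ZeroShotAgent, Tool } from "langchain/agents";

declare const tools: Tool[]; // assumed to be constructed elsewhere

// Build a zero-shot prompt with a custom prefix and suffix around the tool list.
const prompt = ZeroShotAgent.createPrompt(tools, {
  prefix: "Answer the following questions as best you can. You have access to the following tools:",
  suffix: "Begin!",
  inputVariables: ["input", "agent_scratchpad"],
});
```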

@@ -8,22 +8,2 @@ "use strict";

};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
var ZeroShotAgent_1;

@@ -38,2 +18,7 @@ Object.defineProperty(exports, "__esModule", { value: true });

const FINAL_ANSWER_ACTION = "Final Answer:";
/**
* Agent for the MRKL chain.
* @augments Agent
* @augments StaticAgent
*/
let ZeroShotAgent = ZeroShotAgent_1 = class ZeroShotAgent extends index_1.Agent {

@@ -60,2 +45,11 @@ constructor(input) {

}
/**
* Create prompt in the style of the zero shot agent.
*
* @param tools - List of tools the agent will have access to, used to format the prompt.
* @param args - Arguments to create the prompt with.
* @param args.suffix - String to put after the list of tools.
* @param args.prefix - String to put before the list of tools.
* @param args.inputVariables - List of input variables the final prompt will expect.
*/
static createPrompt(tools, args) {

@@ -97,11 +91,9 @@ const { prefix = prompt_2.PREFIX, suffix = prompt_2.SUFFIX, inputVariables = ["input", "agent_scratchpad"], } = args !== null && args !== void 0 ? args : {};

}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const { llm, tools } = data, rest = __rest(data, ["llm", "tools"]);
return (0, helpers_1.deserializeHelper)(llm, tools, rest, (llm, tools, args) => ZeroShotAgent_1.fromLLMAndTools(llm, tools, {
prefix: args.prefix,
suffix: args.suffix,
inputVariables: args.input_variables,
}), (args) => new ZeroShotAgent_1(args));
});
static async deserialize(data) {
const { llm, tools, ...rest } = data;
return (0, helpers_1.deserializeHelper)(llm, tools, rest, (llm, tools, args) => ZeroShotAgent_1.fromLLMAndTools(llm, tools, {
prefix: args.prefix,
suffix: args.suffix,
inputVariables: args.input_variables,
}), (args) => new ZeroShotAgent_1(args));
}

@@ -108,0 +100,0 @@ };

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SUFFIX = exports.formatInstructions = exports.PREFIX = void 0;
/* eslint-disable max-len */
exports.PREFIX = `Answer the following questions as best you can. You have access to the following tools:`;

@@ -6,0 +5,0 @@ const formatInstructions = (toolNames) => `Use the following format:

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -17,6 +8,8 @@ const globals_1 = require("@jest/globals");

const serpapi_1 = require("../tools/serpapi");
(0, globals_1.test)("Run agent from hub", () => __awaiter(void 0, void 0, void 0, function* () {
const model = new openai_1.OpenAI({});
const tools = [(0, serpapi_1.SerpAPI)()];
const agent = yield (0, load_1.loadAgent)("lc://agents/zero-shot-react-description/agent.json", { llm: model, tools });
const calculator_1 = require("../tools/calculator");
const initialize_1 = require("../initialize");
(0, globals_1.test)("Run agent from hub", async () => {
const model = new openai_1.OpenAI({ temperature: 0 });
const tools = [new serpapi_1.SerpAPI(), new calculator_1.Calculator()];
const agent = await (0, load_1.loadAgent)("lc://agents/zero-shot-react-description/agent.json", { llm: model, tools });
const executor = index_1.AgentExecutor.fromAgentAndTools({

@@ -27,7 +20,18 @@ agent,

});
const res = yield executor.call({
const res = await executor.call({
input: "Who is Olivia Wilde's boyfriend? What is his current age raised to the 0.23 power?",
});
console.log(res);
}), 30000);
}, 30000);
(0, globals_1.test)("Run agent locally", async () => {
const model = new openai_1.OpenAI({ temperature: 0 });
const tools = [new serpapi_1.SerpAPI(), new calculator_1.Calculator()];
const executor = await (0, initialize_1.initializeAgentExecutor)(tools, model, "zero-shot-react-description");
console.log("Loaded agent.");
const input = "Who is Olivia Wilde's boyfriend?" +
" What is his current age raised to the 0.23 power?";
console.log(`Executing with input "${input}"...`);
const result = await executor.call({ input });
console.log(`Got output ${result.output}`);
}, 30000);
//# sourceMappingURL=agent.test.js.map
export { SerpAPI } from "./serpapi";
export interface Tool {
call: (arg: string) => Promise<string>;
name: string;
description: string;
returnDirect?: boolean;
}
export { Calculator } from "./calculator";
export { Tool } from "./base";
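Against the 0.0.4 `Tool` interface shown above (0.0.5 replaces it with a base class from `./base`), a custom tool is just an object with `name`, `description`, and an async `call`. A hypothetical sketch (the tools import path is an assumption):

```ts
import { Tool } from "langchain/agents/tools";

// The agent selects a tool by `name` and shows the LLM each `description`.
const clock: Tool = {
  name: "clock",
  description: "returns the current date and time. input is ignored.",
  call: async (_arg: string) => new Date().toISOString(),
};
```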
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.SerpAPI = void 0;
exports.Tool = exports.Calculator = exports.SerpAPI = void 0;
var serpapi_1 = require("./serpapi");
Object.defineProperty(exports, "SerpAPI", { enumerable: true, get: function () { return serpapi_1.SerpAPI; } });
var calculator_1 = require("./calculator");
Object.defineProperty(exports, "Calculator", { enumerable: true, get: function () { return calculator_1.Calculator; } });
var base_1 = require("./base");
Object.defineProperty(exports, "Tool", { enumerable: true, get: function () { return base_1.Tool; } });
//# sourceMappingURL=index.js.map

@@ -1,3 +0,18 @@

import { GoogleParameters } from "serpapi";
import { Tool } from "./index";
export declare const SerpAPI: (params?: Partial<GoogleParameters>, apiKey?: string) => Tool;
import type { GoogleParameters } from "serpapi";
import { Tool } from "./base";
/**
* Wrapper around SerpAPI.
*
* To use, you should have the `serpapi` package installed and the SERPAPI_API_KEY environment variable set.
*/
export declare class SerpAPI extends Tool {
protected key: string;
protected params: Partial<GoogleParameters>;
constructor(apiKey?: string | undefined, params?: Partial<GoogleParameters>);
name: string;
/**
* Run query through SerpAPI and parse result
*/
call(input: string): Promise<any>;
description: string;
}
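A short usage sketch for the new class-based `SerpAPI` (requires the `serpapi` package and `SERPAPI_API_KEY`, per the constructor below; run inside an async context; the import path is an assumption based on the `dist/cjs/agents/tools` layout in this diff):

```ts
import { SerpAPI } from "langchain/agents/tools";

const search = new SerpAPI(); // reads SERPAPI_API_KEY from the environment
const snippet = await search.call("current weather in San Francisco");
console.log(snippet);
```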
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.SerpAPI = void 0;
const serpapi_1 = require("serpapi");
const SerpAPI = (params, apiKey) => {
const key = apiKey !== null && apiKey !== void 0 ? apiKey : process.env.SERPAPI_API_KEY;
return {
name: "search",
call: (input) => __awaiter(void 0, void 0, void 0, function* () {
var _a, _b, _c, _d, _e, _f, _g;
const res = yield (0, serpapi_1.getJson)("google", Object.assign(Object.assign({}, params), { api_key: key, q: input }));
if (res.error) {
throw new Error(`Got error from serpAPI: ${res.error}`);
}
if ((_a = res.answer_box) === null || _a === void 0 ? void 0 : _a.answer) {
return res.answer_box.answer;
}
if ((_b = res.answer_box) === null || _b === void 0 ? void 0 : _b.snippet) {
return res.answer_box.snippet;
}
if ((_c = res.answer_box) === null || _c === void 0 ? void 0 : _c.snippet_highlighted_words) {
return res.answer_box.snippet_highlighted_words[0];
}
if ((_d = res.sports_results) === null || _d === void 0 ? void 0 : _d.game_spotlight) {
return res.sports_results.game_spotlight;
}
if ((_e = res.knowledge_graph) === null || _e === void 0 ? void 0 : _e.description) {
return res.knowledge_graph.description;
}
if ((_g = (_f = res.organic_results) === null || _f === void 0 ? void 0 : _f[0]) === null || _g === void 0 ? void 0 : _g.snippet) {
return res.organic_results[0].snippet;
}
return "No good search result found";
}),
description:
// eslint-disable-next-line max-len
"a search engine. useful for when you need to answer questions about current events. input should be a search query.",
};
};
const base_1 = require("./base");
let getJson = null;
try {
// eslint-disable-next-line global-require,import/no-extraneous-dependencies
({ getJson } = require("serpapi"));
}
catch (_a) {
// ignore error
}
/**
* Wrapper around SerpAPI.
*
* To use, you should have the `serpapi` package installed and the SERPAPI_API_KEY environment variable set.
*/
class SerpAPI extends base_1.Tool {
constructor(apiKey = process.env.SERPAPI_API_KEY, params = {}) {
super();
Object.defineProperty(this, "key", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "params", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "name", {
enumerable: true,
configurable: true,
writable: true,
value: "search"
});
Object.defineProperty(this, "description", {
enumerable: true,
configurable: true,
writable: true,
value: "a search engine. useful for when you need to answer questions about current events. input should be a search query."
});
// Throw error at construction time.
if (getJson === null) {
throw new Error("Please install serpapi as a dependency with, e.g. `npm i serpapi`");
}
if (!apiKey) {
throw new Error("SerpAPI API key not set. You can set it as SERPAPI_API_KEY in your .env file, or pass it to SerpAPI.");
}
this.key = apiKey;
this.params = params;
}
/**
* Run query through SerpAPI and parse result
*/
async call(input) {
var _a, _b, _c, _d, _e, _f, _g;
if (getJson === null) {
throw new Error("Please install serpapi as a dependency with, e.g. `npm i serpapi`");
}
const res = await getJson("google", {
...this.params,
api_key: this.key,
q: input,
});
if (res.error) {
throw new Error(`Got error from serpAPI: ${res.error}`);
}
if ((_a = res.answer_box) === null || _a === void 0 ? void 0 : _a.answer) {
return res.answer_box.answer;
}
if ((_b = res.answer_box) === null || _b === void 0 ? void 0 : _b.snippet) {
return res.answer_box.snippet;
}
if ((_c = res.answer_box) === null || _c === void 0 ? void 0 : _c.snippet_highlighted_words) {
return res.answer_box.snippet_highlighted_words[0];
}
if ((_d = res.sports_results) === null || _d === void 0 ? void 0 : _d.game_spotlight) {
return res.sports_results.game_spotlight;
}
if ((_e = res.knowledge_graph) === null || _e === void 0 ? void 0 : _e.description) {
return res.knowledge_graph.description;
}
if ((_g = (_f = res.organic_results) === null || _f === void 0 ? void 0 : _f[0]) === null || _g === void 0 ? void 0 : _g.snippet) {
return res.organic_results[0].snippet;
}
return "No good search result found";
}
}
exports.SerpAPI = SerpAPI;
//# sourceMappingURL=serpapi.js.map

@@ -1,12 +0,42 @@

import { SerializedLLMChain } from "./index";
import { LLMChain, StuffDocumentsChain, VectorDBQAChain, ChatVectorDBQAChain } from "./index";
import { BaseMemory } from "../memory";
export type ChainValues = Record<string, any>;
type SerializedBaseChain = SerializedLLMChain;
export declare abstract class BaseChain {
export type LoadValues = Record<string, any>;
declare const chainClasses: (typeof LLMChain | typeof StuffDocumentsChain | typeof VectorDBQAChain | typeof ChatVectorDBQAChain)[];
export type SerializedBaseChain = ReturnType<InstanceType<(typeof chainClasses)[number]>["serialize"]>;
export interface ChainInputs {
memory?: BaseMemory;
}
/**
* Base interface that all chains must implement.
*/
export declare abstract class BaseChain implements ChainInputs {
memory?: BaseMemory;
/**
* Run the core logic of this chain and return the output
*/
abstract _call(values: ChainValues): Promise<ChainValues>;
/**
* Return the string type key uniquely identifying this class of chain.
*/
abstract _chainType(): string;
/**
* Return a json-like object representing this chain.
*/
abstract serialize(): SerializedBaseChain;
/**
* Run the core logic of this chain and add to output if desired.
*
* Wraps {@link _call} and handles memory.
*/
call(values: ChainValues): Promise<ChainValues>;
/**
* Call the chain on all inputs in the list
*/
apply(inputs: ChainValues[]): ChainValues[];
static deserialize(data: SerializedBaseChain): Promise<BaseChain>;
/**
* Load a chain from a json-like object describing it.
*/
static deserialize(data: SerializedBaseChain, values?: LoadValues): Promise<BaseChain>;
}
export {};
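The abstract surface above is small; a hypothetical minimal subclass, as a sketch of what `BaseChain` requires:

```ts
import { BaseChain, ChainValues } from "langchain/chains";

// A toy chain: _call holds the core logic, while the inherited call()
// wraps it with the memory loading/saving documented above.
class EchoChain extends BaseChain {
  async _call(values: ChainValues): Promise<ChainValues> {
    return { output: values.input };
  }
  _chainType(): string {
    return "echo_chain";
  }
  serialize(): never {
    throw new Error("EchoChain is not serializable");
  }
}
```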

@@ -5,14 +5,52 @@ "use strict";

const index_1 = require("./index");
const chainClasses = [index_1.LLMChain, index_1.StuffDocumentsChain, index_1.VectorDBQAChain, index_1.ChatVectorDBQAChain];
/**
* Base interface that all chains must implement.
*/
class BaseChain {
call(values) {
constructor() {
Object.defineProperty(this, "memory", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
}
/**
* Run the core logic of this chain and add to output if desired.
*
* Wraps {@link _call} and handles memory.
*/
async call(values) {
const fullValues = structuredClone(values);
if (!(this.memory == null)) {
const newValues = await this.memory.loadMemoryVariables(values);
for (const [key, value] of Object.entries(newValues)) {
fullValues[key] = value;
}
}
// TODO(sean) add callback support
return this._call(values);
const outputValues = this._call(fullValues);
if (!(this.memory == null)) {
this.memory.saveContext(values, outputValues);
}
return outputValues;
}
/**
* Call the chain on all inputs in the list
*/
apply(inputs) {
return inputs.map(this.call);
}
static deserialize(data) {
/**
* Load a chain from a json-like object describing it.
*/
static deserialize(data, values = {}) {
switch (data._type) {
case "llm_chain":
return index_1.LLMChain.deserialize(data);
case "stuff_documents_chain":
return index_1.StuffDocumentsChain.deserialize(data);
case "vector_db_qa":
return index_1.VectorDBQAChain.deserialize(data, values);
default:

@@ -19,0 +57,0 @@ throw new Error(`Invalid prompt type in config: ${data._type}`);

export { BaseChain, ChainValues } from "./base";
export { SerializedLLMChain, LLMChain } from "./llm_chain";
export { SerializedLLMChain, LLMChain, ConversationChain } from "./llm_chain";
export { SerializedStuffDocumentsChain, StuffDocumentsChain, } from "./combine_docs_chain";
export { ChatVectorDBQAChain, SerializedChatVectorDBQAChain } from "./chat_vector_db_chain";
export { VectorDBQAChain, SerializedVectorDBQAChain } from "./vector_db_qa";
export { loadChain } from "./load";
export { loadQAChain } from "./question_answering/load";
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadChain = exports.LLMChain = exports.BaseChain = void 0;
exports.loadQAChain = exports.loadChain = exports.VectorDBQAChain = exports.ChatVectorDBQAChain = exports.StuffDocumentsChain = exports.ConversationChain = exports.LLMChain = exports.BaseChain = void 0;
var base_1 = require("./base");

@@ -8,4 +8,13 @@ Object.defineProperty(exports, "BaseChain", { enumerable: true, get: function () { return base_1.BaseChain; } });

Object.defineProperty(exports, "LLMChain", { enumerable: true, get: function () { return llm_chain_1.LLMChain; } });
Object.defineProperty(exports, "ConversationChain", { enumerable: true, get: function () { return llm_chain_1.ConversationChain; } });
var combine_docs_chain_1 = require("./combine_docs_chain");
Object.defineProperty(exports, "StuffDocumentsChain", { enumerable: true, get: function () { return combine_docs_chain_1.StuffDocumentsChain; } });
var chat_vector_db_chain_1 = require("./chat_vector_db_chain");
Object.defineProperty(exports, "ChatVectorDBQAChain", { enumerable: true, get: function () { return chat_vector_db_chain_1.ChatVectorDBQAChain; } });
var vector_db_qa_1 = require("./vector_db_qa");
Object.defineProperty(exports, "VectorDBQAChain", { enumerable: true, get: function () { return vector_db_qa_1.VectorDBQAChain; } });
var load_1 = require("./load");
Object.defineProperty(exports, "loadChain", { enumerable: true, get: function () { return load_1.loadChain; } });
var load_2 = require("./question_answering/load");
Object.defineProperty(exports, "loadQAChain", { enumerable: true, get: function () { return load_2.loadQAChain; } });
//# sourceMappingURL=index.js.map
import { BaseChain, ChainValues } from "./index";
import { BaseLLM, SerializedLLM } from "../llms";
import { BaseMemory } from "../memory";
import { BasePromptTemplate, SerializedBasePromptTemplate } from "../prompt";
export interface LLMChainInput {
/** Prompt object to use */
prompt: BasePromptTemplate;
/** LLM Wrapper to use */
llm: BaseLLM;
/** @ignore */
outputKey: string;

@@ -16,2 +20,14 @@ }

};
/**
* Chain to run queries against LLMs.
* @augments BaseChain
* @augments LLMChainInput
*
* @example
* ```ts
* import { LLMChain, OpenAI, PromptTemplate } from "langchain";
* const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke");
* const chain = new LLMChain({ llm: new OpenAI(), prompt });
* ```
*/
export declare class LLMChain extends BaseChain implements LLMChainInput {

@@ -27,2 +43,13 @@ prompt: BasePromptTemplate;

_call(values: ChainValues): Promise<ChainValues>;
/**
* Format prompt with values and pass to LLM
*
* @param values - keys to pass to prompt template
* @returns Completion from LLM.
*
* @example
* ```ts
* llm.predict({ adjective: "funny" })
* ```
*/
predict(values: ChainValues): Promise<string>;

@@ -33,1 +60,9 @@ _chainType(): "llm_chain";

}
export declare class ConversationChain extends LLMChain {
constructor(fields: {
llm: BaseLLM;
prompt?: BasePromptTemplate;
outputKey?: string;
memory?: BaseMemory;
});
}
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LLMChain = void 0;
exports.ConversationChain = exports.LLMChain = void 0;
const index_1 = require("./index");
const llms_1 = require("../llms");
const memory_1 = require("../memory");
const prompt_1 = require("../prompt");
const util_1 = require("../util");
/**
* Chain to run queries against LLMs.
* @augments BaseChain
* @augments LLMChainInput
*
* @example
* ```ts
* import { LLMChain, OpenAI, PromptTemplate } from "langchain";
* const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke");
* const chain = new LLMChain({ llm: new OpenAI(), prompt });
* ```
*/
class LLMChain extends index_1.BaseChain {

@@ -43,19 +47,26 @@ constructor(fields) {

}
_call(values) {
return __awaiter(this, void 0, void 0, function* () {
let stop;
if ("stop" in values && Array.isArray(values.stop)) {
stop = values.stop;
}
const formattedString = this.prompt.format(values);
const llmResult = yield this.llm.call(formattedString, stop);
const result = { [this.outputKey]: llmResult };
return result;
});
async _call(values) {
let stop;
if ("stop" in values && Array.isArray(values.stop)) {
stop = values.stop;
}
const formattedString = this.prompt.format(values);
const llmResult = await this.llm.call(formattedString, stop);
const result = { [this.outputKey]: llmResult };
return result;
}
predict(values) {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.call(values);
return output[this.outputKey];
});
/**
* Format prompt with values and pass to LLM
*
* @param values - keys to pass to prompt template
* @returns Completion from LLM.
*
* @example
* ```ts
* llm.predict({ adjective: "funny" })
* ```
*/
async predict(values) {
const output = await this.call(values);
return output[this.outputKey];
}

@@ -65,10 +76,8 @@ _chainType() {

}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const serializedLLM = (0, util_1.resolveConfigFromFile)("llm", data);
const serializedPrompt = (0, util_1.resolveConfigFromFile)("prompt", data);
return new LLMChain({
llm: yield llms_1.BaseLLM.deserialize(serializedLLM),
prompt: yield prompt_1.BasePromptTemplate.deserialize(serializedPrompt),
});
static async deserialize(data) {
const serializedLLM = (0, util_1.resolveConfigFromFile)("llm", data);
const serializedPrompt = (0, util_1.resolveConfigFromFile)("prompt", data);
return new LLMChain({
llm: await llms_1.BaseLLM.deserialize(serializedLLM),
prompt: await prompt_1.BasePromptTemplate.deserialize(serializedPrompt),
});

@@ -85,2 +94,25 @@ }

exports.LLMChain = LLMChain;
// eslint-disable-next-line max-len
const defaultTemplate = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
Current conversation:
{history}
Human: {input}
AI:`;
const defaultPrompt = new prompt_1.PromptTemplate({
template: defaultTemplate,
inputVariables: ["history", "input"],
});
class ConversationChain extends LLMChain {
constructor(fields) {
var _a, _b, _c;
super({
prompt: (_a = fields.prompt) !== null && _a !== void 0 ? _a : defaultPrompt,
llm: fields.llm,
outputKey: (_b = fields.outputKey) !== null && _b !== void 0 ? _b : "response",
});
this.memory = (_c = fields.memory) !== null && _c !== void 0 ? _c : new memory_1.BufferMemory();
}
}
exports.ConversationChain = ConversationChain;
//# sourceMappingURL=llm_chain.js.map
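A short sketch of the new `ConversationChain` (which defaults to `BufferMemory` and an `outputKey` of `response`, per the constructor above; run inside an async context):

```ts
import { ConversationChain } from "langchain/chains";
import { OpenAI } from "langchain";

const chain = new ConversationChain({ llm: new OpenAI({ temperature: 0 }) });
await chain.call({ input: "Hi! My name is Jim." });
const res = await chain.call({ input: "What's my name?" });
// res.response should mention "Jim": the first exchange was saved to
// BufferMemory and injected into {history} on the second call.
```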
import { BaseChain } from ".";
export declare const loadChain: (uri: string) => Promise<BaseChain>;
export type LoadValues = Record<string, any>;
/**
* Load a chain from {@link https://github.com/hwchase17/langchain-hub | LangchainHub} or local filesystem.
*
* @example
* Loading from LangchainHub:
* ```ts
* import { loadChain } from "langchain/chains";
* const chain = await loadChain("lc://chains/hello-world/chain.json");
* const res = await chain.call({ topic: "my favorite color" });
* ```
*
* @example
* Loading from local filesystem:
* ```ts
* import { loadChain } from "langchain/chains";
* const chain = await loadChain("/path/to/chain.json");
* ```
*/
export declare const loadChain: (uri: string, values?: LoadValues) => Promise<BaseChain>;
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -16,15 +7,32 @@ exports.loadChain = void 0;

const util_1 = require("../util");
const loadChainFromFile = (file) => __awaiter(void 0, void 0, void 0, function* () {
const loadChainFromFile = async (file, values = {}) => {
const serialized = (0, util_1.parseFileConfig)(file);
console.log({ serialized });
return _1.BaseChain.deserialize(serialized);
});
return _1.BaseChain.deserialize(serialized, values);
};
const loadChain = (uri) => __awaiter(void 0, void 0, void 0, function* () {
const hubResult = yield (0, hub_1.loadFromHub)(uri, loadChainFromFile, "chains", new Set(["json", "yaml"]));
/**
* Load a chain from {@link https://github.com/hwchase17/langchain-hub | LangchainHub} or local filesystem.
*
* @example
* Loading from LangchainHub:
* ```ts
* import { loadChain } from "langchain/chains";
* const chain = await loadChain("lc://chains/hello-world/chain.json");
* const res = await chain.call({ topic: "my favorite color" });
* ```
*
* @example
* Loading from local filesystem:
* ```ts
* import { loadChain } from "langchain/chains";
* const chain = await loadChain("/path/to/chain.json");
* ```
*/
const loadChain = async (uri, values = {}) => {
const hubResult = await (0, hub_1.loadFromHub)(uri, (uri) => loadChainFromFile(uri, values), "chains", new Set(["json", "yaml"]), values);
if (hubResult) {
return hubResult;
}
return loadChainFromFile(uri);
});
return loadChainFromFile(uri, values);
};
exports.loadChain = loadChain;
//# sourceMappingURL=load.js.map
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -17,3 +8,3 @@ const globals_1 = require("@jest/globals");

const load_1 = require("../load");
(0, globals_1.test)("Test OpenAI", () => __awaiter(void 0, void 0, void 0, function* () {
(0, globals_1.test)("Test OpenAI", async () => {
const model = new openai_1.OpenAI({});

@@ -25,10 +16,16 @@ const prompt = new prompt_1.PromptTemplate({

const chain = new llm_chain_1.LLMChain({ prompt, llm: model });
const res = yield chain.call({ foo: "my favorite color" });
const res = await chain.call({ foo: "my favorite color" });
console.log({ res });
}));
(0, globals_1.test)("Load chain from hub", () => __awaiter(void 0, void 0, void 0, function* () {
const chain = yield (0, load_1.loadChain)("lc://chains/hello-world/chain.json");
const res = yield chain.call({ topic: "my favorite color" });
});
(0, globals_1.test)("Load chain from hub", async () => {
const chain = await (0, load_1.loadChain)("lc://chains/hello-world/chain.json");
const res = await chain.call({ topic: "my favorite color" });
console.log({ res });
}));
});
(0, globals_1.test)("Test ConversationChain", async () => {
const model = new openai_1.OpenAI({});
const chain = new llm_chain_1.ConversationChain({ llm: model });
const res = await chain.call({ input: "my favorite color" });
console.log({ res });
});
//# sourceMappingURL=llm_chain.test.js.map

@@ -5,20 +5,61 @@ import { LLMCallbackManager, LLMResult } from "./index";

} & Record<string, any>;
/**
* LLM Wrapper. Provides a {@link call} (and a {@link generate}) function that takes in a prompt (or prompts) and returns a string.
*/
export declare abstract class BaseLLM {
/**
* The name of the LLM class
*/
name: string;
cache?: boolean;
callbackManager: LLMCallbackManager;
/**
* Whether to print out response text.
*/
verbose?: boolean;
constructor(callbackManager?: LLMCallbackManager, verbose?: boolean);
/**
* Run the LLM on the given prompts and input.
*/
abstract _generate(prompts: string[], stop?: string[]): Promise<LLMResult>;
/** @ignore */
_generateUncached(prompts: string[], stop?: string[]): Promise<LLMResult>;
/**
* Run the LLM on the given prompts and input, handling caching.
*/
generate(prompts: string[], stop?: string[]): Promise<LLMResult>;
/**
* Convenience wrapper for {@link generate} that takes in a single string prompt and returns a single string output.
*/
call(prompt: string, stop?: string[]): Promise<string>;
/**
* Get the identifying parameters of the LLM.
*/
_identifyingParams(): Record<string, any>;
/**
* Return the string type key uniquely identifying this class of LLM.
*/
abstract _llmType(): string;
/**
* Return a json-like object representing this LLM.
*/
serialize(): SerializedLLM;
/**
* Load an LLM from a json-like object describing it.
*/
static deserialize(data: SerializedLLM): Promise<BaseLLM>;
}
/**
* LLM class that provides a simpler interface to subclass than {@link BaseLLM}.
*
* Requires only implementing a simpler {@link _call} method instead of {@link _generate}.
*
* @augments BaseLLM
*/
export declare abstract class LLM extends BaseLLM {
/**
* Run the LLM on the given prompt and input.
*/
abstract _call(prompt: string, stop?: string[]): Promise<string>;
_generate(prompts: string[], stop?: string[]): Promise<LLMResult>;
}
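Since `LLM` leaves only `_call` (plus `_llmType` from `BaseLLM`) abstract, a hypothetical provider can be sketched in a few lines:

```ts
import { LLM } from "langchain/llms";

// A toy LLM that echoes its prompt; BaseLLM supplies generate(), call(),
// and caching on top of this single method.
class EchoLLM extends LLM {
  async _call(prompt: string, _stop?: string[]): Promise<string> {
    return `echo: ${prompt}`;
  }
  _llmType(): string {
    return "echo";
  }
}
```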
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -39,4 +19,10 @@ exports.LLM = exports.BaseLLM = void 0;

const cache = new cache_1.InMemoryCache();
/**
* LLM Wrapper. Provides a {@link call} (and a {@link generate}) function that takes in a prompt (or prompts) and returns a string.
*/
class BaseLLM {
constructor(callbackManager, verbose) {
/**
* The name of the LLM class
*/
Object.defineProperty(this, "name", {

@@ -60,2 +46,5 @@ enumerable: true,

});
/**
* Whether to print out response text.
*/
Object.defineProperty(this, "verbose", {

@@ -70,59 +59,63 @@ enumerable: true,

}
_generateUncached(prompts, stop) {
return __awaiter(this, void 0, void 0, function* () {
this.callbackManager.handleStart({ name: this.name }, prompts, this.verbose);
let output;
try {
output = yield this._generate(prompts, stop);
}
catch (err) {
this.callbackManager.handleError(`${err}`, this.verbose);
throw err;
}
this.callbackManager.handleEnd(output, this.verbose);
return output;
});
/** @ignore */
async _generateUncached(prompts, stop) {
this.callbackManager.handleStart({ name: this.name }, prompts, this.verbose);
let output;
try {
output = await this._generate(prompts, stop);
}
catch (err) {
this.callbackManager.handleError(`${err}`, this.verbose);
throw err;
}
this.callbackManager.handleEnd(output, this.verbose);
return output;
}
generate(prompts, stop) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
if (!Array.isArray(prompts)) {
throw new Error("Argument 'prompts' is expected to be a string[]");
}
if (this.cache === true && cache === null) {
throw new Error("Requested cache, but no cache found");
}
if (cache === null || this.cache === false) {
return this._generateUncached(prompts, stop);
}
const params = this.serialize();
params.stop = stop;
const llmStringKey = `${Object.entries(params).sort()}`;
const missingPromptIndices = [];
const generations = prompts.map((prompt, index) => {
const result = cache.lookup(prompt, llmStringKey);
if (!result) {
missingPromptIndices.push(index);
}
return result;
});
let llmOutput = {};
if (missingPromptIndices.length > 0) {
const results = yield this._generateUncached(missingPromptIndices.map((i) => prompts[i]), stop);
results.generations.forEach((generation, index) => {
const promptIndex = missingPromptIndices[index];
generations[promptIndex] = generation;
cache.update(prompts[promptIndex], llmStringKey, generation);
});
llmOutput = (_a = results.llmOutput) !== null && _a !== void 0 ? _a : {};
}
return { generations, llmOutput };
});
}
/**
* Run the LLM on the given prompts and input, handling caching.
*/
async generate(prompts, stop) {
var _a;
if (!Array.isArray(prompts)) {
throw new Error("Argument 'prompts' is expected to be a string[]");
}
if (this.cache === true && cache === null) {
throw new Error("Requested cache, but no cache found");
}
if (cache === null || this.cache === false) {
return this._generateUncached(prompts, stop);
}
const params = this.serialize();
params.stop = stop;
const llmStringKey = `${Object.entries(params).sort()}`;
const missingPromptIndices = [];
const generations = prompts.map((prompt, index) => {
const result = cache.lookup(prompt, llmStringKey);
if (!result) {
missingPromptIndices.push(index);
}
return result;
});
let llmOutput = {};
if (missingPromptIndices.length > 0) {
const results = await this._generateUncached(missingPromptIndices.map((i) => prompts[i]), stop);
results.generations.forEach((generation, index) => {
const promptIndex = missingPromptIndices[index];
generations[promptIndex] = generation;
cache.update(prompts[promptIndex], llmStringKey, generation);
});
llmOutput = (_a = results.llmOutput) !== null && _a !== void 0 ? _a : {};
}
return { generations, llmOutput };
}
call(prompt, stop) {
return __awaiter(this, void 0, void 0, function* () {
const { generations } = yield this.generate([prompt], stop);
return generations[0][0].text;
});
/**
* Convenience wrapper for {@link generate} that takes in a single string prompt and returns a single string output.
*/
async call(prompt, stop) {
const { generations } = await this.generate([prompt], stop);
return generations[0][0].text;
}
/**
* Get the identifying parameters of the LLM.
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any

@@ -132,29 +125,41 @@ _identifyingParams() {

}
/**
* Return a json-like object representing this LLM.
*/
serialize() {
return Object.assign(Object.assign({}, this._identifyingParams()), { _type: this._llmType() });
return {
...this._identifyingParams(),
_type: this._llmType(),
};
}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const { _type } = data, rest = __rest(data, ["_type"]);
const Cls = {
openai: index_1.OpenAI,
}[_type];
if (Cls === undefined) {
throw new Error(`Cannot load LLM with type ${_type}`);
}
return new Cls(rest);
});
/**
* Load an LLM from a json-like object describing it.
*/
static async deserialize(data) {
const { _type, ...rest } = data;
const Cls = {
openai: index_1.OpenAI,
}[_type];
if (Cls === undefined) {
throw new Error(`Cannot load LLM with type ${_type}`);
}
return new Cls(rest);
}
}
exports.BaseLLM = BaseLLM;
/**
* LLM class that provides a simpler interface to subclass than {@link BaseLLM}.
*
* Requires only implementing a simpler {@link _call} method instead of {@link _generate}.
*
* @augments BaseLLM
*/
class LLM extends BaseLLM {
_generate(prompts, stop) {
return __awaiter(this, void 0, void 0, function* () {
const generations = [];
for (let i = 0; i < prompts.length; i += 1) {
const text = yield this._call(prompts[i], stop);
generations.push([{ text }]);
}
return { generations };
});
async _generate(prompts, stop) {
const generations = [];
for (let i = 0; i < prompts.length; i += 1) {
const text = await this._call(prompts[i], stop);
generations.push([{ text }]);
}
return { generations };
}

@@ -161,0 +166,0 @@ }

@@ -11,9 +11,28 @@ export { BaseLLM, LLM, SerializedLLM } from "./base";

};
/**
* Output of a single generation.
*/
export type Generation = {
/**
* Generated text output
*/
text: string;
/**
* Raw generation info response from the provider.
* May include things like reason for finishing (e.g. in {@link OpenAI})
*/
generationInfo?: Record<string, any>;
};
/**
* Contains all relevant information returned by an LLM.
*/
export type LLMResult = {
/**
* List of the things generated. Each input could have multiple {@link Generation | generations}, hence this is a list of lists.
*/
generations: Generation[][];
/**
* Dictionary of arbitrary LLM-provider specific output.
*/
llmOutput?: Record<string, any>;
};
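An illustrative value matching these types (contents hypothetical):

```ts
import type { LLMResult } from "langchain/llms";

// Two prompts in, one generation each out; generationInfo and llmOutput
// are provider-specific and shown here only as examples.
const result: LLMResult = {
  generations: [
    [{ text: "Paris", generationInfo: { finish_reason: "stop" } }],
    [{ text: "Berlin" }],
  ],
  llmOutput: { tokenUsage: { totalTokens: 12 } },
};
```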
import { BaseLLM } from "./base";
export declare const loadLLM: typeof BaseLLM.deserialize;
export declare const loadLLMFromFile: (file: string) => Promise<BaseLLM>;
/**
* Load an LLM from a local file.
*
* @example
* ```ts
* import { loadLLM } from "langchain/llms";
* const model = await loadLLM("/path/to/llm.json");
* ```
*/
export declare const loadLLM: (file: string) => Promise<BaseLLM>;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadLLMFromFile = exports.loadLLM = void 0;
exports.loadLLM = void 0;
const base_1 = require("./base");
const util_1 = require("../util");
exports.loadLLM = base_1.BaseLLM.deserialize;
const loadLLMFromFile = (file) => (0, exports.loadLLM)((0, util_1.parseFileConfig)(file));
exports.loadLLMFromFile = loadLLMFromFile;
/**
* Load an LLM from a local file.
*
* @example
* ```ts
* import { loadLLM } from "langchain/llms";
* const model = await loadLLM("/path/to/llm.json");
* ```
*/
const loadLLM = (file) => base_1.BaseLLM.deserialize((0, util_1.parseFileConfig)(file));
exports.loadLLM = loadLLM;
//# sourceMappingURL=load.js.map
import type { CreateCompletionRequest } from "openai";
import { BaseLLM, LLMResult, LLMCallbackManager } from ".";
interface ModelParams {
/** Sampling temperature to use */
temperature: number;
/**
* Maximum number of tokens to generate in the completion. -1 returns as many
* tokens as possible given the prompt and the model's maximum context size.
*/
maxTokens: number;
/** Total probability mass of tokens to consider at each step */
topP: number;
/** Penalizes repeated tokens according to frequency */
frequencyPenalty: number;
/** Penalizes repeated tokens */
presencePenalty: number;
/** Number of completions to generate for each prompt */
n: number;
/** Generates `bestOf` completions server side and returns the "best" */
bestOf: number;
/** Dictionary used to adjust the probability of specific tokens being generated */
logitBias?: Record<string, number>;
}
/**
* Input to OpenAI class.
* @augments ModelParams
*/
interface OpenAIInput extends ModelParams {
/** Model name to use */
modelName: string;
/** Holds any additional parameters that are valid to pass to {@link
* https://platform.openai.com/docs/api-reference/completions/create |
* `openai.createCompletion`} that are not explicitly specified on this class.
*/
modelKwargs?: Kwargs;
/** Batch size to use when passing multiple documents to generate */
batchSize: number;
/** Maximum number of retries to make when generating */
maxRetries: number;
/** List of stop words to use when generating */
stop?: string[];
}
type Kwargs = Record<string, any>;
export declare class OpenAI extends BaseLLM implements ModelParams {
/**
* Wrapper around OpenAI large language models.
*
* To use you should have the `openai` package installed, with the
* `OPENAI_API_KEY` environment variable set.
*
* @remarks
* Any parameters that are valid to be passed to {@link
* https://platform.openai.com/docs/api-reference/completions/create |
* `openai.createCompletion`} can be passed through {@link modelKwargs}, even
* if not explicitly available on this class.
*
* @augments BaseLLM
* @augments OpenAIInput
*/
export declare class OpenAI extends BaseLLM implements OpenAIInput {
temperature: number;

@@ -29,13 +74,14 @@ maxTokens: number;

private client;
constructor(fields?: Partial<ModelParams> & {
constructor(fields?: Partial<OpenAIInput> & {
callbackManager?: LLMCallbackManager;
verbose?: boolean;
modelName?: string;
modelKwargs?: Kwargs;
openAIApiKey?: string;
batchSize?: number;
maxRetries?: number;
stop?: string[];
});
/**
* Get the parameters used to invoke the model
*/
invocationParams(): CreateCompletionRequest & Kwargs;
/**
* Get the identifying parameters for the model
*/
identifyingParams(): {

@@ -60,3 +106,19 @@ model: string;

};
/**
* Call out to OpenAI's endpoint with k unique prompts
*
* @param prompts - The prompts to pass into the model.
* @param [stop] - Optional list of stop words to use when generating.
*
* @returns The full LLM output.
*
* @example
* ```ts
* import { OpenAI } from "langchain/llms";
* const openai = new OpenAI();
* const response = await openai.generate(["Tell me a joke."]);
* ```
*/
_generate(prompts: string[], stop?: string[]): Promise<LLMResult>;
/** @ignore */
completionWithRetry(request: CreateCompletionRequest): Promise<import("axios").AxiosResponse<import("openai").CreateCompletionResponse, any>>;

@@ -63,0 +125,0 @@ _llmType(): string;
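Pulling the declarations above together, a hedged configuration sketch; the model name and the extra kwarg are illustrative, and anything in `modelKwargs` is forwarded verbatim to `createCompletion`:

```ts
import { OpenAI } from "langchain/llms";

const model = new OpenAI({
  modelName: "text-davinci-003", // assumption: any completions-capable model name works
  temperature: 0.7,
  maxTokens: 256,
  n: 2, // two completions per prompt
  modelKwargs: { user: "example-user" }, // passed through to createCompletion untouched
});
const res = await model.generate(["Tell me a joke.", "Tell me another."]);
```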

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAI = void 0;
const exponential_backoff_1 = require("exponential-backoff");
const util_1 = require("../util");
const _1 = require(".");

@@ -18,3 +10,3 @@ let Configuration = null;

try {
// eslint-disable-next-line global-require
// eslint-disable-next-line global-require,import/no-extraneous-dependencies
({ Configuration, OpenAIApi } = require("openai"));

@@ -25,9 +17,17 @@ }

}
const chunkArray = (arr, chunkSize) => arr.reduce((chunks, elem, index) => {
const chunkIndex = Math.floor(index / chunkSize);
const chunk = chunks[chunkIndex] || [];
// eslint-disable-next-line no-param-reassign
chunks[chunkIndex] = chunk.concat([elem]);
return chunks;
}, []);
/**
* Wrapper around OpenAI large language models.
*
* To use you should have the `openai` package installed, with the
* `OPENAI_API_KEY` environment variable set.
*
* @remarks
* Any parameters that are valid to be passed to {@link
* https://platform.openai.com/docs/api-reference/completions/create |
* `openai.createCompletion`} can be passed through {@link modelKwargs}, even
* if not explicitly available on this class.
*
* @augments BaseLLM
* @augments OpenAIInput
*/
class OpenAI extends _1.BaseLLM {

@@ -122,3 +122,3 @@ constructor(fields) {

if (Configuration === null || OpenAIApi === null) {
throw new Error("Please install openai as a dependency with, e.g. `npm install -S openai`");
throw new Error("Please install openai as a dependency with, e.g. `npm i openai`");
}

@@ -143,50 +143,88 @@ this.modelName = (_a = fields === null || fields === void 0 ? void 0 : fields.modelName) !== null && _a !== void 0 ? _a : this.modelName;

}
/**
* Get the parameters used to invoke the model
*/
invocationParams() {
return Object.assign({ model: this.modelName, temperature: this.temperature, max_tokens: this.maxTokens, top_p: this.topP, frequency_penalty: this.frequencyPenalty, presence_penalty: this.presencePenalty, n: this.n, best_of: this.bestOf, logit_bias: this.logitBias, stop: this.stop }, this.modelKwargs);
return {
model: this.modelName,
temperature: this.temperature,
max_tokens: this.maxTokens,
top_p: this.topP,
frequency_penalty: this.frequencyPenalty,
presence_penalty: this.presencePenalty,
n: this.n,
best_of: this.bestOf,
logit_bias: this.logitBias,
stop: this.stop,
...this.modelKwargs,
};
}
/**
* Get the identifying parameters for the model
*/
identifyingParams() {
return Object.assign({ model_name: this.modelName }, this.invocationParams());
return {
model_name: this.modelName,
...this.invocationParams(),
};
}
_generate(prompts, stop) {
/**
* Call out to OpenAI's endpoint with k unique prompts
*
* @param prompts - The prompts to pass into the model.
* @param [stop] - Optional list of stop words to use when generating.
*
* @returns The full LLM output.
*
* @example
* ```ts
* import { OpenAI } from "langchain/llms";
* const openai = new OpenAI();
* const response = await openai.generate(["Tell me a joke."]);
* ```
*/
async _generate(prompts, stop) {
var _a, _b, _c, _d;
return __awaiter(this, void 0, void 0, function* () {
const subPrompts = chunkArray(prompts, this.batchSize);
const choices = [];
const tokenUsage = {};
if (this.stop && stop) {
throw new Error("Stop found in input and default params");
const subPrompts = (0, util_1.chunkArray)(prompts, this.batchSize);
const choices = [];
const tokenUsage = {};
if (this.stop && stop) {
throw new Error("Stop found in input and default params");
}
const params = this.invocationParams();
params.stop = stop !== null && stop !== void 0 ? stop : params.stop;
for (let i = 0; i < subPrompts.length; i += 1) {
const { data } = await this.completionWithRetry({
...params,
prompt: subPrompts[i],
});
choices.push(...data.choices);
const { completion_tokens: completionTokens, prompt_tokens: promptTokens, total_tokens: totalTokens, } = (_a = data.usage) !== null && _a !== void 0 ? _a : {};
if (completionTokens) {
tokenUsage.completionTokens =
((_b = tokenUsage.completionTokens) !== null && _b !== void 0 ? _b : 0) + completionTokens;
}
const params = this.invocationParams();
params.stop = stop !== null && stop !== void 0 ? stop : params.stop;
for (let i = 0; i < subPrompts.length; i += 1) {
const { data } = yield this.completionWithRetry(Object.assign(Object.assign({}, params), { prompt: subPrompts[i] }));
choices.push(...data.choices);
const { completion_tokens: completionTokens, prompt_tokens: promptTokens, total_tokens: totalTokens, } = (_a = data.usage) !== null && _a !== void 0 ? _a : {};
if (completionTokens) {
tokenUsage.completionTokens =
((_b = tokenUsage.completionTokens) !== null && _b !== void 0 ? _b : 0) + completionTokens;
}
if (promptTokens) {
tokenUsage.promptTokens = ((_c = tokenUsage.promptTokens) !== null && _c !== void 0 ? _c : 0) + promptTokens;
}
if (totalTokens) {
tokenUsage.totalTokens = ((_d = tokenUsage.totalTokens) !== null && _d !== void 0 ? _d : 0) + totalTokens;
}
if (promptTokens) {
tokenUsage.promptTokens = ((_c = tokenUsage.promptTokens) !== null && _c !== void 0 ? _c : 0) + promptTokens;
}
const generations = chunkArray(choices, this.n).map((promptChoices) => promptChoices.map((choice) => {
var _a;
return ({
text: (_a = choice.text) !== null && _a !== void 0 ? _a : "",
generationInfo: {
finishReason: choice.finish_reason,
logprobs: choice.logprobs,
},
});
}));
return {
generations,
llmOutput: { tokenUsage },
};
});
if (totalTokens) {
tokenUsage.totalTokens = ((_d = tokenUsage.totalTokens) !== null && _d !== void 0 ? _d : 0) + totalTokens;
}
}
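// "choices" is flat across all batched prompts; the API returned n completions per prompt, so regrouping by n yields one Generation[] per input prompt.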
const generations = (0, util_1.chunkArray)(choices, this.n).map((promptChoices) => promptChoices.map((choice) => {
var _a;
return ({
text: (_a = choice.text) !== null && _a !== void 0 ? _a : "",
generationInfo: {
finishReason: choice.finish_reason,
logprobs: choice.logprobs,
},
});
}));
return {
generations,
llmOutput: { tokenUsage },
};
}
/** @ignore */
completionWithRetry(request) {

@@ -193,0 +231,0 @@ const makeCompletionRequest = () => this.client.createCompletion(request);

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const globals_1 = require("@jest/globals");
const openai_1 = require("../openai");
(0, globals_1.test)("Test OpenAI", () => __awaiter(void 0, void 0, void 0, function* () {
(0, globals_1.test)("Test OpenAI", async () => {
const model = new openai_1.OpenAI({ maxTokens: 5 });
const res = yield model.call("Print hello world");
const res = await model.call("Print hello world");
console.log({ res });
}));
});
//# sourceMappingURL=openai.test.js.map

@@ -6,6 +6,20 @@ import { BaseOutputParser } from "./parser";

export type InputValues = Record<string, any>;
/**
* Input common to all prompt templates.
*/
export interface BasePromptTemplateInput {
/**
* A list of variable names the prompt template expects
*/
inputVariables: string[];
/**
* How to parse the output of calling an LLM on this formatted prompt
*/
outputParser?: BaseOutputParser;
}
/**
* Base class for prompt templates. Exposes a format method that returns a
* string prompt given a set of input values.
* @augments BasePromptTemplateInput
*/
export declare abstract class BasePromptTemplate implements BasePromptTemplateInput {

@@ -15,7 +29,32 @@ inputVariables: string[];

constructor(input: BasePromptTemplateInput);
/**
* Format the prompt given the input values.
*
* @param inputValues - A dictionary of arguments to be passed to the prompt template.
* @returns A formatted prompt string.
*
* @example
* ```ts
* prompt.format({ foo: "bar" });
* ```
*/
abstract format(values: InputValues): string;
/**
* Return the string type key uniquely identifying this class of prompt template.
*/
abstract _getPromptType(): string;
/**
* Return a json-like object representing this prompt template.
*/
abstract serialize(): SerializedBasePromptTemplate;
/**
* Load a prompt template from a json-like object describing it.
*
* @remarks
* Deserializing needs to be async because templates (e.g. {@link FewShotPromptTemplate}) can
* reference remote resources that we read asynchronously with a web
* request.
*/
static deserialize(data: SerializedBasePromptTemplate): Promise<BasePromptTemplate>;
}
export {};
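A serialize/deserialize round-trip sketch using the declarations above (template contents illustrative):

```ts
import { PromptTemplate, BasePromptTemplate } from "langchain/prompt";

const prompt = new PromptTemplate({ inputVariables: ["foo"], template: "Say {foo}" });
// deserialize is async because some templates resolve remote resources.
const restored = await BasePromptTemplate.deserialize(prompt.serialize());
console.log(restored.format({ foo: "hi" })); // "Say hi"
```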
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -15,2 +6,7 @@ exports.BasePromptTemplate = void 0;

const templateClasses = [index_1.PromptTemplate, index_1.FewShotPromptTemplate];
/**
* Base class for prompt templates. Exposes a format method that returns a
* string prompt given a set of input values.
* @augments BasePromptTemplateInput
*/
class BasePromptTemplate {

@@ -36,18 +32,21 @@ constructor(input) {

}
// Deserializing needs to be async because templates (e.g. few_shot) can
// reference remote resources that we read asynchronously with a web
// request.
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
switch (data._type) {
case "prompt":
return index_1.PromptTemplate.deserialize(data);
case undefined:
return index_1.PromptTemplate.deserialize(Object.assign(Object.assign({}, data), { _type: "prompt" }));
case "few_shot":
return index_1.FewShotPromptTemplate.deserialize(data);
default:
throw new Error(`Invalid prompt type in config: ${data._type}`);
}
});
/**
* Load a prompt template from a json-like object describing it.
*
* @remarks
* Deserializing needs to be async because templates (e.g. {@link FewShotPromptTemplate}) can
* reference remote resources that we read asynchronously with a web
* request.
*/
static async deserialize(data) {
switch (data._type) {
case "prompt":
return index_1.PromptTemplate.deserialize(data);
case undefined:
return index_1.PromptTemplate.deserialize({ ...data, _type: "prompt" });
case "few_shot":
return index_1.FewShotPromptTemplate.deserialize(data);
default:
throw new Error(`Invalid prompt type in config: ${data._type}`);
}
}

@@ -54,0 +53,0 @@ }

@@ -22,11 +22,46 @@ import { BasePromptTemplate, InputValues, BasePromptTemplateInput } from "./index";

export interface FewShotPromptTemplateInput extends BasePromptTemplateInput {
/**
* Examples to format into the prompt. Exactly one of this or
* {@link exampleSelector} must be
* provided.
*/
examples?: Example[];
/**
* An {@link ExampleSelector} used to choose the examples to format into the prompt. Exactly one of this or
* {@link examples} must be
* provided.
*/
exampleSelector?: ExampleSelector;
/**
* A {@link PromptTemplate} used to format a single example.
*/
examplePrompt: PromptTemplate;
exampleSelector?: ExampleSelector;
/**
* String separator used to join the prefix, the examples, and suffix.
*/
exampleSeparator: string;
/**
* A prompt template string to put before the examples.
*
* @defaultValue `""`
*/
prefix: string;
/**
* A prompt template string to put after the examples.
*/
suffix: string;
/**
* The format of the prompt template. Options are: 'f-string', 'jinja-2'
*/
templateFormat: TemplateFormat;
/**
* Whether or not to try validating the template on initialization.
*/
validateTemplate?: boolean;
}
/**
* Prompt template that contains few-shot examples.
* @augments BasePromptTemplate
* @augments FewShotPromptTemplateInput
*/
export declare class FewShotPromptTemplate extends BasePromptTemplate implements FewShotPromptTemplateInput {

@@ -33,0 +68,0 @@ examples?: InputValues[];
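For intuition, a construction sketch wiring the fields above together (the example content is illustrative):

```ts
import { PromptTemplate, FewShotPromptTemplate } from "langchain/prompt";

const examplePrompt = new PromptTemplate({
  inputVariables: ["word", "antonym"],
  template: "Word: {word}\nAntonym: {antonym}",
});
const fewShot = new FewShotPromptTemplate({
  examples: [{ word: "happy", antonym: "sad" }], // exactly one of examples/exampleSelector
  examplePrompt,
  exampleSeparator: "\n\n",
  prefix: "Give the antonym of every input.",
  suffix: "Word: {input}\nAntonym:",
  templateFormat: "f-string",
  inputVariables: ["input"],
});
console.log(fewShot.format({ input: "tall" }));
```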

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -18,2 +9,7 @@ exports.FewShotPromptTemplate = void 0;

const parser_1 = require("./parser");
/**
* Prompt template that contains few-shot examples.
* @augments BasePromptTemplate
* @augments FewShotPromptTemplateInput
*/
class FewShotPromptTemplate extends index_1.BasePromptTemplate {

@@ -116,26 +112,24 @@ constructor(input) {

}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const serializedPrompt = (0, util_1.resolveConfigFromFile)("example_prompt", data);
const examplePrompt = yield prompt_1.PromptTemplate.deserialize(serializedPrompt);
let examples;
if (typeof data.examples === "string") {
examples = (0, util_1.parseFileConfig)(data.examples, [".json", ".yml", ".yaml"]);
}
else if (Array.isArray(data.examples)) {
examples = data.examples;
}
else {
throw new Error("Invalid examples format. Only list or string are supported.");
}
return new FewShotPromptTemplate({
inputVariables: data.input_variables,
outputParser: data.output_parser && parser_1.BaseOutputParser.deserialize(data.output_parser),
examplePrompt,
examples,
exampleSeparator: data.example_separator,
prefix: (0, util_1.resolveTemplateFromFile)("prefix", data),
suffix: (0, util_1.resolveTemplateFromFile)("suffix", data),
templateFormat: data.template_format,
});
static async deserialize(data) {
const serializedPrompt = (0, util_1.resolveConfigFromFile)("example_prompt", data);
const examplePrompt = await prompt_1.PromptTemplate.deserialize(serializedPrompt);
let examples;
if (typeof data.examples === "string") {
examples = (0, util_1.parseFileConfig)(data.examples, [".json", ".yml", ".yaml"]);
}
else if (Array.isArray(data.examples)) {
examples = data.examples;
}
else {
throw new Error("Invalid examples format. Only list or string are supported.");
}
return new FewShotPromptTemplate({
inputVariables: data.input_variables,
outputParser: data.output_parser && parser_1.BaseOutputParser.deserialize(data.output_parser),
examplePrompt,
examples,
exampleSeparator: data.example_separator,
prefix: (0, util_1.resolveTemplateFromFile)("prefix", data),
suffix: (0, util_1.resolveTemplateFromFile)("suffix", data),
templateFormat: data.template_format,
});

@@ -142,0 +136,0 @@ }

export { BasePromptTemplate, BasePromptTemplateInput, SerializedBasePromptTemplate, InputValues, } from "./base";
export { PromptTemplate, PromptTemplateInput, SerializedPromptTemplate, } from "./prompt";
export { FewShotPromptTemplate, FewShotPromptTemplateInput, SerializedFewShotTemplate, } from "./few_shot";
export { loadPrompt } from "./load";
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.FewShotPromptTemplate = exports.PromptTemplate = exports.BasePromptTemplate = void 0;
exports.loadPrompt = exports.FewShotPromptTemplate = exports.PromptTemplate = exports.BasePromptTemplate = void 0;
var base_1 = require("./base");

@@ -10,2 +10,4 @@ Object.defineProperty(exports, "BasePromptTemplate", { enumerable: true, get: function () { return base_1.BasePromptTemplate; } });

Object.defineProperty(exports, "FewShotPromptTemplate", { enumerable: true, get: function () { return few_shot_1.FewShotPromptTemplate; } });
var load_1 = require("./load");
Object.defineProperty(exports, "loadPrompt", { enumerable: true, get: function () { return load_1.loadPrompt; } });
//# sourceMappingURL=index.js.map
import { BasePromptTemplate } from ".";
/**
* Load a prompt from {@link https://github.com/hwchase17/langchain-hub | LangchainHub} or local filesystem.
*
* @example
* Loading from LangchainHub:
* ```ts
* import { loadPrompt } from "langchain/prompt";
* const prompt = await loadPrompt("lc://prompts/hello-world/prompt.yaml");
* ```
*
* @example
* Loading from local filesystem:
* ```ts
* import { loadPrompt } from "langchain/prompt";
* const prompt = await loadPrompt("/path/to/prompt.json");
* ```
*/
export declare const loadPrompt: (uri: string) => Promise<BasePromptTemplate>;
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -16,5 +7,22 @@ exports.loadPrompt = void 0;

const util_1 = require("../util");
const loadPromptFromFile = (file) => __awaiter(void 0, void 0, void 0, function* () { return _1.BasePromptTemplate.deserialize((0, util_1.parseFileConfig)(file)); });
const loadPrompt = (uri) => __awaiter(void 0, void 0, void 0, function* () {
const hubResult = yield (0, hub_1.loadFromHub)(uri, loadPromptFromFile, "prompts", new Set(["py", "json", "yaml"]));
const loadPromptFromFile = async (file) => _1.BasePromptTemplate.deserialize((0, util_1.parseFileConfig)(file));
/**
* Load a prompt from {@link https://github.com/hwchase17/langchain-hub | LangchainHub} or local filesystem.
*
* @example
* Loading from LangchainHub:
* ```ts
* import { loadPrompt } from "langchain/prompt";
* const prompt = await loadPrompt("lc://prompts/hello-world/prompt.yaml");
* ```
*
* @example
* Loading from local filesystem:
* ```ts
* import { loadPrompt } from "langchain/prompt";
* const prompt = await loadPrompt("/path/to/prompt.json");
* ```
*/
const loadPrompt = async (uri) => {
const hubResult = await (0, hub_1.loadFromHub)(uri, loadPromptFromFile, "prompts", new Set(["py", "json", "yaml"]));
if (hubResult) {

@@ -24,4 +32,4 @@ return hubResult;

return loadPromptFromFile(uri);
});
};
exports.loadPrompt = loadPrompt;
//# sourceMappingURL=load.js.map
export type SerializedOutputParser = SerializedRegexParser | SerializedCommaSeparatedListOutputParser;
/**
* Class to parse the output of an LLM call.
*/
export declare abstract class BaseOutputParser {
/**
* Parse the output of an LLM call.
*
* @param text - LLM output to parse.
* @returns Parsed output.
*/
abstract parse(text: string): string | string[] | Record<string, string>;
/**
* Return the string type key uniquely identifying this class of parser
*/
_type(): string;
/**
* Return a json-like object representing this output parser.
*/
abstract serialize(): SerializedOutputParser;
/**
* Load an output parser from a json-like object describing the parser.
*/
static deserialize(data: SerializedOutputParser): BaseOutputParser;
}
/**
* Class to parse the output of an LLM call to a list.
* @augments BaseOutputParser
*/
export declare abstract class ListOutputParser extends BaseOutputParser {

@@ -14,2 +36,6 @@ abstract parse(text: string): string[];

};
/**
* Class to parse the output of an LLM call as a comma-separated list.
* @augments ListOutputParser
*/
export declare class CommaSeparatedListOutputParser extends ListOutputParser {

@@ -26,2 +52,6 @@ parse(text: string): string[];

};
/**
* Class to parse the output of an LLM call into a dictionary.
* @augments BaseOutputParser
*/
export declare class RegexParser extends BaseOutputParser {

@@ -28,0 +58,0 @@ regex: string | RegExp;
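A usage sketch for the parsers above, importing from the internal module path used elsewhere in this package; the regex, keys, and expected outputs are illustrative:

```ts
import { CommaSeparatedListOutputParser, RegexParser } from "./parser";

const listParser = new CommaSeparatedListOutputParser();
listParser.parse("red, green, blue"); // expected: ["red", "green", "blue"]

// Captured groups map onto outputKeys; the last argument names a fallback key.
const regexParser = new RegexParser(/Answer: (.*)/, ["answer"], "noop");
regexParser.parse("Answer: 42"); // expected: { answer: "42" }
```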

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.RegexParser = exports.CommaSeparatedListOutputParser = exports.ListOutputParser = exports.BaseOutputParser = void 0;
/**
* Class to parse the output of an LLM call.
*/
class BaseOutputParser {
/**
* Return the string type key uniquely identifying this class of parser
*/
_type() {
throw new Error("_type not implemented");
}
/**
* Load an output parser from a json-like object describing the parser.
*/
static deserialize(data) {

@@ -19,5 +28,13 @@ switch (data._type) {

exports.BaseOutputParser = BaseOutputParser;
/**
* Class to parse the output of an LLM call to a list.
* @augments BaseOutputParser
*/
class ListOutputParser extends BaseOutputParser {
}
exports.ListOutputParser = ListOutputParser;
/**
* Class to parse the output of an LLM call as a comma-separated list.
* @augments ListOutputParser
*/
class CommaSeparatedListOutputParser extends ListOutputParser {

@@ -37,2 +54,6 @@ parse(text) {

exports.CommaSeparatedListOutputParser = CommaSeparatedListOutputParser;
/**
* Class to parse the output of an LLM call into a dictionary.
* @augments BaseOutputParser
*/
class RegexParser extends BaseOutputParser {

@@ -39,0 +60,0 @@ constructor(regex, outputKeys, defaultOutputKey) {

@@ -12,7 +12,39 @@ import { BasePromptTemplate, BasePromptTemplateInput, InputValues } from "./index";

};
/**
* Inputs to create a {@link PromptTemplate}
* @augments BasePromptTemplateInput
*/
export interface PromptTemplateInput extends BasePromptTemplateInput {
/**
* The prompt template
*/
template: string;
/**
* The format of the prompt template. Options are 'f-string', 'jinja-2'
*
* @defaultValue 'f-string'
*/
templateFormat?: TemplateFormat;
/**
* Whether or not to try validating the template on initialization
*
* @defaultValue `true`
*/
validateTemplate?: boolean;
}
/**
* Schema to represent a basic prompt for an LLM.
* @augments BasePromptTemplate
* @augments PromptTemplateInput
*
* @example
* ```ts
* import { PromptTemplate } from "@langchain/prompt";
*
* const prompt = new PromptTemplate({
* inputVariables: ["foo"],
* template: "Say {foo}",
* });
* ```
*/
export declare class PromptTemplate extends BasePromptTemplate implements PromptTemplateInput {

@@ -25,3 +57,19 @@ template: string;

format(values: InputValues): string;
/**
* Take examples in list format with prefix and suffix to create a prompt.
*
* Intended to be used as a way to dynamically create a prompt from examples.
*
* @param examples - List of examples to use in the prompt.
* @param suffix - String to go after the list of examples. Should generally set up the user's input.
* @param inputVariables - A list of variable names the final prompt template will expect
* @param exampleSeparator - The separator to use in between examples
* @param prefix - String that should go before any examples. Generally includes instructions.
*
* @returns The final prompt template generated.
*/
static fromExamples(examples: string[], suffix: string, inputVariables: string[], exampleSeparator?: string, prefix?: string): PromptTemplate;
/**
* Load prompt template from a template f-string
*/
static fromTemplate(template: string): PromptTemplate;

@@ -28,0 +76,0 @@ serialize(): SerializedPromptTemplate;
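A sketch of the two constructors above; per the implementation, `fromExamples` simply joins the prefix, the examples, and the suffix with the separator (strings illustrative):

```ts
import { PromptTemplate } from "langchain/prompt";

const prompt = PromptTemplate.fromExamples(
  ["Q: What is 2+2?\nA: 4"], // examples
  "Q: {question}\nA:",       // suffix: sets up the user's input
  ["question"],              // input variables the final template expects
);

// Roughly equivalent, written as one raw f-string template
// (default prefix is "" and default separator is "\n\n"):
const same = PromptTemplate.fromTemplate(
  "\n\nQ: What is 2+2?\nA: 4\n\nQ: {question}\nA:"
);
```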

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });

@@ -17,2 +8,17 @@ exports.PromptTemplate = void 0;

const parser_1 = require("./parser");
/**
* Schema to represent a basic prompt for an LLM.
* @augments BasePromptTemplate
* @augments PromptTemplateInput
*
* @example
* ```ts
* import { PromptTemplate } from "@langchain/prompt";
*
* const prompt = new PromptTemplate({
* inputVariables: ["foo"],
* template: "Say {foo}",
* });
* ```
*/
class PromptTemplate extends index_1.BasePromptTemplate {

@@ -50,2 +56,15 @@ constructor(input) {

}
/**
* Take examples in list format with prefix and suffix to create a prompt.
*
* Intended to be used as a way to dynamically create a prompt from examples.
*
* @param examples - List of examples to use in the prompt.
* @param suffix - String to go after the list of examples. Should generally set up the user's input.
* @param inputVariables - A list of variable names the final prompt template will expect
* @param exampleSeparator - The separator to use in between examples
* @param prefix - String that should go before any examples. Generally includes instructions.
*
* @returns The final prompt template generated.
*/
static fromExamples(examples, suffix, inputVariables, exampleSeparator = "\n\n", prefix = "") {

@@ -58,2 +77,5 @@ const template = [prefix, ...examples, suffix].join(exampleSeparator);

}
/**
* Load prompt template from a template f-string
*/
static fromTemplate(template) {

@@ -81,12 +103,10 @@ const names = new Set();

}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const res = new PromptTemplate({
inputVariables: data.input_variables,
outputParser: data.output_parser && parser_1.BaseOutputParser.deserialize(data.output_parser),
template: (0, util_1.resolveTemplateFromFile)("template", data),
templateFormat: data.template_format,
});
return res;
static async deserialize(data) {
const res = new PromptTemplate({
inputVariables: data.input_variables,
outputParser: data.output_parser && parser_1.BaseOutputParser.deserialize(data.output_parser),
template: (0, util_1.resolveTemplateFromFile)("template", data),
templateFormat: data.template_format,
});
return res;
}

@@ -93,0 +113,0 @@ }

"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -19,13 +10,13 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const PROMPTS_DIR = path_1.default.join(__dirname, "prompts");
(0, globals_1.test)("Load Hello World Prompt", () => __awaiter(void 0, void 0, void 0, function* () {
(0, globals_1.test)("Load Hello World Prompt", async () => {
const helloWorld = path_1.default.join(PROMPTS_DIR, "hello_world.yaml");
const prompt = yield (0, load_1.loadPrompt)(helloWorld);
const prompt = await (0, load_1.loadPrompt)(helloWorld);
(0, globals_1.expect)(prompt._getPromptType()).toBe("prompt");
(0, globals_1.expect)(prompt.format({})).toBe("Say hello world.");
}));
(0, globals_1.test)("Load hub prompt", () => __awaiter(void 0, void 0, void 0, function* () {
const prompt = yield (0, load_1.loadPrompt)("lc@abb92d8://prompts/hello-world/prompt.yaml");
});
(0, globals_1.test)("Load hub prompt", async () => {
const prompt = await (0, load_1.loadPrompt)("lc@abb92d8://prompts/hello-world/prompt.yaml");
(0, globals_1.expect)(prompt._getPromptType()).toBe("prompt");
(0, globals_1.expect)(prompt.format({})).toBe("Say hello world.");
}));
});
//# sourceMappingURL=load.test.js.map

@@ -1,1 +0,2 @@

export declare const loadFromHub: <T>(uri: string, loader: (a: string) => T, validPrefix: string, validSuffixes: Set<string>) => Promise<T | undefined>;
export type LoadValues = Record<string, any>;
export declare const loadFromHub: <T>(uri: string, loader: (a: string, values: LoadValues) => T, validPrefix: string, validSuffixes: Set<string>, values?: LoadValues) => Promise<T | undefined>;
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -24,3 +15,3 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

const URL_BASE = (_b = process.env.LANGCHAIN_HUB_URL_BASE) !== null && _b !== void 0 ? _b : "https://raw.githubusercontent.com/hwchase17/langchain-hub/";
const loadFromHub = (uri, loader, validPrefix, validSuffixes) => __awaiter(void 0, void 0, void 0, function* () {
const loadFromHub = async (uri, loader, validPrefix, validSuffixes, values = {}) => {
const match = uri.match(HUB_PATH_REGEX);

@@ -40,13 +31,13 @@ if (!match) {

const url = [URL_BASE, ref, remotePath].join("/");
const res = yield (0, index_1.fetchWithTimeout)(url, { timeout: 5000 });
const res = await (0, index_1.fetchWithTimeout)(url, { timeout: 5000 });
if (res.status !== 200) {
throw new Error(`Could not find file at ${url}`);
}
const text = yield res.text();
const text = await res.text();
const tmpdir = fs_1.default.mkdtempSync(path_1.default.join(os_1.default.tmpdir(), "langchain"));
const file = path_1.default.join(tmpdir, path_1.default.basename(remotePath));
fs_1.default.writeFileSync(file, text);
return loader(file);
});
return loader(file, values);
};
exports.loadFromHub = loadFromHub;
//# sourceMappingURL=hub.js.map
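Hub URIs follow `lc[@ref]://<validPrefix>/<path>.<suffix>`; the file is fetched from the langchain-hub repo at the given ref, written to a temp file, and handed to the loader. A sketch mirroring the call sites above:

```ts
// Pinned to ref abb92d8; omit "@ref" to use the default branch.
const prompt = await loadFromHub(
  "lc@abb92d8://prompts/hello-world/prompt.yaml",
  loadPromptFromFile, // loader: (file, values) => ...
  "prompts",
  new Set(["py", "json", "yaml"]),
); // resolves to undefined when the URI is not a hub path
```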

@@ -13,1 +13,2 @@ import { RequestInit } from "node-fetch";

export declare const parseFileConfig: (file: string, supportedTypes?: string[]) => any;
export declare const chunkArray: <T>(arr: T[], chunkSize: number) => T[][];

@@ -25,22 +25,2 @@ "use strict";

};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -50,3 +30,3 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

Object.defineProperty(exports, "__esModule", { value: true });
exports.parseFileConfig = exports.resolveConfigFromFile = exports.resolveTemplateFromFile = exports.fetchWithTimeout = void 0;
exports.chunkArray = exports.parseFileConfig = exports.resolveConfigFromFile = exports.resolveTemplateFromFile = exports.fetchWithTimeout = void 0;
const path_1 = __importDefault(require("path"));

@@ -56,11 +36,11 @@ const node_fetch_1 = __importDefault(require("node-fetch"));

const yaml = __importStar(require("yaml"));
const fetchWithTimeout = (url, init) => __awaiter(void 0, void 0, void 0, function* () {
const { timeout } = init, rest = __rest(init, ["timeout"]);
const fetchWithTimeout = async (url, init) => {
const { timeout, ...rest } = init;
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), timeout);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const res = yield (0, node_fetch_1.default)(url, Object.assign(Object.assign({}, rest), { signal: controller.signal }));
const res = await (0, node_fetch_1.default)(url, { ...rest, signal: controller.signal });
clearTimeout(timeoutId);
return res;
});
};
exports.fetchWithTimeout = fetchWithTimeout;

@@ -110,2 +90,10 @@ const loadFileContents = (contents, format) => {

exports.parseFileConfig = parseFileConfig;
const chunkArray = (arr, chunkSize) => arr.reduce((chunks, elem, index) => {
const chunkIndex = Math.floor(index / chunkSize);
const chunk = chunks[chunkIndex] || [];
// eslint-disable-next-line no-param-reassign
chunks[chunkIndex] = chunk.concat([elem]);
return chunks;
}, []);
exports.chunkArray = chunkArray;
//# sourceMappingURL=index.js.map
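For reference, the helper splits a flat array into consecutive groups of at most `chunkSize` elements (values illustrative):

```ts
chunkArray([1, 2, 3, 4, 5], 2); // => [[1, 2], [3, 4], [5]]
```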

@@ -7,3 +7,12 @@ import { ChainValues } from "../chains";

export interface StaticAgent {
/**
* Create a prompt for this class
*
* @param tools - List of tools the agent will have access to, used to format the prompt.
* @param fields - Additional fields used to format the prompt.
*
* @returns A PromptTemplate assembled from the given tools and fields.
* */
createPrompt(tools: Tool[], fields?: Record<string, any>): BasePromptTemplate;
/** Construct an agent from an LLM and a list of tools */
fromLLMAndTools(llm: BaseLLM, tools: Tool[], args?: Record<string, any>): Agent;

@@ -18,2 +27,9 @@ validateTools(_: Tool[]): void;

}
/**
* Class responsible for calling a language model and deciding an action.
*
* @remarks This is driven by an LLMChain. The prompt in the LLMChain *must*
* include a variable called "agent_scratchpad" where the agent can put its
* intermediary work.
*/
export declare abstract class Agent {

@@ -24,2 +40,5 @@ llmChain: LLMChain;

constructor(input: AgentInput);
/**
* Extract tool and tool input from LLM output.
*/
abstract extractToolAndInput(input: string): {

@@ -29,13 +48,48 @@ tool: string;

} | null;
/**
* Prefix to append the observation with.
*/
abstract observationPrefix(): string;
/**
* Prefix to append the LLM call with.
*/
abstract llmPrefix(): string;
/**
* Return the string type key uniquely identifying this class of agent.
*/
abstract _agentType(): string;
/**
* Prepare the agent for a new call, if needed
*/
prepareForNewCall(): void;
/**
* Validate that appropriate tools are passed in
*/
static validateTools(_: Tool[]): void;
_stop(): string[];
/**
* Name of tool to use to terminate the chain.
*/
finishToolName(): string;
/**
* Construct a scratchpad to let the agent continue its thought process
*/
private constructScratchPad;
private _plan;
/**
* Decide what to do given some input.
*
* @param steps - Steps the LLM has taken so far, along with observations from each.
* @param inputs - User inputs.
*
* @returns Action specifying what tool to use.
*/
plan(steps: AgentStep[], inputs: ChainValues): Promise<AgentAction | AgentFinish>;
/**
* Return response when agent has been stopped due to max iterations
*/
returnStoppedResponse(earlyStoppingMethod: StoppingMethod, steps: AgentStep[], inputs: ChainValues): Promise<AgentFinish>;
/**
* Load an agent from a json-like object describing it.
*/
static deserialize(data: SerializedAgent & {

@@ -42,0 +96,0 @@ llm?: BaseLLM;

@@ -1,10 +0,1 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { ZeroShotAgent, } from "./index";

@@ -24,2 +15,9 @@ class ParseError extends Error {

export const staticImplements = (_) => { };
/**
* Class responsible for calling a language model and deciding an action.
*
* @remarks This is driven by an LLMChain. The prompt in the LLMChain *must*
* include a variable called "agent_scratchpad" where the agent can put its
* intermediary work.
*/
export class Agent {

@@ -48,3 +46,9 @@ constructor(input) {

}
/**
* Prepare the agent for a new call, if needed
*/
prepareForNewCall() { }
/**
* Validate that appropriate tools are passed in
*/
// eslint-disable-next-line no-unused-vars

@@ -55,5 +59,11 @@ static validateTools(_) { }

}
/**
* Name of tool to use to terminate the chain.
*/
finishToolName() {
return "Final Answer";
}
/**
* Construct a scratchpad to let the agent continue its thought process
*/
constructScratchPad(steps) {

@@ -67,62 +77,74 @@ return steps.reduce((thoughts, { action, observation }) => thoughts +

}
_plan(steps, inputs, suffix) {
return __awaiter(this, void 0, void 0, function* () {
const thoughts = this.constructScratchPad(steps);
const newInputs = Object.assign(Object.assign({}, inputs), { agent_scratchpad: suffix ? `${thoughts}${suffix}` : thoughts, stop: this._stop() });
const output = yield this.llmChain.predict(newInputs);
const parsed = this.extractToolAndInput(output);
if (!parsed) {
throw new ParseError(`Invalid output: ${output}`, output);
}
const action = {
tool: parsed.tool,
toolInput: parsed.input,
log: output,
};
if (action.tool === this.finishToolName()) {
return { returnValues: { output: action.toolInput }, log: action.log };
}
return action;
});
async _plan(steps, inputs, suffix) {
const thoughts = this.constructScratchPad(steps);
const newInputs = {
...inputs,
agent_scratchpad: suffix ? `${thoughts}${suffix}` : thoughts,
stop: this._stop(),
};
const output = await this.llmChain.predict(newInputs);
const parsed = this.extractToolAndInput(output);
if (!parsed) {
throw new ParseError(`Invalid output: ${output}`, output);
}
const action = {
tool: parsed.tool,
toolInput: parsed.input,
log: output,
};
if (action.tool === this.finishToolName()) {
return { returnValues: { output: action.toolInput }, log: action.log };
}
return action;
}
/**
* Decide what to do given some input.
*
* @param steps - Steps the LLM has taken so far, along with observations from each.
* @param inputs - User inputs.
*
* @returns Action specifying what tool to use.
*/
plan(steps, inputs) {
return this._plan(steps, inputs);
}
returnStoppedResponse(earlyStoppingMethod, steps, inputs) {
return __awaiter(this, void 0, void 0, function* () {
if (earlyStoppingMethod === "force") {
return {
returnValues: { output: "Agent stopped due to max iterations." },
log: "",
};
/**
* Return response when agent has been stopped due to max iterations
*/
async returnStoppedResponse(earlyStoppingMethod, steps, inputs) {
if (earlyStoppingMethod === "force") {
return {
returnValues: { output: "Agent stopped due to max iterations." },
log: "",
};
}
if (earlyStoppingMethod === "generate") {
try {
const action = await this._plan(steps, inputs, "\n\nI now need to return a final answer based on the previous steps:");
if ("returnValues" in action) {
return action;
}
return { returnValues: { output: action.log }, log: action.log };
}
if (earlyStoppingMethod === "generate") {
try {
const action = yield this._plan(steps, inputs, "\n\nI now need to return a final answer based on the previous steps:");
if ("returnValues" in action) {
return action;
}
return { returnValues: { output: action.log }, log: action.log };
catch (err) {
if (!(err instanceof ParseError)) {
throw err;
}
catch (err) {
if (!(err instanceof ParseError)) {
throw err;
}
return { returnValues: { output: err.output }, log: err.output };
}
return { returnValues: { output: err.output }, log: err.output };
}
throw new Error(`Invalid stopping method: ${earlyStoppingMethod}`);
});
}
throw new Error(`Invalid stopping method: ${earlyStoppingMethod}`);
}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
switch (data._type) {
case "zero-shot-react-description":
return ZeroShotAgent.deserialize(data);
default:
throw new Error("Unknown agent type");
}
});
/**
* Load an agent from a json-like object describing it.
*/
static async deserialize(data) {
switch (data._type) {
case "zero-shot-react-description":
return ZeroShotAgent.deserialize(data);
default:
throw new Error("Unknown agent type");
}
}
}
//# sourceMappingURL=agent.js.map
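For intuition, the scratchpad is rebuilt from the prior steps on every turn, roughly as below; the exact prefixes come from the agent's `observationPrefix()` and `llmPrefix()`, for which "Observation:" and "Thought:" stand in here:

```ts
declare const steps: { action: { log: string }; observation: string }[];
// Illustrative reduction mirroring constructScratchPad above.
const scratchpad = steps.reduce(
  (thoughts, { action, observation }) =>
    thoughts + [action.log, `Observation: ${observation}`, "Thought:"].join("\n"),
  ""
);
```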

@@ -11,2 +11,6 @@ import { ChainValues, BaseChain } from "../chains";

};
/**
* A chain managing an agent using tools.
* @augments BaseChain
*/
export declare class AgentExecutor extends BaseChain {

@@ -19,2 +23,3 @@ agent: Agent;

constructor(input: AgentExecutorInput);
/** Create from agent and a list of tools. */
static fromAgentAndTools(fields: {

@@ -21,0 +26,0 @@ agent: Agent;

@@ -1,11 +0,6 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { BaseChain } from "../chains";
/**
* A chain managing an agent using tools.
* @augments BaseChain
*/
export class AgentExecutor extends BaseChain {

@@ -53,2 +48,3 @@ constructor(input) {

}
/** Create from agent and a list of tools. */
static fromAgentAndTools(fields) {

@@ -60,36 +56,34 @@ return new AgentExecutor(fields);

}
_call(inputs) {
return __awaiter(this, void 0, void 0, function* () {
this.agent.prepareForNewCall();
const toolsByName = Object.fromEntries(this.tools.map((t) => [t.name, t]));
const steps = [];
let iterations = 0;
const getOutput = (finishStep) => {
const { returnValues } = finishStep;
if (this.returnIntermediateSteps) {
return Object.assign(Object.assign({}, returnValues), { intermediateSteps: steps });
}
return returnValues;
};
while (this.shouldContinue(iterations)) {
const action = yield this.agent.plan(steps, inputs);
if ("returnValues" in action) {
return getOutput(action);
}
const tool = toolsByName[action.tool];
const observation = tool
? yield tool.call(action.toolInput)
: `${action.tool} is not a valid tool, try another one.`;
steps.push({ action, observation });
if (tool === null || tool === void 0 ? void 0 : tool.returnDirect) {
return getOutput({
returnValues: { [this.agent.returnValues[0]]: observation },
log: "",
});
}
iterations += 1;
async _call(inputs) {
this.agent.prepareForNewCall();
const toolsByName = Object.fromEntries(this.tools.map((t) => [t.name.toLowerCase(), t]));
const steps = [];
let iterations = 0;
const getOutput = (finishStep) => {
const { returnValues } = finishStep;
if (this.returnIntermediateSteps) {
return { ...returnValues, intermediateSteps: steps };
}
const finish = yield this.agent.returnStoppedResponse(this.earlyStoppingMethod, steps, inputs);
return getOutput(finish);
});
return returnValues;
};
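// ReAct-style control loop: plan an action, run the matching tool (looked up case-insensitively by name), record the step, and exit on a finish action, a returnDirect tool, or when shouldContinue(iterations) is exhausted.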
while (this.shouldContinue(iterations)) {
const action = await this.agent.plan(steps, inputs);
if ("returnValues" in action) {
return getOutput(action);
}
const tool = toolsByName[action.tool.toLowerCase()];
const observation = tool
? await tool.call(action.toolInput)
: `${action.tool} is not a valid tool, try another one.`;
steps.push({ action, observation });
if (tool === null || tool === void 0 ? void 0 : tool.returnDirect) {
return getOutput({
returnValues: { [this.agent.returnValues[0]]: observation },
log: "",
});
}
iterations += 1;
}
const finish = await this.agent.returnStoppedResponse(this.earlyStoppingMethod, steps, inputs);
return getOutput(finish);
}

@@ -96,0 +90,0 @@ _chainType() {

@@ -1,13 +0,4 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { LLMChain } from "../chains";
import { resolveConfigFromFile } from "../util";
export const deserializeHelper = (llm, tools, data, fromLLMAndTools, fromConstructor) => __awaiter(void 0, void 0, void 0, function* () {
export const deserializeHelper = async (llm, tools, data, fromLLMAndTools, fromConstructor) => {
if (data.load_from_llm_and_tools) {

@@ -23,5 +14,5 @@ if (!llm) {

const serializedLLMChain = resolveConfigFromFile("llm_chain", data);
const llmChain = yield LLMChain.deserialize(serializedLLMChain);
return fromConstructor(Object.assign(Object.assign({}, data), { llmChain }));
});
const llmChain = await LLMChain.deserialize(serializedLLMChain);
return fromConstructor({ ...data, llmChain });
};
//# sourceMappingURL=helpers.js.map

@@ -6,2 +6,3 @@ export { AgentAction, AgentFinish, AgentStep, StoppingMethod, SerializedAgentT, } from "./types";

export { Tool } from "./tools";
export { initializeAgentExecutor } from "./initialize";
export { loadAgent } from "./load";
export { Agent, staticImplements } from "./agent";
export { AgentExecutor } from "./executor";
export { ZeroShotAgent } from "./mrkl";
export { Tool } from "./tools";
export { initializeAgentExecutor } from "./initialize";
export { loadAgent } from "./load";
//# sourceMappingURL=index.js.map

@@ -1,19 +0,10 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { Agent } from ".";
import { loadFromHub } from "../util/hub";
import { parseFileConfig } from "../util";
const loadAgentFromFile = (file, llmAndTools) => __awaiter(void 0, void 0, void 0, function* () {
const loadAgentFromFile = async (file, llmAndTools) => {
const serialized = parseFileConfig(file);
return Agent.deserialize(Object.assign(Object.assign({}, serialized), llmAndTools));
});
export const loadAgent = (uri, llmAndTools) => __awaiter(void 0, void 0, void 0, function* () {
const hubResult = yield loadFromHub(uri, (u) => loadAgentFromFile(u, llmAndTools), "agents", new Set(["json", "yaml"]));
return Agent.deserialize({ ...serialized, ...llmAndTools });
};
export const loadAgent = async (uri, llmAndTools) => {
const hubResult = await loadFromHub(uri, (u) => loadAgentFromFile(u, llmAndTools), "agents", new Set(["json", "yaml"]));
if (hubResult) {

@@ -23,3 +14,3 @@ return hubResult;

return loadAgentFromFile(uri, llmAndTools);
});
};
//# sourceMappingURL=load.js.map

@@ -11,7 +11,15 @@ import { BaseLLM } from "../../llms";

type CreatePromptArgs = {
/** String to put after the list of tools. */
suffix?: string;
/** String to put before the list of tools. */
prefix?: string;
/** List of input variables the final prompt will expect. */
inputVariables?: string[];
};
type ZeroShotAgentInput = AgentInput;
/**
* Agent for the MRKL chain.
* @augments Agent
* @augments StaticAgent
*/
export declare class ZeroShotAgent extends Agent {

@@ -23,2 +31,11 @@ constructor(input: ZeroShotAgentInput);

static validateTools(tools: Tool[]): void;
/**
* Create prompt in the style of the zero shot agent.
*
* @param tools - List of tools the agent will have access to, used to format the prompt.
* @param args - Arguments to create the prompt with.
* @param args.suffix - String to put after the list of tools.
* @param args.prefix - String to put before the list of tools.
* @param args.inputVariables - List of input variables the final prompt will expect.
*/
static createPrompt(tools: Tool[], args?: CreatePromptArgs): PromptTemplate;

@@ -25,0 +42,0 @@ static fromLLMAndTools(llm: BaseLLM, tools: Tool[], args?: CreatePromptArgs): ZeroShotAgent;

@@ -7,22 +7,2 @@ var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {

};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
var ZeroShotAgent_1;

@@ -35,2 +15,7 @@ import { LLMChain } from "../../chains";

const FINAL_ANSWER_ACTION = "Final Answer:";
/**
* Agent for the MRKL chain.
* @augments Agent
* @augments StaticAgent
*/
let ZeroShotAgent = ZeroShotAgent_1 = class ZeroShotAgent extends Agent {

@@ -57,2 +42,11 @@ constructor(input) {

}
/**
* Create prompt in the style of the zero shot agent.
*
* @param tools - List of tools the agent will have access to, used to format the prompt.
* @param args - Arguments to create the prompt with.
* @param args.suffix - String to put after the list of tools.
* @param args.prefix - String to put before the list of tools.
* @param args.inputVariables - List of input variables the final prompt will expect.
*/
static createPrompt(tools, args) {

@@ -94,11 +88,9 @@ const { prefix = PREFIX, suffix = SUFFIX, inputVariables = ["input", "agent_scratchpad"], } = args !== null && args !== void 0 ? args : {};

}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const { llm, tools } = data, rest = __rest(data, ["llm", "tools"]);
return deserializeHelper(llm, tools, rest, (llm, tools, args) => ZeroShotAgent_1.fromLLMAndTools(llm, tools, {
prefix: args.prefix,
suffix: args.suffix,
inputVariables: args.input_variables,
}), (args) => new ZeroShotAgent_1(args));
});
static async deserialize(data) {
const { llm, tools, ...rest } = data;
return deserializeHelper(llm, tools, rest, (llm, tools, args) => ZeroShotAgent_1.fromLLMAndTools(llm, tools, {
prefix: args.prefix,
suffix: args.suffix,
inputVariables: args.input_variables,
}), (args) => new ZeroShotAgent_1(args));
}

@@ -105,0 +97,0 @@ };

@@ -1,2 +0,1 @@

/* eslint-disable max-len */
export const PREFIX = `Answer the following questions as best you can. You have access to the following tools:`;

@@ -3,0 +2,0 @@ export const formatInstructions = (toolNames) => `Use the following format:

@@ -1,10 +0,1 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { test } from "@jest/globals";

@@ -15,6 +6,8 @@ import { OpenAI } from "../../llms/openai";

import { SerpAPI } from "../tools/serpapi";
test("Run agent from hub", () => __awaiter(void 0, void 0, void 0, function* () {
const model = new OpenAI({});
const tools = [SerpAPI()];
const agent = yield loadAgent("lc://agents/zero-shot-react-description/agent.json", { llm: model, tools });
import { Calculator } from "../tools/calculator";
import { initializeAgentExecutor } from "../initialize";
test("Run agent from hub", async () => {
const model = new OpenAI({ temperature: 0 });
const tools = [new SerpAPI(), new Calculator()];
const agent = await loadAgent("lc://agents/zero-shot-react-description/agent.json", { llm: model, tools });
const executor = AgentExecutor.fromAgentAndTools({

@@ -25,7 +18,18 @@ agent,

});
const res = yield executor.call({
const res = await executor.call({
input: "Who is Olivia Wilde's boyfriend? What is his current age raised to the 0.23 power?",
});
console.log(res);
}), 30000);
}, 30000);
test("Run agent locally", async () => {
const model = new OpenAI({ temperature: 0 });
const tools = [new SerpAPI(), new Calculator()];
const executor = await initializeAgentExecutor(tools, model, "zero-shot-react-description");
console.log("Loaded agent.");
const input = "Who is Olivia Wilde's boyfriend?" +
" What is his current age raised to the 0.23 power?";
console.log(`Executing with input "${input}"...`);
const result = await executor.call({ input });
console.log(`Got output ${result.output}`);
}, 30000);
//# sourceMappingURL=agent.test.js.map
export { SerpAPI } from "./serpapi";
export interface Tool {
call: (arg: string) => Promise<string>;
name: string;
description: string;
returnDirect?: boolean;
}
export { Calculator } from "./calculator";
export { Tool } from "./base";
export { SerpAPI } from "./serpapi";
export { Calculator } from "./calculator";
export { Tool } from "./base";
//# sourceMappingURL=index.js.map
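
An agent tool only needs the contract shown above: a name, a description, and a call method returning a Promise<string>. A hypothetical custom tool, sketched against the new class-based Tool (import path and abstract-member shape assumed):

import { Tool } from "langchain/agents";

// Hypothetical tool: the agent selects it by name/description and
// invokes call() with a single string argument.
class Clock extends Tool {
  name = "clock";

  description = "returns the current ISO-8601 timestamp. input is ignored.";

  async call(_input: string): Promise<string> {
    return new Date().toISOString();
  }
}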

@@ -1,3 +0,18 @@

import { GoogleParameters } from "serpapi";
import { Tool } from "./index";
export declare const SerpAPI: (params?: Partial<GoogleParameters>, apiKey?: string) => Tool;
import type { GoogleParameters } from "serpapi";
import { Tool } from "./base";
/**
* Wrapper around SerpAPI.
*
* To use, you should have the `serpapi` package installed and the SERPAPI_API_KEY environment variable set.
*/
export declare class SerpAPI extends Tool {
protected key: string;
protected params: Partial<GoogleParameters>;
constructor(apiKey?: string | undefined, params?: Partial<GoogleParameters>);
name: string;
/**
* Run query through SerpAPI and parse result
*/
call(input: string): Promise<any>;
description: string;
}

@@ -1,46 +0,89 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { getJson } from "serpapi";
export const SerpAPI = (params, apiKey) => {
const key = apiKey !== null && apiKey !== void 0 ? apiKey : process.env.SERPAPI_API_KEY;
return {
name: "search",
call: (input) => __awaiter(void 0, void 0, void 0, function* () {
var _a, _b, _c, _d, _e, _f, _g;
const res = yield getJson("google", Object.assign(Object.assign({}, params), { api_key: key, q: input }));
if (res.error) {
throw new Error(`Got error from serpAPI: ${res.error}`);
}
if ((_a = res.answer_box) === null || _a === void 0 ? void 0 : _a.answer) {
return res.answer_box.answer;
}
if ((_b = res.answer_box) === null || _b === void 0 ? void 0 : _b.snippet) {
return res.answer_box.snippet;
}
if ((_c = res.answer_box) === null || _c === void 0 ? void 0 : _c.snippet_highlighted_words) {
return res.answer_box.snippet_highlighted_words[0];
}
if ((_d = res.sports_results) === null || _d === void 0 ? void 0 : _d.game_spotlight) {
return res.sports_results.game_spotlight;
}
if ((_e = res.knowledge_graph) === null || _e === void 0 ? void 0 : _e.description) {
return res.knowledge_graph.description;
}
if ((_g = (_f = res.organic_results) === null || _f === void 0 ? void 0 : _f[0]) === null || _g === void 0 ? void 0 : _g.snippet) {
return res.organic_results[0].snippet;
}
return "No good search result found";
}),
description:
// eslint-disable-next-line max-len
"a search engine. useful for when you need to answer questions about current events. input should be a search query.",
};
};
import { Tool } from "./base";
let getJson = null;
try {
// eslint-disable-next-line global-require,import/no-extraneous-dependencies
({ getJson } = require("serpapi"));
}
catch (_a) {
// ignore error
}
/**
* Wrapper around SerpAPI.
*
* To use, you should have the `serpapi` package installed and the SERPAPI_API_KEY environment variable set.
*/
export class SerpAPI extends Tool {
constructor(apiKey = process.env.SERPAPI_API_KEY, params = {}) {
super();
Object.defineProperty(this, "key", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "params", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "name", {
enumerable: true,
configurable: true,
writable: true,
value: "search"
});
Object.defineProperty(this, "description", {
enumerable: true,
configurable: true,
writable: true,
value: "a search engine. useful for when you need to answer questions about current events. input should be a search query."
});
// Throw error at construction time.
if (getJson === null) {
throw new Error("Please install serpapi as a dependency with, e.g. `npm i serpapi`");
}
if (!apiKey) {
throw new Error("SerpAPI API key not set. You can set it as SERPAPI_API_KEY in your .env file, or pass it to SerpAPI.");
}
this.key = apiKey;
this.params = params;
}
/**
* Run query through SerpAPI and parse result
*/
async call(input) {
var _a, _b, _c, _d, _e, _f, _g;
if (getJson === null) {
throw new Error("Please install serpapi as a dependency with, e.g. `npm i serpapi`");
}
const res = await getJson("google", {
...this.params,
api_key: this.key,
q: input,
});
if (res.error) {
throw new Error(`Got error from serpAPI: ${res.error}`);
}
if ((_a = res.answer_box) === null || _a === void 0 ? void 0 : _a.answer) {
return res.answer_box.answer;
}
if ((_b = res.answer_box) === null || _b === void 0 ? void 0 : _b.snippet) {
return res.answer_box.snippet;
}
if ((_c = res.answer_box) === null || _c === void 0 ? void 0 : _c.snippet_highlighted_words) {
return res.answer_box.snippet_highlighted_words[0];
}
if ((_d = res.sports_results) === null || _d === void 0 ? void 0 : _d.game_spotlight) {
return res.sports_results.game_spotlight;
}
if ((_e = res.knowledge_graph) === null || _e === void 0 ? void 0 : _e.description) {
return res.knowledge_graph.description;
}
if ((_g = (_f = res.organic_results) === null || _f === void 0 ? void 0 : _f[0]) === null || _g === void 0 ? void 0 : _g.snippet) {
return res.organic_results[0].snippet;
}
return "No good search result found";
}
}
//# sourceMappingURL=serpapi.js.map
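
In use, the class-based wrapper above looks roughly like this; it assumes the optional serpapi dependency is installed and SERPAPI_API_KEY is set, which the constructor enforces. The import path is an assumption.

import { SerpAPI } from "langchain/agents";

// With no key argument the constructor falls back to process.env.SERPAPI_API_KEY;
// extra GoogleParameters such as gl/hl are forwarded to getJson.
const search = new SerpAPI(undefined, { gl: "us", hl: "en" });
const snippet = await search.call("Who won the 2022 World Cup final?");
console.log(snippet);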

@@ -1,12 +0,42 @@

import { SerializedLLMChain } from "./index";
import { LLMChain, StuffDocumentsChain, VectorDBQAChain, ChatVectorDBQAChain } from "./index";
import { BaseMemory } from "../memory";
export type ChainValues = Record<string, any>;
type SerializedBaseChain = SerializedLLMChain;
export declare abstract class BaseChain {
export type LoadValues = Record<string, any>;
declare const chainClasses: (typeof LLMChain | typeof StuffDocumentsChain | typeof VectorDBQAChain | typeof ChatVectorDBQAChain)[];
export type SerializedBaseChain = ReturnType<InstanceType<(typeof chainClasses)[number]>["serialize"]>;
export interface ChainInputs {
memory?: BaseMemory;
}
/**
* Base interface that all chains must implement.
*/
export declare abstract class BaseChain implements ChainInputs {
memory?: BaseMemory;
/**
* Run the core logic of this chain and return the output
*/
abstract _call(values: ChainValues): Promise<ChainValues>;
/**
* Return the string type key uniquely identifying this class of chain.
*/
abstract _chainType(): string;
/**
* Return a json-like object representing this chain.
*/
abstract serialize(): SerializedBaseChain;
/**
* Run the core logic of this chain and add to output if desired.
*
* Wraps {@link _call} and handles memory.
*/
call(values: ChainValues): Promise<ChainValues>;
/**
* Call the chain on all inputs in the list
*/
apply(inputs: ChainValues[]): Promise<ChainValues>[];
static deserialize(data: SerializedBaseChain): Promise<BaseChain>;
/**
* Load a chain from a json-like object describing it.
*/
static deserialize(data: SerializedBaseChain, values?: LoadValues): Promise<BaseChain>;
}
export {};
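
A minimal concrete chain under this interface might be sketched as follows; only _call carries real logic, and serialization is stubbed out for brevity.

import { BaseChain, ChainValues } from "langchain/chains";

// Sketch: a chain that upper-cases its "input" key. The inherited call()
// wraps _call() and handles memory, so callers use call(), not _call().
class ShoutChain extends BaseChain {
  async _call(values: ChainValues): Promise<ChainValues> {
    return { output: String(values.input).toUpperCase() };
  }

  _chainType() {
    return "shout";
  }

  serialize(): never {
    throw new Error("ShoutChain is not serializable");
  }
}

// new ShoutChain().call({ input: "hi" }) resolves to { output: "HI" }.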

@@ -1,14 +0,52 @@

import { LLMChain } from "./index";
import { LLMChain, StuffDocumentsChain, VectorDBQAChain, ChatVectorDBQAChain } from "./index";
const chainClasses = [LLMChain, StuffDocumentsChain, VectorDBQAChain, ChatVectorDBQAChain];
/**
* Base interface that all chains must implement.
*/
export class BaseChain {
call(values) {
constructor() {
Object.defineProperty(this, "memory", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
}
/**
* Run the core logic of this chain and add to output if desired.
*
* Wraps {@link _call} and handles memory.
*/
async call(values) {
const fullValues = structuredClone(values);
if (!(this.memory == null)) {
const newValues = await this.memory.loadMemoryVariables(values);
for (const [key, value] of Object.entries(newValues)) {
fullValues[key] = value;
}
}
// TODO(sean) add callback support
return this._call(values);
const outputValues = await this._call(fullValues);
if (!(this.memory == null)) {
this.memory.saveContext(values, outputValues);
}
return outputValues;
}
/**
* Call the chain on all inputs in the list
*/
apply(inputs) {
return inputs.map((i) => this.call(i));
}
static deserialize(data) {
/**
* Load a chain from a json-like object describing it.
*/
static deserialize(data, values = {}) {
switch (data._type) {
case "llm_chain":
return LLMChain.deserialize(data);
case "stuff_documents_chain":
return StuffDocumentsChain.deserialize(data);
case "vector_db_qa":
return VectorDBQAChain.deserialize(data, values);
default:

@@ -15,0 +53,0 @@ throw new Error(`Invalid prompt type in config: ${data._type}`);

export { BaseChain, ChainValues } from "./base";
export { SerializedLLMChain, LLMChain } from "./llm_chain";
export { SerializedLLMChain, LLMChain, ConversationChain } from "./llm_chain";
export { SerializedStuffDocumentsChain, StuffDocumentsChain, } from "./combine_docs_chain";
export { ChatVectorDBQAChain, SerializedChatVectorDBQAChain } from "./chat_vector_db_chain";
export { VectorDBQAChain, SerializedVectorDBQAChain } from "./vector_db_qa";
export { loadChain } from "./load";
export { loadQAChain } from "./question_answering/load";
export { BaseChain } from "./base";
export { LLMChain } from "./llm_chain";
export { LLMChain, ConversationChain } from "./llm_chain";
export { StuffDocumentsChain, } from "./combine_docs_chain";
export { ChatVectorDBQAChain } from "./chat_vector_db_chain";
export { VectorDBQAChain } from "./vector_db_qa";
export { loadChain } from "./load";
export { loadQAChain } from "./question_answering/load";
//# sourceMappingURL=index.js.map
import { BaseChain, ChainValues } from "./index";
import { BaseLLM, SerializedLLM } from "../llms";
import { BaseMemory } from "../memory";
import { BasePromptTemplate, SerializedBasePromptTemplate } from "../prompt";
export interface LLMChainInput {
/** Prompt object to use */
prompt: BasePromptTemplate;
/** LLM Wrapper to use */
llm: BaseLLM;
/** @ignore */
outputKey: string;

@@ -16,2 +20,14 @@ }

};
/**
* Chain to run queries against LLMs.
* @augments BaseChain
* @augments LLMChainInput
*
* @example
* ```ts
* import { LLMChain, OpenAI, PromptTemplate } from "langchain";
* const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke");
* const chain = new LLMChain({ llm: new OpenAI(), prompt });
* ```
*/
export declare class LLMChain extends BaseChain implements LLMChainInput {

@@ -27,2 +43,13 @@ prompt: BasePromptTemplate;

_call(values: ChainValues): Promise<ChainValues>;
/**
* Format prompt with values and pass to LLM
*
* @param values - keys to pass to prompt template
* @returns Completion from LLM.
*
* @example
* ```ts
* chain.predict({ adjective: "funny" })
* ```
*/
predict(values: ChainValues): Promise<string>;

@@ -33,1 +60,9 @@ _chainType(): "llm_chain";

}
export declare class ConversationChain extends LLMChain {
constructor(fields: {
llm: BaseLLM;
prompt?: BasePromptTemplate;
outputKey?: string;
memory?: BaseMemory;
});
}
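
Because the ConversationChain declared above defaults to a BufferMemory and a history/input prompt (see the implementation below), consecutive calls share context. A hedged usage sketch:

import { OpenAI } from "langchain/llms";
import { ConversationChain } from "langchain/chains";

const chain = new ConversationChain({ llm: new OpenAI({ temperature: 0 }) });
// "response" is the default outputKey; the BufferMemory threads history through.
const first = await chain.call({ input: "Hi, my name is Ada." });
const second = await chain.call({ input: "What is my name?" });
console.log(second.response);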

@@ -1,14 +0,18 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { BaseChain } from "./index";
import { BaseLLM } from "../llms";
import { BasePromptTemplate } from "../prompt";
import { BufferMemory } from "../memory";
import { BasePromptTemplate, PromptTemplate } from "../prompt";
import { resolveConfigFromFile } from "../util";
/**
* Chain to run queries against LLMs.
* @augments BaseChain
* @augments LLMChainInput
*
* @example
* ```ts
* import { LLMChain, OpenAI, PromptTemplate } from "langchain";
* const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke");
* const chain = new LLMChain({ llm: new OpenAI(), prompt });
* ```
*/
export class LLMChain extends BaseChain {

@@ -40,19 +44,26 @@ constructor(fields) {

}
_call(values) {
return __awaiter(this, void 0, void 0, function* () {
let stop;
if ("stop" in values && Array.isArray(values.stop)) {
stop = values.stop;
}
const formattedString = this.prompt.format(values);
const llmResult = yield this.llm.call(formattedString, stop);
const result = { [this.outputKey]: llmResult };
return result;
});
async _call(values) {
let stop;
if ("stop" in values && Array.isArray(values.stop)) {
stop = values.stop;
}
const formattedString = this.prompt.format(values);
const llmResult = await this.llm.call(formattedString, stop);
const result = { [this.outputKey]: llmResult };
return result;
}
predict(values) {
return __awaiter(this, void 0, void 0, function* () {
const output = yield this.call(values);
return output[this.outputKey];
});
/**
* Format prompt with values and pass to LLM
*
* @param values - keys to pass to prompt template
* @returns Completion from LLM.
*
* @example
* ```ts
* chain.predict({ adjective: "funny" })
* ```
*/
async predict(values) {
const output = await this.call(values);
return output[this.outputKey];
}

@@ -62,10 +73,8 @@ _chainType() {

}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const serializedLLM = resolveConfigFromFile("llm", data);
const serializedPrompt = resolveConfigFromFile("prompt", data);
return new LLMChain({
llm: yield BaseLLM.deserialize(serializedLLM),
prompt: yield BasePromptTemplate.deserialize(serializedPrompt),
});
static async deserialize(data) {
const serializedLLM = resolveConfigFromFile("llm", data);
const serializedPrompt = resolveConfigFromFile("prompt", data);
return new LLMChain({
llm: await BaseLLM.deserialize(serializedLLM),
prompt: await BasePromptTemplate.deserialize(serializedPrompt),
});

@@ -81,2 +90,24 @@ }

}
// eslint-disable-next-line max-len
const defaultTemplate = `The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
Current conversation:
{history}
Human: {input}
AI:`;
const defaultPrompt = new PromptTemplate({
template: defaultTemplate,
inputVariables: ["history", "input"],
});
export class ConversationChain extends LLMChain {
constructor(fields) {
var _a, _b, _c;
super({
prompt: (_a = fields.prompt) !== null && _a !== void 0 ? _a : defaultPrompt,
llm: fields.llm,
outputKey: (_b = fields.outputKey) !== null && _b !== void 0 ? _b : "response",
});
this.memory = (_c = fields.memory) !== null && _c !== void 0 ? _c : new BufferMemory();
}
}
//# sourceMappingURL=llm_chain.js.map
import { BaseChain } from ".";
export declare const loadChain: (uri: string) => Promise<BaseChain>;
export type LoadValues = Record<string, any>;
/**
* Load a chain from {@link https://github.com/hwchase17/langchain-hub | LangchainHub} or local filesystem.
*
* @example
* Loading from LangchainHub:
* ```ts
* import { loadChain } from "langchain/chains";
* const chain = await loadChain("lc://chains/hello-world/chain.json");
* const res = await chain.call({ topic: "my favorite color" });
* ```
*
* @example
* Loading from local filesystem:
* ```ts
* import { loadChain } from "langchain/chains";
* const chain = await loadChain("/path/to/chain.json");
* ```
*/
export declare const loadChain: (uri: string, values?: LoadValues) => Promise<BaseChain>;

@@ -1,25 +0,33 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { BaseChain } from ".";
import { loadFromHub } from "../util/hub";
import { parseFileConfig } from "../util";
const loadChainFromFile = (file) => __awaiter(void 0, void 0, void 0, function* () {
const loadChainFromFile = async (file, values = {}) => {
const serialized = parseFileConfig(file);
console.log({ serialized });
return BaseChain.deserialize(serialized);
});
export const loadChain = (uri) => __awaiter(void 0, void 0, void 0, function* () {
const hubResult = yield loadFromHub(uri, loadChainFromFile, "chains", new Set(["json", "yaml"]));
return BaseChain.deserialize(serialized, values);
};
/**
* Load a chain from {@link https://github.com/hwchase17/langchain-hub | LangchainHub} or local filesystem.
*
* @example
* Loading from LangchainHub:
* ```ts
* import { loadChain } from "langchain/chains";
* const chain = await loadChain("lc://chains/hello-world/chain.json");
* const res = await chain.call({ topic: "my favorite color" });
* ```
*
* @example
* Loading from local filesystem:
* ```ts
* import { loadChain } from "langchain/chains";
* const chain = await loadChain("/path/to/chain.json");
* ```
*/
export const loadChain = async (uri, values = {}) => {
const hubResult = await loadFromHub(uri, (uri) => loadChainFromFile(uri, values), "chains", new Set(["json", "yaml"]), values);
if (hubResult) {
return hubResult;
}
return loadChainFromFile(uri);
});
return loadChainFromFile(uri, values);
};
//# sourceMappingURL=load.js.map

@@ -1,16 +0,7 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { test } from "@jest/globals";
import { OpenAI } from "../../llms/openai";
import { PromptTemplate } from "../../prompt";
import { LLMChain } from "../llm_chain";
import { LLMChain, ConversationChain } from "../llm_chain";
import { loadChain } from "../load";
test("Test OpenAI", () => __awaiter(void 0, void 0, void 0, function* () {
test("Test OpenAI", async () => {
const model = new OpenAI({});

@@ -22,10 +13,16 @@ const prompt = new PromptTemplate({

const chain = new LLMChain({ prompt, llm: model });
const res = yield chain.call({ foo: "my favorite color" });
const res = await chain.call({ foo: "my favorite color" });
console.log({ res });
}));
test("Load chain from hub", () => __awaiter(void 0, void 0, void 0, function* () {
const chain = yield loadChain("lc://chains/hello-world/chain.json");
const res = yield chain.call({ topic: "my favorite color" });
});
test("Load chain from hub", async () => {
const chain = await loadChain("lc://chains/hello-world/chain.json");
const res = await chain.call({ topic: "my favorite color" });
console.log({ res });
}));
});
test("Test ConversationChain", async () => {
const model = new OpenAI({});
const chain = new ConversationChain({ llm: model });
const res = await chain.call({ input: "my favorite color" });
console.log({ res });
});
//# sourceMappingURL=llm_chain.test.js.map

@@ -5,20 +5,61 @@ import { LLMCallbackManager, LLMResult } from "./index";

} & Record<string, any>;
/**
* LLM Wrapper. Provides a {@link call} (and a {@link generate}) function that takes in a prompt (or prompts) and returns a string.
*/
export declare abstract class BaseLLM {
/**
* The name of the LLM class
*/
name: string;
cache?: boolean;
callbackManager: LLMCallbackManager;
/**
* Whether to print out response text.
*/
verbose?: boolean;
constructor(callbackManager?: LLMCallbackManager, verbose?: boolean);
/**
* Run the LLM on the given prompts and input.
*/
abstract _generate(prompts: string[], stop?: string[]): Promise<LLMResult>;
/** @ignore */
_generateUncached(prompts: string[], stop?: string[]): Promise<LLMResult>;
/**
* Run the LLM on the given prompts and input, handling caching.
*/
generate(prompts: string[], stop?: string[]): Promise<LLMResult>;
/**
* Convenience wrapper for {@link generate} that takes in a single string prompt and returns a single string output.
*/
call(prompt: string, stop?: string[]): Promise<string>;
/**
* Get the identifying parameters of the LLM.
*/
_identifyingParams(): Record<string, any>;
/**
* Return the string type key uniquely identifying this class of LLM.
*/
abstract _llmType(): string;
/**
* Return a json-like object representing this LLM.
*/
serialize(): SerializedLLM;
/**
* Load an LLM from a json-like object describing it.
*/
static deserialize(data: SerializedLLM): Promise<BaseLLM>;
}
/**
* LLM class that provides a simpler interface to subclass than {@link BaseLLM}.
*
* Requires only implementing a simpler {@link _call} method instead of {@link _generate}.
*
* @augments BaseLLM
*/
export declare abstract class LLM extends BaseLLM {
/**
* Run the LLM on the given prompt and input.
*/
abstract _call(prompt: string, stop?: string[]): Promise<string>;
_generate(prompts: string[], stop?: string[]): Promise<LLMResult>;
}
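
To illustrate the simpler subclassing path that LLM provides over BaseLLM, a toy deterministic model (handy in tests) could be sketched as follows; the import path is an assumption.

import { LLM } from "langchain/llms";

// Sketch: a fake LLM that echoes its prompt. Only _call and _llmType need
// implementing; _generate, call and generate are inherited from LLM/BaseLLM.
class FakeLLM extends LLM {
  name = "fake";

  async _call(prompt: string, _stop?: string[]): Promise<string> {
    return `echo: ${prompt}`;
  }

  _llmType() {
    return "fake";
  }
}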

@@ -1,21 +0,1 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
import { OpenAI } from "./index";

@@ -36,4 +16,10 @@ import { InMemoryCache } from "../cache";

const cache = new InMemoryCache();
/**
* LLM Wrapper. Provides a {@link call} (and a {@link generate}) function that takes in a prompt (or prompts) and returns a string.
*/
export class BaseLLM {
constructor(callbackManager, verbose) {
/**
* The name of the LLM class
*/
Object.defineProperty(this, "name", {

@@ -57,2 +43,5 @@ enumerable: true,

});
/**
* Whether to print out response text.
*/
Object.defineProperty(this, "verbose", {

@@ -67,59 +56,63 @@ enumerable: true,

}
_generateUncached(prompts, stop) {
return __awaiter(this, void 0, void 0, function* () {
this.callbackManager.handleStart({ name: this.name }, prompts, this.verbose);
let output;
try {
output = yield this._generate(prompts, stop);
}
catch (err) {
this.callbackManager.handleError(`${err}`, this.verbose);
throw err;
}
this.callbackManager.handleEnd(output, this.verbose);
return output;
});
/** @ignore */
async _generateUncached(prompts, stop) {
this.callbackManager.handleStart({ name: this.name }, prompts, this.verbose);
let output;
try {
output = await this._generate(prompts, stop);
}
catch (err) {
this.callbackManager.handleError(`${err}`, this.verbose);
throw err;
}
this.callbackManager.handleEnd(output, this.verbose);
return output;
}
generate(prompts, stop) {
/**
* Run the LLM on the given prompts and input, handling caching.
*/
async generate(prompts, stop) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
if (!Array.isArray(prompts)) {
throw new Error("Argument 'prompts' is expected to be a string[]");
if (!Array.isArray(prompts)) {
throw new Error("Argument 'prompts' is expected to be a string[]");
}
if (this.cache === true && cache === null) {
throw new Error("Requested cache, but no cache found");
}
if (cache === null || this.cache === false) {
return this._generateUncached(prompts, stop);
}
const params = this.serialize();
params.stop = stop;
const llmStringKey = `${Object.entries(params).sort()}`;
const missingPromptIndices = [];
const generations = prompts.map((prompt, index) => {
const result = cache.lookup(prompt, llmStringKey);
if (!result) {
missingPromptIndices.push(index);
}
if (this.cache === true && cache === null) {
throw new Error("Requested cache, but no cache found");
}
if (cache === null || this.cache === false) {
return this._generateUncached(prompts, stop);
}
const params = this.serialize();
params.stop = stop;
const llmStringKey = `${Object.entries(params).sort()}`;
const missingPromptIndices = [];
const generations = prompts.map((prompt, index) => {
const result = cache.lookup(prompt, llmStringKey);
if (!result) {
missingPromptIndices.push(index);
}
return result;
return result;
});
let llmOutput = {};
if (missingPromptIndices.length > 0) {
const results = await this._generateUncached(missingPromptIndices.map((i) => prompts[i]), stop);
results.generations.forEach((generation, index) => {
const promptIndex = missingPromptIndices[index];
generations[promptIndex] = generation;
cache.update(prompts[promptIndex], llmStringKey, generation);
});
let llmOutput = {};
if (missingPromptIndices.length > 0) {
const results = yield this._generateUncached(missingPromptIndices.map((i) => prompts[i]), stop);
results.generations.forEach((generation, index) => {
const promptIndex = missingPromptIndices[index];
generations[promptIndex] = generation;
cache.update(prompts[promptIndex], llmStringKey, generation);
});
llmOutput = (_a = results.llmOutput) !== null && _a !== void 0 ? _a : {};
}
return { generations, llmOutput };
});
llmOutput = (_a = results.llmOutput) !== null && _a !== void 0 ? _a : {};
}
return { generations, llmOutput };
}
call(prompt, stop) {
return __awaiter(this, void 0, void 0, function* () {
const { generations } = yield this.generate([prompt], stop);
return generations[0][0].text;
});
/**
* Convenience wrapper for {@link generate} that takes in a single string prompt and returns a single string output.
*/
async call(prompt, stop) {
const { generations } = await this.generate([prompt], stop);
return generations[0][0].text;
}
/**
* Get the identifying parameters of the LLM.
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any

@@ -129,30 +122,42 @@ _identifyingParams() {

}
/**
* Return a json-like object representing this LLM.
*/
serialize() {
return Object.assign(Object.assign({}, this._identifyingParams()), { _type: this._llmType() });
return {
...this._identifyingParams(),
_type: this._llmType(),
};
}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const { _type } = data, rest = __rest(data, ["_type"]);
const Cls = {
openai: OpenAI,
}[_type];
if (Cls === undefined) {
throw new Error(`Cannot load LLM with type ${_type}`);
}
return new Cls(rest);
});
/**
* Load an LLM from a json-like object describing it.
*/
static async deserialize(data) {
const { _type, ...rest } = data;
const Cls = {
openai: OpenAI,
}[_type];
if (Cls === undefined) {
throw new Error(`Cannot load LLM with type ${_type}`);
}
return new Cls(rest);
}
}
/**
* LLM class that provides a simpler interface to subclass than {@link BaseLLM}.
*
* Requires only implementing a simpler {@link _call} method instead of {@link _generate}.
*
* @augments BaseLLM
*/
export class LLM extends BaseLLM {
_generate(prompts, stop) {
return __awaiter(this, void 0, void 0, function* () {
const generations = [];
for (let i = 0; i < prompts.length; i += 1) {
const text = yield this._call(prompts[i], stop);
generations.push([{ text }]);
}
return { generations };
});
async _generate(prompts, stop) {
const generations = [];
for (let i = 0; i < prompts.length; i += 1) {
const text = await this._call(prompts[i], stop);
generations.push([{ text }]);
}
return { generations };
}
}
//# sourceMappingURL=base.js.map

@@ -11,9 +11,28 @@ export { BaseLLM, LLM, SerializedLLM } from "./base";

};
/**
* Output of a single generation.
*/
export type Generation = {
/**
* Generated text output
*/
text: string;
/**
* Raw generation info response from the provider.
* May include things like reason for finishing (e.g. in {@link OpenAI})
*/
generationInfo?: Record<string, any>;
};
/**
* Contains all relevant information returned by an LLM.
*/
export type LLMResult = {
/**
* List of the things generated. Each input could have multiple {@link Generation | generations}, hence this is a list of lists.
*/
generations: Generation[][];
/**
* Dictionary of arbitrary LLM-provider specific output.
*/
llmOutput?: Record<string, any>;
};
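
Since each input prompt can yield several completions, generations is indexed [prompt][choice]. For any BaseLLM (model below stands in for, e.g., the OpenAI wrapper):

const result = await model.generate(["Tell me a joke.", "Tell me a fact."]);
// First completion for the second prompt:
console.log(result.generations[1][0].text);
// Provider-specific extras, e.g. token usage for OpenAI:
console.log(result.llmOutput);
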
import { BaseLLM } from "./base";
export declare const loadLLM: typeof BaseLLM.deserialize;
export declare const loadLLMFromFile: (file: string) => Promise<BaseLLM>;
/**
* Load an LLM from a local file.
*
* @example
* ```ts
* import { loadLLM } from "langchain/llms";
* const model = await loadLLM("/path/to/llm.json");
* ```
*/
export declare const loadLLM: (file: string) => Promise<BaseLLM>;
import { BaseLLM } from "./base";
import { parseFileConfig } from "../util";
export const loadLLM = BaseLLM.deserialize;
export const loadLLMFromFile = (file) => loadLLM(parseFileConfig(file));
/**
* Load an LLM from a local file.
*
* @example
* ```ts
* import { loadLLM } from "langchain/llms";
* const model = await loadLLM("/path/to/llm.json");
* ```
*/
export const loadLLM = (file) => BaseLLM.deserialize(parseFileConfig(file));
//# sourceMappingURL=load.js.map
import type { CreateCompletionRequest } from "openai";
import { BaseLLM, LLMResult, LLMCallbackManager } from ".";
interface ModelParams {
/** Sampling temperature to use */
temperature: number;
/**
* Maximum number of tokens to generate in the completion. -1 returns as many
* tokens as possible given the prompt and the model's maximum context size.
*/
maxTokens: number;
/** Total probability mass of tokens to consider at each step */
topP: number;
/** Penalizes repeated tokens according to frequency */
frequencyPenalty: number;
/** Penalizes repeated tokens */
presencePenalty: number;
/** Number of completions to generate for each prompt */
n: number;
/** Generates `bestOf` completions server side and returns the "best" */
bestOf: number;
/** Dictionary used to adjust the probability of specific tokens being generated */
logitBias?: Record<string, number>;
}
/**
* Input to OpenAI class.
* @augments ModelParams
*/
interface OpenAIInput extends ModelParams {
/** Model name to use */
modelName: string;
/** Holds any additional parameters that are valid to pass to {@link
* https://platform.openai.com/docs/api-reference/completions/create |
* `openai.createCompletion`} that are not explicitly specified on this class.
*/
modelKwargs?: Kwargs;
/** Batch size to use when passing multiple documents to generate */
batchSize: number;
/** Maximum number of retries to make when generating */
maxRetries: number;
/** List of stop words to use when generating */
stop?: string[];
}
type Kwargs = Record<string, any>;
export declare class OpenAI extends BaseLLM implements ModelParams {
/**
* Wrapper around OpenAI large language models.
*
* To use you should have the `openai` package installed, with the
* `OPENAI_API_KEY` environment variable set.
*
* @remarks
* Any parameters that are valid to be passed to {@link
* https://platform.openai.com/docs/api-reference/completions/create |
* `openai.createCompletion`} can be passed through {@link modelKwargs}, even
* if not explicitly available on this class.
*
* @augments BaseLLM
* @augments OpenAIInput
*/
export declare class OpenAI extends BaseLLM implements OpenAIInput {
temperature: number;

@@ -29,13 +74,14 @@ maxTokens: number;

private client;
constructor(fields?: Partial<ModelParams> & {
constructor(fields?: Partial<OpenAIInput> & {
callbackManager?: LLMCallbackManager;
verbose?: boolean;
modelName?: string;
modelKwargs?: Kwargs;
openAIApiKey?: string;
batchSize?: number;
maxRetries?: number;
stop?: string[];
});
/**
* Get the parameters used to invoke the model
*/
invocationParams(): CreateCompletionRequest & Kwargs;
/**
* Get the identifying parameters for the model
*/
identifyingParams(): {

@@ -60,3 +106,19 @@ model: string;

};
/**
* Call out to OpenAI's endpoint with k unique prompts
*
* @param prompts - The prompts to pass into the model.
* @param [stop] - Optional list of stop words to use when generating.
*
* @returns The full LLM output.
*
* @example
* ```ts
* import { OpenAI } from "langchain/llms";
* const openai = new OpenAI();
* const response = await openai.generate(["Tell me a joke."]);
* ```
*/
_generate(prompts: string[], stop?: string[]): Promise<LLMResult>;
/** @ignore */
completionWithRetry(request: CreateCompletionRequest): Promise<import("axios").AxiosResponse<import("openai").CreateCompletionResponse, any>>;

@@ -63,0 +125,0 @@ _llmType(): string;
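
Pulling the OpenAIInput fields above together, construction looks roughly like this; "user" is just one example of a createCompletion parameter routed through modelKwargs, and the model name is illustrative.

import { OpenAI } from "langchain/llms";

const model = new OpenAI({
  modelName: "text-davinci-003",
  temperature: 0.7,
  maxTokens: 256,
  // Anything createCompletion accepts but this class does not model
  // explicitly can be passed through modelKwargs.
  modelKwargs: { user: "example-user" },
});
const text = await model.call("Write a haiku about code review.");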

@@ -1,11 +0,3 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { backOff } from "exponential-backoff";
import { chunkArray } from "../util";
import { BaseLLM } from ".";

@@ -15,3 +7,3 @@ let Configuration = null;

try {
// eslint-disable-next-line global-require
// eslint-disable-next-line global-require,import/no-extraneous-dependencies
({ Configuration, OpenAIApi } = require("openai"));

@@ -22,9 +14,17 @@ }

}
const chunkArray = (arr, chunkSize) => arr.reduce((chunks, elem, index) => {
const chunkIndex = Math.floor(index / chunkSize);
const chunk = chunks[chunkIndex] || [];
// eslint-disable-next-line no-param-reassign
chunks[chunkIndex] = chunk.concat([elem]);
return chunks;
}, []);
/**
* Wrapper around OpenAI large language models.
*
* To use you should have the `openai` package installed, with the
* `OPENAI_API_KEY` environment variable set.
*
* @remarks
* Any parameters that are valid to be passed to {@link
* https://platform.openai.com/docs/api-reference/completions/create |
* `openai.createCompletion`} can be passed through {@link modelKwargs}, even
* if not explicitly available on this class.
*
* @augments BaseLLM
* @augments OpenAIInput
*/
export class OpenAI extends BaseLLM {

@@ -119,3 +119,3 @@ constructor(fields) {

if (Configuration === null || OpenAIApi === null) {
throw new Error("Please install openai as a dependency with, e.g. `npm install -S openai`");
throw new Error("Please install openai as a dependency with, e.g. `npm i openai`");
}

@@ -140,50 +140,88 @@ this.modelName = (_a = fields === null || fields === void 0 ? void 0 : fields.modelName) !== null && _a !== void 0 ? _a : this.modelName;

}
/**
* Get the parameters used to invoke the model
*/
invocationParams() {
return Object.assign({ model: this.modelName, temperature: this.temperature, max_tokens: this.maxTokens, top_p: this.topP, frequency_penalty: this.frequencyPenalty, presence_penalty: this.presencePenalty, n: this.n, best_of: this.bestOf, logit_bias: this.logitBias, stop: this.stop }, this.modelKwargs);
return {
model: this.modelName,
temperature: this.temperature,
max_tokens: this.maxTokens,
top_p: this.topP,
frequency_penalty: this.frequencyPenalty,
presence_penalty: this.presencePenalty,
n: this.n,
best_of: this.bestOf,
logit_bias: this.logitBias,
stop: this.stop,
...this.modelKwargs,
};
}
/**
* Get the identifying parameters for the model
*/
identifyingParams() {
return Object.assign({ model_name: this.modelName }, this.invocationParams());
return {
model_name: this.modelName,
...this.invocationParams(),
};
}
_generate(prompts, stop) {
/**
* Call out to OpenAI's endpoint with k unique prompts
*
* @param prompts - The prompts to pass into the model.
* @param [stop] - Optional list of stop words to use when generating.
*
* @returns The full LLM output.
*
* @example
* ```ts
* import { OpenAI } from "langchain/llms";
* const openai = new OpenAI();
* const response = await openai.generate(["Tell me a joke."]);
* ```
*/
async _generate(prompts, stop) {
var _a, _b, _c, _d;
return __awaiter(this, void 0, void 0, function* () {
const subPrompts = chunkArray(prompts, this.batchSize);
const choices = [];
const tokenUsage = {};
if (this.stop && stop) {
throw new Error("Stop found in input and default params");
const subPrompts = chunkArray(prompts, this.batchSize);
const choices = [];
const tokenUsage = {};
if (this.stop && stop) {
throw new Error("Stop found in input and default params");
}
const params = this.invocationParams();
params.stop = stop !== null && stop !== void 0 ? stop : params.stop;
for (let i = 0; i < subPrompts.length; i += 1) {
const { data } = await this.completionWithRetry({
...params,
prompt: subPrompts[i],
});
choices.push(...data.choices);
const { completion_tokens: completionTokens, prompt_tokens: promptTokens, total_tokens: totalTokens, } = (_a = data.usage) !== null && _a !== void 0 ? _a : {};
if (completionTokens) {
tokenUsage.completionTokens =
((_b = tokenUsage.completionTokens) !== null && _b !== void 0 ? _b : 0) + completionTokens;
}
const params = this.invocationParams();
params.stop = stop !== null && stop !== void 0 ? stop : params.stop;
for (let i = 0; i < subPrompts.length; i += 1) {
const { data } = yield this.completionWithRetry(Object.assign(Object.assign({}, params), { prompt: subPrompts[i] }));
choices.push(...data.choices);
const { completion_tokens: completionTokens, prompt_tokens: promptTokens, total_tokens: totalTokens, } = (_a = data.usage) !== null && _a !== void 0 ? _a : {};
if (completionTokens) {
tokenUsage.completionTokens =
((_b = tokenUsage.completionTokens) !== null && _b !== void 0 ? _b : 0) + completionTokens;
}
if (promptTokens) {
tokenUsage.promptTokens = ((_c = tokenUsage.promptTokens) !== null && _c !== void 0 ? _c : 0) + promptTokens;
}
if (totalTokens) {
tokenUsage.totalTokens = ((_d = tokenUsage.totalTokens) !== null && _d !== void 0 ? _d : 0) + totalTokens;
}
if (promptTokens) {
tokenUsage.promptTokens = ((_c = tokenUsage.promptTokens) !== null && _c !== void 0 ? _c : 0) + promptTokens;
}
const generations = chunkArray(choices, this.n).map((promptChoices) => promptChoices.map((choice) => {
var _a;
return ({
text: (_a = choice.text) !== null && _a !== void 0 ? _a : "",
generationInfo: {
finishReason: choice.finish_reason,
logprobs: choice.logprobs,
},
});
}));
return {
generations,
llmOutput: { tokenUsage },
};
});
if (totalTokens) {
tokenUsage.totalTokens = ((_d = tokenUsage.totalTokens) !== null && _d !== void 0 ? _d : 0) + totalTokens;
}
}
const generations = chunkArray(choices, this.n).map((promptChoices) => promptChoices.map((choice) => {
var _a;
return ({
text: (_a = choice.text) !== null && _a !== void 0 ? _a : "",
generationInfo: {
finishReason: choice.finish_reason,
logprobs: choice.logprobs,
},
});
}));
return {
generations,
llmOutput: { tokenUsage },
};
}
/** @ignore */
completionWithRetry(request) {

@@ -190,0 +228,0 @@ const makeCompletionRequest = () => this.client.createCompletion(request);

@@ -1,17 +0,8 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { test } from "@jest/globals";
import { OpenAI } from "../openai";
test("Test OpenAI", () => __awaiter(void 0, void 0, void 0, function* () {
test("Test OpenAI", async () => {
const model = new OpenAI({ maxTokens: 5 });
const res = yield model.call("Print hello world");
const res = await model.call("Print hello world");
console.log({ res });
}));
});
//# sourceMappingURL=openai.test.js.map

@@ -6,6 +6,20 @@ import { BaseOutputParser } from "./parser";

export type InputValues = Record<string, any>;
/**
* Input common to all prompt templates.
*/
export interface BasePromptTemplateInput {
/**
* A list of variable names the prompt template expects
*/
inputVariables: string[];
/**
* How to parse the output of calling an LLM on this formatted prompt
*/
outputParser?: BaseOutputParser;
}
/**
* Base class for prompt templates. Exposes a format method that returns a
* string prompt given a set of input values.
* @augments BasePromptTemplateInput
*/
export declare abstract class BasePromptTemplate implements BasePromptTemplateInput {

@@ -15,7 +29,32 @@ inputVariables: string[];

constructor(input: BasePromptTemplateInput);
/**
* Format the prompt given the input values.
*
* @param inputValues - A dictionary of arguments to be passed to the prompt template.
* @returns A formatted prompt string.
*
* @example
* ```ts
* prompt.format({ foo: "bar" });
* ```
*/
abstract format(values: InputValues): string;
/**
* Return the string type key uniquely identifying this class of prompt template.
*/
abstract _getPromptType(): string;
/**
* Return a json-like object representing this prompt template.
*/
abstract serialize(): SerializedBasePromptTemplate;
/**
* Load a prompt template from a json-like object describing it.
*
* @remarks
* Deserializing needs to be async because templates (e.g. {@link FewShotPromptTemplate}) can
* reference remote resources that we read asynchronously with a web
* request.
*/
static deserialize(data: SerializedBasePromptTemplate): Promise<BasePromptTemplate>;
}
export {};

@@ -1,12 +0,8 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { PromptTemplate, FewShotPromptTemplate } from "./index";
const templateClasses = [PromptTemplate, FewShotPromptTemplate];
/**
* Base class for prompt templates. Exposes a format method that returns a
* string prompt given a set of input values.
* @augments BasePromptTemplateInput
*/
export class BasePromptTemplate {

@@ -32,20 +28,23 @@ constructor(input) {

}
// Deserializing needs to be async because templates (e.g. few_shot) can
// reference remote resources that we read asynchronously with a web
// request.
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
switch (data._type) {
case "prompt":
return PromptTemplate.deserialize(data);
case undefined:
return PromptTemplate.deserialize(Object.assign(Object.assign({}, data), { _type: "prompt" }));
case "few_shot":
return FewShotPromptTemplate.deserialize(data);
default:
throw new Error(`Invalid prompt type in config: ${data._type}`);
}
});
/**
* Load a prompt template from a json-like object describing it.
*
* @remarks
* Deserializing needs to be async because templates (e.g. {@link FewShotPromptTemplate}) can
* reference remote resources that we read asynchronously with a web
* request.
*/
static async deserialize(data) {
switch (data._type) {
case "prompt":
return PromptTemplate.deserialize(data);
case undefined:
return PromptTemplate.deserialize({ ...data, _type: "prompt" });
case "few_shot":
return FewShotPromptTemplate.deserialize(data);
default:
throw new Error(`Invalid prompt type in config: ${data._type}`);
}
}
}
//# sourceMappingURL=base.js.map

@@ -22,11 +22,46 @@ import { BasePromptTemplate, InputValues, BasePromptTemplateInput } from "./index";

export interface FewShotPromptTemplateInput extends BasePromptTemplateInput {
/**
* Examples to format into the prompt. Exactly one of this or
* {@link exampleSelector} must be
* provided.
*/
examples?: Example[];
/**
* An {@link ExampleSelector} used to select examples to format into the
* prompt. Exactly one of this or {@link examples} must be provided.
*/
exampleSelector?: ExampleSelector;
/**
* A {@link PromptTemplate} used to format a single example.
*/
examplePrompt: PromptTemplate;
exampleSelector?: ExampleSelector;
/**
* String separator used to join the prefix, the examples, and suffix.
*/
exampleSeparator: string;
/**
* A prompt template string to put before the examples.
*
* @defaultValue `""`
*/
prefix: string;
/**
* A prompt template string to put after the examples.
*/
suffix: string;
/**
* The format of the prompt template. Options are: 'f-string', 'jinja-2'
*/
templateFormat: TemplateFormat;
/**
* Whether or not to try validating the template on initialization.
*/
validateTemplate?: boolean;
}
/**
* Prompt template that contains few-shot examples.
* @augments BasePromptTemplate
* @augments FewShotPromptTemplateInput
*/
export declare class FewShotPromptTemplate extends BasePromptTemplate implements FewShotPromptTemplateInput {

@@ -33,0 +68,0 @@ examples?: InputValues[];
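
A hedged sketch of the fields above in use, with inline examples rather than an exampleSelector:

import { PromptTemplate, FewShotPromptTemplate } from "langchain/prompt";

const examplePrompt = new PromptTemplate({
  inputVariables: ["word", "antonym"],
  template: "Word: {word}\nAntonym: {antonym}",
});

const prompt = new FewShotPromptTemplate({
  examples: [
    { word: "happy", antonym: "sad" },
    { word: "tall", antonym: "short" },
  ],
  examplePrompt,
  exampleSeparator: "\n\n",
  prefix: "Give the antonym of every input.",
  suffix: "Word: {input}\nAntonym:",
  inputVariables: ["input"],
  templateFormat: "f-string",
});

console.log(prompt.format({ input: "big" }));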

@@ -1,10 +0,1 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { BasePromptTemplate, } from "./index";

@@ -15,2 +6,7 @@ import { checkValidTemplate, renderTemplate } from "./template";

import { BaseOutputParser } from "./parser";
/**
* Prompt template that contains few-shot examples.
* @augments BasePromptTemplate
* @augments FewShotPromptTemplateInput
*/
export class FewShotPromptTemplate extends BasePromptTemplate {

@@ -113,26 +109,24 @@ constructor(input) {

}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const serializedPrompt = resolveConfigFromFile("example_prompt", data);
const examplePrompt = yield PromptTemplate.deserialize(serializedPrompt);
let examples;
if (typeof data.examples === "string") {
examples = parseFileConfig(data.examples, [".json", ".yml", ".yaml"]);
}
else if (Array.isArray(data.examples)) {
examples = data.examples;
}
else {
throw new Error("Invalid examples format. Only list or string are supported.");
}
return new FewShotPromptTemplate({
inputVariables: data.input_variables,
outputParser: data.output_parser && BaseOutputParser.deserialize(data.output_parser),
examplePrompt,
examples,
exampleSeparator: data.example_separator,
prefix: resolveTemplateFromFile("prefix", data),
suffix: resolveTemplateFromFile("suffix", data),
templateFormat: data.template_format,
});
static async deserialize(data) {
const serializedPrompt = resolveConfigFromFile("example_prompt", data);
const examplePrompt = await PromptTemplate.deserialize(serializedPrompt);
let examples;
if (typeof data.examples === "string") {
examples = parseFileConfig(data.examples, [".json", ".yml", ".yaml"]);
}
else if (Array.isArray(data.examples)) {
examples = data.examples;
}
else {
throw new Error("Invalid examples format. Only list or string are supported.");
}
return new FewShotPromptTemplate({
inputVariables: data.input_variables,
outputParser: data.output_parser && BaseOutputParser.deserialize(data.output_parser),
examplePrompt,
examples,
exampleSeparator: data.example_separator,
prefix: resolveTemplateFromFile("prefix", data),
suffix: resolveTemplateFromFile("suffix", data),
templateFormat: data.template_format,
});

@@ -139,0 +133,0 @@ }

export { BasePromptTemplate, BasePromptTemplateInput, SerializedBasePromptTemplate, InputValues, } from "./base";
export { PromptTemplate, PromptTemplateInput, SerializedPromptTemplate, } from "./prompt";
export { FewShotPromptTemplate, FewShotPromptTemplateInput, SerializedFewShotTemplate, } from "./few_shot";
export { loadPrompt } from "./load";
export { BasePromptTemplate, } from "./base";
export { PromptTemplate, } from "./prompt";
export { FewShotPromptTemplate, } from "./few_shot";
export { loadPrompt } from "./load";
//# sourceMappingURL=index.js.map
import { BasePromptTemplate } from ".";
/**
* Load a prompt from {@link https://github.com/hwchase17/langchain-hub | LangchainHub} or local filesystem.
*
* @example
* Loading from LangchainHub:
* ```ts
* import { loadPrompt } from "langchain/prompt";
* const prompt = await loadPrompt("lc://prompts/hello-world/prompt.yaml");
* ```
*
* @example
* Loading from local filesystem:
* ```ts
* import { loadPrompt } from "langchain/prompt";
* const prompt = await loadPrompt("/path/to/prompt.json");
* ```
*/
export declare const loadPrompt: (uri: string) => Promise<BasePromptTemplate>;

@@ -1,16 +0,24 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { BasePromptTemplate } from ".";
import { loadFromHub } from "../util/hub";
import { parseFileConfig } from "../util";
const loadPromptFromFile = (file) => __awaiter(void 0, void 0, void 0, function* () { return BasePromptTemplate.deserialize(parseFileConfig(file)); });
export const loadPrompt = (uri) => __awaiter(void 0, void 0, void 0, function* () {
const hubResult = yield loadFromHub(uri, loadPromptFromFile, "prompts", new Set(["py", "json", "yaml"]));
const loadPromptFromFile = async (file) => BasePromptTemplate.deserialize(parseFileConfig(file));
/**
* Load a prompt from {@link https://github.com/hwchase17/langchain-hub | LangchainHub} or local filesystem.
*
* @example
* Loading from LangchainHub:
* ```ts
* import { loadPrompt } from "langchain/prompt";
* const prompt = await loadPrompt("lc://prompts/hello-world/prompt.yaml");
* ```
*
* @example
* Loading from local filesystem:
* ```ts
* import { loadPrompt } from "langchain/prompt";
* const prompt = await loadPrompt("/path/to/prompt.json");
* ```
*/
export const loadPrompt = async (uri) => {
const hubResult = await loadFromHub(uri, loadPromptFromFile, "prompts", new Set(["py", "json", "yaml"]));
if (hubResult) {

@@ -20,3 +28,3 @@ return hubResult;

return loadPromptFromFile(uri);
});
};
//# sourceMappingURL=load.js.map
export type SerializedOutputParser = SerializedRegexParser | SerializedCommaSeparatedListOutputParser;
/**
* Class to parse the output of an LLM call.
*/
export declare abstract class BaseOutputParser {
/**
* Parse the output of an LLM call.
*
* @param text - LLM output to parse.
* @returns Parsed output.
*/
abstract parse(text: string): string | string[] | Record<string, string>;
/**
* Return the string type key uniquely identifying this class of parser
*/
_type(): string;
/**
* Return a json-like object representing this output parser.
*/
abstract serialize(): SerializedOutputParser;
/**
* Load an output parser from a json-like object describing the parser.
*/
static deserialize(data: SerializedOutputParser): BaseOutputParser;
}
/**
* Class to parse the output of an LLM call to a list.
* @augments BaseOutputParser
*/
export declare abstract class ListOutputParser extends BaseOutputParser {

@@ -14,2 +36,6 @@ abstract parse(text: string): string[];

};
/**
* Class to parse the output of an LLM call as a comma-separated list.
* @augments ListOutputParser
*/
export declare class CommaSeparatedListOutputParser extends ListOutputParser {

@@ -26,2 +52,6 @@ parse(text: string): string[];

};
/**
* Class to parse the output of an LLM call into a dictionary.
* @augments BaseOutputParser
*/
export declare class RegexParser extends BaseOutputParser {

@@ -28,0 +58,0 @@ regex: string | RegExp;

@@ -0,5 +1,14 @@

/**
* Class to parse the output of an LLM call.
*/
export class BaseOutputParser {
/**
* Return the string type key uniquely identifying this class of parser
*/
_type() {
throw new Error("_type not implemented");
}
/**
* Load an output parser from a json-like object describing the parser.
*/
static deserialize(data) {

@@ -15,4 +24,12 @@ switch (data._type) {

}
/**
* Class to parse the output of an LLM call to a list.
* @augments BaseOutputParser
*/
export class ListOutputParser extends BaseOutputParser {
}
/**
* Class to parse the output of an LLM call as a comma-separated list.
* @augments ListOutputParser
*/
export class CommaSeparatedListOutputParser extends ListOutputParser {

@@ -31,2 +48,6 @@ parse(text) {

}
/**
* Class to parse the output of an LLM call into a dictionary.
* @augments BaseOutputParser
*/
export class RegexParser extends BaseOutputParser {

@@ -33,0 +54,0 @@ constructor(regex, outputKeys, defaultOutputKey) {
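
Concretely, the parser classes above are used along these lines; the exact split/regex semantics are assumed from the class names since the parse bodies are elided in this diff, and the import path is a guess.

import { CommaSeparatedListOutputParser, RegexParser } from "langchain/prompt";

// List parsing: assumed to split on ", ".
const listParser = new CommaSeparatedListOutputParser();
console.log(listParser.parse("red, green, blue")); // ["red", "green", "blue"]

// Regex parsing into a dictionary keyed by outputKeys, per the
// constructor(regex, outputKeys, defaultOutputKey) shape shown above.
const scoreParser = new RegexParser(/score: (\d+)/, ["score"], "noScore");
console.log(scoreParser.parse("score: 42")); // { score: "42" }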

@@ -12,7 +12,39 @@ import { BasePromptTemplate, BasePromptTemplateInput, InputValues } from "./index";

};
/**
* Inputs to create a {@link PromptTemplate}
* @augments BasePromptTemplateInput
*/
export interface PromptTemplateInput extends BasePromptTemplateInput {
/**
     * The prompt template
*/
template: string;
/**
     * The format of the prompt template. Options are 'f-string' and 'jinja-2'.
*
* @defaultValue 'f-string'
*/
templateFormat?: TemplateFormat;
/**
* Whether or not to try validating the template on initialization
*
* @defaultValue `true`
*/
validateTemplate?: boolean;
}
/**
* Schema to represent a basic prompt for an LLM.
* @augments BasePromptTemplate
* @augments PromptTemplateInput
*
* @example
* ```ts
* import { PromptTemplate } from "@langchain/prompt";
*
* const prompt = new PromptTemplate({
* inputVariables: ["foo"],
* template: "Say {foo}",
* });
* ```
*/
export declare class PromptTemplate extends BasePromptTemplate implements PromptTemplateInput {

@@ -25,3 +57,19 @@ template: string;

format(values: InputValues): string;
/**
* Take examples in list format with prefix and suffix to create a prompt.
*
     * Intended to be used as a way to dynamically create a prompt from examples.
*
* @param examples - List of examples to use in the prompt.
* @param suffix - String to go after the list of examples. Should generally set up the user's input.
     * @param inputVariables - A list of variable names the final prompt template will expect.
     * @param exampleSeparator - The separator to use in between examples.
     * @param prefix - String that should go before any examples. Generally includes instructions.
*
* @returns The final prompt template generated.
*/
static fromExamples(examples: string[], suffix: string, inputVariables: string[], exampleSeparator?: string, prefix?: string): PromptTemplate;
/**
* Load prompt template from a template f-string
*/
static fromTemplate(template: string): PromptTemplate;

@@ -28,0 +76,0 @@ serialize(): SerializedPromptTemplate;
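A sketch of `fromExamples` built from the signature above and the implementation further down, which joins prefix, examples, and suffix with the separator; the example strings are invented:

```ts
import { PromptTemplate } from "langchain/prompt";

const prompt = PromptTemplate.fromExamples(
  ["Input: happy\nOutput: sad", "Input: tall\nOutput: short"], // examples
  "Input: {adjective}\nOutput:", // suffix: sets up the user's input
  ["adjective"], // inputVariables the final template expects
  "\n\n", // exampleSeparator (the default)
  "Give the antonym of every input." // prefix: illustrative instructions
);
```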

@@ -1,10 +0,1 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { BasePromptTemplate, } from "./index";

@@ -14,2 +5,17 @@ import { checkValidTemplate, renderTemplate, parseFString, } from "./template";

import { BaseOutputParser } from "./parser";
/**
* Schema to represent a basic prompt for an LLM.
* @augments BasePromptTemplate
* @augments PromptTemplateInput
*
* @example
* ```ts
* import { PromptTemplate } from "@langchain/prompt";
*
* const prompt = new PromptTemplate({
* inputVariables: ["foo"],
* template: "Say {foo}",
* });
* ```
*/
export class PromptTemplate extends BasePromptTemplate {

@@ -47,2 +53,15 @@ constructor(input) {

}
/**
* Take examples in list format with prefix and suffix to create a prompt.
*
     * Intended to be used as a way to dynamically create a prompt from examples.
*
* @param examples - List of examples to use in the prompt.
* @param suffix - String to go after the list of examples. Should generally set up the user's input.
     * @param inputVariables - A list of variable names the final prompt template will expect.
     * @param exampleSeparator - The separator to use in between examples.
     * @param prefix - String that should go before any examples. Generally includes instructions.
*
* @returns The final prompt template generated.
*/
static fromExamples(examples, suffix, inputVariables, exampleSeparator = "\n\n", prefix = "") {

@@ -55,2 +74,5 @@ const template = [prefix, ...examples, suffix].join(exampleSeparator);

}
/**
* Load prompt template from a template f-string
*/
static fromTemplate(template) {

@@ -78,14 +100,12 @@ const names = new Set();

}
static deserialize(data) {
return __awaiter(this, void 0, void 0, function* () {
const res = new PromptTemplate({
inputVariables: data.input_variables,
outputParser: data.output_parser && BaseOutputParser.deserialize(data.output_parser),
template: resolveTemplateFromFile("template", data),
templateFormat: data.template_format,
});
return res;
static async deserialize(data) {
const res = new PromptTemplate({
inputVariables: data.input_variables,
outputParser: data.output_parser && BaseOutputParser.deserialize(data.output_parser),
template: resolveTemplateFromFile("template", data),
templateFormat: data.template_format,
});
return res;
}
}
//# sourceMappingURL=prompt.js.map
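`fromTemplate` infers the input variables from the f-string itself (the collapsed fragment above collects the variable names into a Set); a sketch:

```ts
import { PromptTemplate } from "langchain/prompt";

// Assumption: {adjective} and {topic} are discovered as inputVariables.
const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke about {topic}.");
const text = prompt.format({ adjective: "corny", topic: "compilers" });
```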

@@ -1,10 +0,1 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
import { expect, test } from "@jest/globals";

@@ -14,13 +5,13 @@ import path from "path";

const PROMPTS_DIR = path.join(__dirname, "prompts");
test("Load Hello World Prompt", () => __awaiter(void 0, void 0, void 0, function* () {
test("Load Hello World Prompt", async () => {
const helloWorld = path.join(PROMPTS_DIR, "hello_world.yaml");
const prompt = yield loadPrompt(helloWorld);
const prompt = await loadPrompt(helloWorld);
expect(prompt._getPromptType()).toBe("prompt");
expect(prompt.format({})).toBe("Say hello world.");
}));
test("Load hub prompt", () => __awaiter(void 0, void 0, void 0, function* () {
const prompt = yield loadPrompt("lc@abb92d8://prompts/hello-world/prompt.yaml");
});
test("Load hub prompt", async () => {
const prompt = await loadPrompt("lc@abb92d8://prompts/hello-world/prompt.yaml");
expect(prompt._getPromptType()).toBe("prompt");
expect(prompt.format({})).toBe("Say hello world.");
}));
});
//# sourceMappingURL=load.test.js.map
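For reference, a hypothetical serialized prompt consistent with the `hello_world.yaml` fixture these tests load, expressed as the parsed object `deserialize` would receive; the snake_case field names come from `PromptTemplate.deserialize` above, while the `_type` discriminator is an assumption:

```ts
// Hypothetical equivalent of prompts/hello_world.yaml after parsing:
const data = {
  _type: "prompt", // assumed discriminator; format({}) should yield "Say hello world."
  input_variables: [],
  template: "Say hello world.",
};
```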

@@ -1,1 +0,2 @@

export declare const loadFromHub: <T>(uri: string, loader: (a: string) => T, validPrefix: string, validSuffixes: Set<string>) => Promise<T | undefined>;
export type LoadValues = Record<string, any>;
export declare const loadFromHub: <T>(uri: string, loader: (a: string, values: LoadValues) => T, validPrefix: string, validSuffixes: Set<string>, values?: LoadValues) => Promise<T | undefined>;
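`loadFromHub` is an internal helper rather than a public export; a sketch of a call matching the new signature, mirroring how `prompt/load.js` invokes it above:

```ts
// loadPromptFromFile is the loader defined in prompt/load.js; the optional
// values record defaults to {} and is forwarded to the loader.
const result = await loadFromHub(
  "lc://prompts/hello-world/prompt.yaml",
  loadPromptFromFile,
  "prompts",
  new Set(["py", "json", "yaml"])
);
```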

@@ -1,10 +0,1 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var _a, _b;

@@ -18,3 +9,3 @@ import path from "path";

const URL_BASE = (_b = process.env.LANGCHAIN_HUB_URL_BASE) !== null && _b !== void 0 ? _b : "https://raw.githubusercontent.com/hwchase17/langchain-hub/";
export const loadFromHub = (uri, loader, validPrefix, validSuffixes) => __awaiter(void 0, void 0, void 0, function* () {
export const loadFromHub = async (uri, loader, validPrefix, validSuffixes, values = {}) => {
const match = uri.match(HUB_PATH_REGEX);

@@ -34,12 +25,12 @@ if (!match) {

const url = [URL_BASE, ref, remotePath].join("/");
const res = yield fetchWithTimeout(url, { timeout: 5000 });
const res = await fetchWithTimeout(url, { timeout: 5000 });
if (res.status !== 200) {
throw new Error(`Could not find file at ${url}`);
}
const text = yield res.text();
const text = await res.text();
const tmpdir = fs.mkdtempSync(path.join(os.tmpdir(), "langchain"));
const file = path.join(tmpdir, path.basename(remotePath));
fs.writeFileSync(file, text);
return loader(file);
});
return loader(file, values);
};
//# sourceMappingURL=hub.js.map

@@ -13,1 +13,2 @@ import { RequestInit } from "node-fetch";

export declare const parseFileConfig: (file: string, supportedTypes?: string[]) => any;
export declare const chunkArray: <T>(arr: T[], chunkSize: number) => T[][];
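`chunkArray`, implemented below as a reduce keyed on `Math.floor(index / chunkSize)`, behaves like this:

```ts
const chunks = chunkArray([1, 2, 3, 4, 5], 2); // => [[1, 2], [3, 4], [5]]
```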

@@ -1,21 +0,1 @@

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __rest = (this && this.__rest) || function (s, e) {
var t = {};
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
};
import path from "path";

@@ -25,11 +5,11 @@ import fetch from "node-fetch";

import * as yaml from "yaml";
export const fetchWithTimeout = (url, init) => __awaiter(void 0, void 0, void 0, function* () {
const { timeout } = init, rest = __rest(init, ["timeout"]);
export const fetchWithTimeout = async (url, init) => {
const { timeout, ...rest } = init;
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), timeout);
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const res = yield fetch(url, Object.assign(Object.assign({}, rest), { signal: controller.signal }));
const res = await fetch(url, { ...rest, signal: controller.signal });
clearTimeout(timeoutId);
return res;
});
};
const loadFileContents = (contents, format) => {

@@ -75,2 +55,9 @@ switch (format) {

};
export const chunkArray = (arr, chunkSize) => arr.reduce((chunks, elem, index) => {
const chunkIndex = Math.floor(index / chunkSize);
const chunk = chunks[chunkIndex] || [];
// eslint-disable-next-line no-param-reassign
chunks[chunkIndex] = chunk.concat([elem]);
return chunks;
}, []);
//# sourceMappingURL=index.js.map
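`fetchWithTimeout`, per the implementation above, aborts the request through an `AbortController` armed with `setTimeout`; a usage sketch (the URL is illustrative, and the function is an internal utility rather than a public export):

```ts
const res = await fetchWithTimeout("https://example.com/prompt.yaml", { timeout: 5000 });
if (res.status !== 200) {
  throw new Error(`Request failed with status ${res.status}`);
}
```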
{
"name": "langchain",
"version": "0.0.4",
"version": "0.0.5",
"description": "Typescript bindings for langchain",

@@ -11,37 +11,6 @@ "main": "./dist/cjs/index.js",

],
"exports": {
".": {
"types": "./dist/cjs/index.d.ts",
"require": "./dist/cjs/index.js",
"import": "./dist/esm/index.js"
},
"./agents": {
"types": "./dist/cjs/agents/index.d.ts",
"require": "./dist/cjs/agents/index.js",
"import": "./dist/esm/agents/index.js"
},
"./tools": {
"types": "./dist/cjs/agents/tools/index.d.ts",
"require": "./dist/cjs/agents/tools/index.js",
"import": "./dist/esm/agents/tools/index.js"
},
"./prompt": {
"types": "./dist/cjs/prompt/index.d.ts",
"require": "./dist/cjs/prompt/index.js",
"import": "./dist/esm/prompt/index.js"
},
"./chains": {
"types": "./dist/cjs/chains/index.d.ts",
"require": "./dist/cjs/chains/index.js",
"import": "./dist/esm/chains/index.js"
},
"./llms": {
"types": "./dist/cjs/llms/index.d.ts",
"require": "./dist/cjs/llms/index.js",
"import": "./dist/esm/llms/index.js"
}
},
"scripts": {
"build": "tsc --declaration --outDir dist/esm --module esnext && tsc --declaration --outDir dist/cjs",
"lint": "eslint .",
"doc": "typedoc",
"lint:fix": "yarn lint --fix",

@@ -71,2 +40,3 @@ "precommit": "tsc --noEmit && lint-staged",

"eslint-plugin-prettier": "^4.2.1",
"hnswlib-node": "^1.2.0",
"husky": "^8.0.3",

@@ -77,4 +47,7 @@ "jest": "^29.4.2",

"prettier": "^2.8.3",
"serpapi": "^1.1.0",
"serpapi": "^1.1.1",
"ts-jest": "^29.0.5",
"ts-node": "^10.9.1",
"typedoc": "^0.23.25",
"typedoc-plugin-missing-exports": "^1.0.0",
"typescript": "^4.9.5"

@@ -84,9 +57,6 @@ },

"exponential-backoff": "^3.1.0",
"expr-eval": "^2.0.2",
"node-fetch": "2",
"yaml": "^2.2.1"
},
"optionalDependencies": {
"openai": "^3.1.0",
"serpapi": "^1.1.0"
},
"lint-staged": {

@@ -98,2 +68,59 @@ "**/*.{ts,tsx}": [

},
"exports": {
".": {
"types": "./dist/cjs/index.d.ts",
"require": "./dist/cjs/index.js",
"import": "./dist/esm/index.js"
},
"./agents": {
"types": "./dist/cjs/agents/index.d.ts",
"require": "./dist/cjs/agents/index.js",
"import": "./dist/esm/agents/index.js"
},
"./tools": {
"types": "./dist/cjs/agents/tools/index.d.ts",
"require": "./dist/cjs/agents/tools/index.js",
"import": "./dist/esm/agents/tools/index.js"
},
"./prompt": {
"types": "./dist/cjs/prompt/index.d.ts",
"require": "./dist/cjs/prompt/index.js",
"import": "./dist/esm/prompt/index.js"
},
"./chains": {
"types": "./dist/cjs/chains/index.d.ts",
"require": "./dist/cjs/chains/index.js",
"import": "./dist/esm/chains/index.js"
},
"./llms": {
"types": "./dist/cjs/llms/index.d.ts",
"require": "./dist/cjs/llms/index.js",
"import": "./dist/esm/llms/index.js"
},
"./embeddings": {
"types": "./dist/cjs/embeddings/index.d.ts",
"require": "./dist/cjs/embeddings/index.js",
"import": "./dist/esm/embeddings/index.js"
},
"./vectorstores": {
"types": "./dist/cjs/vectorstores/index.d.ts",
"require": "./dist/cjs/vectorstores/index.js",
"import": "./dist/esm/vectorstores/index.js"
},
"./text_splitter": {
"types": "./dist/cjs/text_splitter.d.ts",
"require": "./dist/cjs/text_splitter.js",
"import": "./dist/esm/text_splitter.js"
},
"./memory": {
"types": "./dist/cjs/memory/index.d.ts",
"require": "./dist/cjs/memory/index.js",
"import": "./dist/esm/memory/index.js"
},
"./document": {
"types": "./dist/cjs/document.d.ts",
"require": "./dist/cjs/document.js",
"import": "./dist/esm/document.js"
}
},
"publishConfig": {

@@ -100,0 +127,0 @@ "access": "public"
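The relocated `exports` map, with the new `embeddings`, `vectorstores`, `text_splitter`, `memory`, and `document` entry points added in 0.0.5, resolves subpath imports such as:

```ts
import { loadPrompt } from "langchain/prompt";
// Assumed export from the new text_splitter entry point:
import { RecursiveCharacterTextSplitter } from "langchain/text_splitter";
```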

Sorry, the diff of this file is not supported yet