Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

chatgpt-official

Package Overview
Dependencies
Maintainers
1
Versions
20
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

chatgpt-official - npm Package Compare versions

Comparing version 1.1.4 to 1.1.5

dist/classes/openai.d.ts

12

dist/classes/chatgpt.d.ts

@@ -1,2 +0,2 @@

import Options from "../models/options.js";
import Options from "../models/chatgpt-options.js";
import Conversation from "../models/conversation.js";

@@ -8,3 +8,2 @@ declare class ChatGPT {

options: Options;
private openAi;
constructor(key: string, options?: Options);

@@ -20,13 +19,8 @@ private chunksToLines;

askStream(data: (arg0: string) => void, prompt: string, conversationId?: string, userName?: string): Promise<string>;
private aksRevProxy;
private isJSON;
private generatePrompt;
moderate(prompt: string): Promise<boolean>;
private convToString;
private generateMessages;
private countTokens;
private getToday;
private getTime;
private wait;
private validateToken;
getTokens(): Promise<void>;
}
export default ChatGPT;
import { encode } from "gpt-3-encoder";
import { Configuration, OpenAIApi } from "openai";
import axios from "axios";

@@ -10,3 +9,2 @@ import MessageType from "../enums/message-type.js";

options;
openAi;
constructor(key, options) {

@@ -16,5 +14,5 @@ this.key = key;

this.options = {
model: options?.model || "text-davinci-003",
model: options?.model || "gpt-3.5-turbo",
temperature: options?.temperature || 0.7,
max_tokens: options?.max_tokens || 512,
max_tokens: options?.max_tokens || 100,
top_p: options?.top_p || 0.9,

@@ -24,11 +22,3 @@ frequency_penalty: options?.frequency_penalty || 0,

instructions: options?.instructions || `You are ChatGPT, a language model developed by OpenAI. You are designed to respond to user input in a conversational manner, Answer as concisely as possible. Your training data comes from a diverse range of internet text and You have been trained to generate human-like responses to various questions and prompts. You can provide information on a wide range of topics, but your knowledge is limited to what was present in your training data, which has a cutoff date of 2021. You strive to provide accurate and helpful information to the best of your ability.\nKnowledge cutoff: 2021-09`,
stop: options?.stop || "<|im_end|>",
aiName: options?.aiName || "ChatGPT",
moderation: options?.moderation || false,
revProxy: options?.revProxy,
};
this.openAi = new OpenAIApi(new Configuration({ apiKey: this.key }));
if (!this.key.startsWith("sk-"))
if (!this.accessToken || !this.validateToken(this.accessToken))
this.getTokens();
}

@@ -61,7 +51,5 @@ async *chunksToLines(chunksAsync) {

getInstructions(username) {
return `[START_INSTRUCTIONS]
${this.options.instructions}
return `${this.options.instructions}
Current date: ${this.getToday()}
Current time: ${this.getTime()}${username !== "User" ? `\nName of the user talking to: ${username}` : ""}
[END_INSTRUCTIONS]${this.options.stop}\n`;
Current time: ${this.getTime()}${username !== "User" ? `\nName of the user talking to: ${username}` : ""}`;
}

@@ -97,135 +85,11 @@ addConversation(conversationId, userName = "User") {

async ask(prompt, conversationId = "default", userName = "User") {
if (!this.key.startsWith("sk-"))
if (!this.accessToken || !this.validateToken(this.accessToken))
await this.getTokens();
let conversation = this.getConversation(conversationId, userName);
let promptStr = this.generatePrompt(conversation, prompt);
if (this.options.moderation && this.key.startsWith("sk-")) {
let flagged = await this.moderate(promptStr);
if (flagged) {
return "Your message was flagged as inappropriate and was not sent.";
}
}
try {
let responseStr;
if (!this.options.revProxy) {
const response = await this.openAi.createCompletion({
model: this.options.model,
prompt: promptStr,
temperature: this.options.temperature,
max_tokens: this.options.max_tokens,
top_p: this.options.top_p,
frequency_penalty: this.options.frequency_penalty,
presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
});
responseStr = response.data.choices[0].text;
}
else {
responseStr = await this.aksRevProxy(promptStr);
}
responseStr = responseStr
.replace(new RegExp(`\n${conversation.userName}:.*`, "gs"), "")
.replace(new RegExp(`${conversation.userName}:.*`, "gs"), "")
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${this.options.aiName}: `, "")
.trim();
conversation.messages.push({
content: responseStr,
type: MessageType.AI,
date: Date.now(),
});
return responseStr;
}
catch (error) {
throw new Error(error?.response?.data?.error?.message);
}
return await this.askStream((data) => { }, prompt, conversationId, userName);
}
async askStream(data, prompt, conversationId = "default", userName = "User") {
if (!this.key.startsWith("sk-"))
if (!this.accessToken || !this.validateToken(this.accessToken))
await this.getTokens();
let conversation = this.getConversation(conversationId, userName);
if (this.options.moderation && this.key.startsWith("sk-")) {
let flagged = await this.moderate(prompt);
if (flagged) {
for (let chunk in "Your message was flagged as inappropriate and was not sent.".split("")) {
data(chunk);
await this.wait(100);
}
return "Your message was flagged as inappropriate and was not sent.";
}
}
let promptStr = this.generatePrompt(conversation, prompt);
try {
let responseStr = "";
if (!this.options.revProxy) {
const response = await this.openAi.createCompletion({
model: this.options.model,
prompt: promptStr,
temperature: this.options.temperature,
max_tokens: this.options.max_tokens,
top_p: this.options.top_p,
frequency_penalty: this.options.frequency_penalty,
presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
stream: true,
}, { responseType: "stream" });
for await (const message of this.streamCompletion(response.data)) {
try {
const parsed = JSON.parse(message);
const { text } = parsed.choices[0];
responseStr += text;
data(text);
}
catch (error) {
console.error("Could not JSON parse stream message", message, error);
}
}
}
else {
responseStr = await this.aksRevProxy(promptStr, data);
}
responseStr = responseStr
.replace(new RegExp(`\n${conversation.userName}:.*`, "gs"), "")
.replace(new RegExp(`${conversation.userName}:.*`, "gs"), "")
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${this.options.aiName}: `, "")
.trim();
conversation.messages.push({
content: responseStr,
type: MessageType.AI,
date: Date.now(),
});
return responseStr;
}
catch (error) {
if (error.response?.status) {
console.error(error.response.status, error.message);
error.response.data.on("data", (data) => {
const message = data.toString();
try {
const parsed = JSON.parse(message);
console.error("An error occurred during OpenAI request: ", parsed);
}
catch (error) {
console.error("An error occurred during OpenAI request: ", message);
}
});
}
else {
console.error("An error occurred during OpenAI request", error);
}
}
}
async aksRevProxy(prompt, data = (_) => { }) {
if (!this.key.startsWith("sk-"))
if (!this.accessToken || !this.validateToken(this.accessToken))
await this.getTokens();
try {
const response = await axios.post(this.options.revProxy, {
const response = await axios.post(`https://api.openai.com/v1/chat/completions`, {
model: this.options.model,
prompt: prompt,
messages: promptStr,
temperature: this.options.temperature,

@@ -236,3 +100,3 @@ max_tokens: this.options.max_tokens,

presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
stream: true,
}, {

@@ -243,35 +107,36 @@ responseType: "stream",

"Content-Type": "application/json",
Authorization: `Bearer ${this.key.startsWith("sk-") ? this.key : this.accessToken}`,
Authorization: `Bearer ${this.key}`,
},
});
let responseStr = "";
response.data.on("data", (chunk) => {
responseStr += chunk;
data(chunk);
});
await new Promise((resolve) => response.data.on("end", resolve));
responseStr = responseStr.trim();
if (this.isJSON(responseStr)) {
let jsonData = JSON.parse(responseStr);
let response = jsonData?.choices[0]?.text;
return response ?? "";
for await (const message of this.streamCompletion(response.data)) {
try {
const parsed = JSON.parse(message);
const { content } = parsed.choices[0].delta;
if (content) {
responseStr += content;
data(content);
}
}
catch (error) {
console.error("Could not JSON parse stream message", message, error);
}
}
else
return responseStr;
return responseStr;
}
catch (error) {
throw new Error(error?.response?.data?.error?.message);
if (error.response && error.response.data && error.response.headers["content-type"] === "application/json") {
let errorResponseStr = "";
for await (const message of error.response.data) {
errorResponseStr += message;
}
const errorResponseJson = JSON.parse(errorResponseStr);
throw new Error(errorResponseJson.error.message);
}
else {
throw new Error(error.message);
}
}
}
isJSON(str) {
try {
JSON.parse(str);
return true;
}
catch (e) {
return false;
}
}
generatePrompt(conversation, prompt) {
prompt = [",", "!", "?", "."].includes(prompt[prompt.length - 1]) ? prompt : `${prompt}.`;
conversation.messages.push({

@@ -282,21 +147,15 @@ content: prompt,

});
let promptStr = this.convToString(conversation);
let promptEncodedLength = encode(promptStr).length;
let messages = this.generateMessages(conversation);
let promptEncodedLength = this.countTokens(messages);
let totalLength = promptEncodedLength + this.options.max_tokens;
while (totalLength > 4096) {
while (totalLength > 99999999999999999999999999999) {
conversation.messages.shift();
promptStr = this.convToString(conversation);
promptEncodedLength = encode(promptStr).length;
messages = this.generateMessages(conversation);
promptEncodedLength = this.countTokens(messages);
totalLength = promptEncodedLength + this.options.max_tokens;
}
conversation.lastActive = Date.now();
return promptStr;
return messages;
}
async moderate(prompt) {
let response = await this.openAi.createModeration({
input: prompt,
});
return response.data.results[0].flagged;
}
convToString(conversation) {
generateMessages(conversation) {
let messages = [];

@@ -306,10 +165,22 @@ for (let i = 0; i < conversation.messages.length; i++) {

if (i === 0) {
messages.push(this.getInstructions(conversation.userName));
messages.push({
role: "system",
content: this.getInstructions(conversation.userName),
});
}
messages.push(`${message.type === MessageType.User ? conversation.userName : this.options.aiName}: ${conversation.messages[i].content}${this.options.stop}`);
messages.push({
role: message.type === MessageType.User ? "user" : "assistant",
content: message.content,
});
}
messages.push(`${this.options.aiName}: `);
let result = messages.join("\n");
return result;
return messages;
}
countTokens(messages) {
let tokens = 0;
for (let i = 0; i < messages.length; i++) {
let message = messages[i];
tokens += encode(message.content).length;
}
return tokens;
}
getToday() {

@@ -332,40 +203,4 @@ let today = new Date();

}
wait(ms) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
validateToken(token) {
console.log("validating access token...");
if (!token)
return false;
const parsed = JSON.parse(Buffer.from(token.split(".")[1], "base64").toString());
console.log(Date.now() <= parsed.exp * 1000 ? "valid" : "invalid");
return Date.now() <= parsed.exp * 1000;
}
async getTokens() {
console.log("getting new access token...");
if (!this.key) {
throw new Error("No session token provided");
}
const response = await axios.request({
method: "GET",
url: Buffer.from("aHR0cHM6Ly9leHBsb3Jlci5hcGkub3BlbmFpLmNvbS9hcGkvYXV0aC9zZXNzaW9u", "base64").toString("ascii"),
headers: {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/109.0",
Cookie: `__Secure-next-auth.session-token=${this.key}`,
},
});
try {
const cookies = response.headers["set-cookie"];
const sessionCookie = cookies.find((cookie) => cookie.startsWith("__Secure-next-auth.session-token"));
this.key = sessionCookie.split("=")[1];
this.accessToken = response.data.accessToken;
console.log(this.key);
console.log(this.accessToken);
}
catch (err) {
throw new Error(`Failed to fetch new session tokens due to: ${err}`);
}
}
}
export default ChatGPT;
//# sourceMappingURL=chatgpt.js.map
declare enum MessageType {
User = 0,
AI = 1
System = 0,
User = 1,
Assistant = 2
}
export default MessageType;
var MessageType;
(function (MessageType) {
MessageType[MessageType["User"] = 0] = "User";
MessageType[MessageType["AI"] = 1] = "AI";
MessageType[MessageType["System"] = 0] = "System";
MessageType[MessageType["User"] = 1] = "User";
MessageType[MessageType["Assistant"] = 2] = "Assistant";
})(MessageType || (MessageType = {}));
export default MessageType;
//# sourceMappingURL=message-type.js.map

@@ -0,2 +1,4 @@

import OpenAI from "./classes/openai.js";
import ChatGPT from "./classes/chatgpt.js";
export default ChatGPT;
export default OpenAI;
export { ChatGPT };

@@ -0,3 +1,5 @@

import OpenAI from "./classes/openai.js";
import ChatGPT from "./classes/chatgpt.js";
export default ChatGPT;
export default OpenAI;
export { ChatGPT };
//# sourceMappingURL=index.js.map

@@ -1,2 +0,2 @@

import ChatGPT from "../dist/index.js";
import OpenAI from "../dist/index.js";
import readline from "readline";

@@ -9,3 +9,3 @@

let bot = new ChatGPT(process.env.OPENAI_API_KEY);
let bot = new OpenAI(process.env.OPENAI_API_KEY);

@@ -12,0 +12,0 @@ async function main() {

{
"name": "chatgpt-official",
"version": "1.1.4",
"version": "1.1.5",
"description": "ChatGPT Client using official OpenAI API",

@@ -8,3 +8,3 @@ "main": "dist/index.js",

"scripts": {
"start": "tsc;node examples/cli.js",
"start": "tsc;node examples/chat.js",
"build": "tsc"

@@ -11,0 +11,0 @@ },

@@ -21,4 +21,6 @@ # chatgpt-official - a simple library to create ChatGPT that uses OpenAI official API [[Discord](https://discord.pawan.krd)]

### Official Chat Completion API from OpenAI
```javascript
import ChatGPT from "chatgpt-official";
import { ChatGPT } from "chatgpt-official";

@@ -30,19 +32,30 @@ let bot = new ChatGPT("<OPENAI_API_KEY>");

```
## Usage
### Creating ChatGPT with Official OpenAI Completion API
```javascript
import ChatGPT from "chatgpt-official";
import OpenAI from "chatgpt-official";
let bot = new OpenAI("<OPENAI_API_KEY>");
let response = await bot.ask("Hello?");
console.log(response);
```
## Usage: Official Chat Completion API from OpenAI
```javascript
import { ChatGPT } from "chatgpt-official";
let options = {
temperature: 0.7, // OpenAI parameter
max_tokens: 256, // OpenAI parameter [Max response size by tokens]
top_p: 1, // OpenAI parameter
frequency_penalty: 0, // OpenAI parameter
presence_penalty: 0, // OpenAI parameter
instructions: `You are ChatGPT, a large language model trained by OpenAI.`, // initial instructions for the bot
model: "text-davinci-003", // OpenAI parameter `text-davinci-003` is PAID
stop: "<|im_end|>", // OpenAI parameter
}
temperature: 0.7, // OpenAI parameter
max_tokens: 100, // OpenAI parameter [Max response size by tokens]
top_p: 0.9, // OpenAI parameter
frequency_penalty: 0, // OpenAI parameter
presence_penalty: 0, // OpenAI parameter
instructions: `You are ChatGPT, a large language model trained by OpenAI.`, // initial instructions for the bot
model: "gpt-3.5-turbo", // OpenAI parameter `gpt-3.5-turbo` is PAID
};
let bot = new ChatGPT("<OPENAI_API_KEY>", options); // Note: options is optional
let bot = new ChatGPT("<OPENAI_API_KEY>", options); // Note: options is optional

@@ -60,1 +73,31 @@ let response = await bot.ask("Hello?");

```
## Usage: Creating ChatGPT with Official OpenAI Completion API
```javascript
import OpenAI from "chatgpt-official";
let options = {
temperature: 0.7, // OpenAI parameter
max_tokens: 256, // OpenAI parameter [Max response size by tokens]
top_p: 0.9, // OpenAI parameter
frequency_penalty: 0, // OpenAI parameter
presence_penalty: 0, // OpenAI parameter
instructions: `You are ChatGPT, a large language model trained by OpenAI.`, // initial instructions for the bot
model: "text-davinci-003", // OpenAI parameter `text-davinci-003` is PAID
stop: "<|im_end|>", // OpenAI parameter
};
let bot = new OpenAI("<OPENAI_API_KEY>", options); // Note: options is optional
let response = await bot.ask("Hello?");
console.log(response);
let conversationId = "conversation name";
let response1 = await bot.ask("Hello?", conversationId);
console.log(response1);
let conversationId2 = "another conversation name";
let response2 = await bot.ask("Hello?", conversationId2);
console.log(response2);
```
import { encode } from "gpt-3-encoder";
import { Configuration, OpenAIApi } from "openai";
import axios from "axios";
import Options from "../models/options.js";
import Options from "../models/chatgpt-options.js";
import Conversation from "../models/conversation.js";
import Message from "../models/chatgpt-message.js";
import MessageType from "../enums/message-type.js";

@@ -14,3 +14,2 @@

public options: Options;
private openAi: OpenAIApi;
constructor(key: string, options?: Options) {

@@ -20,5 +19,5 @@ this.key = key;

this.options = {
model: options?.model || "text-davinci-003", // default model
model: options?.model || "gpt-3.5-turbo", // default model
temperature: options?.temperature || 0.7,
max_tokens: options?.max_tokens || 512,
max_tokens: options?.max_tokens || 100,
top_p: options?.top_p || 0.9,

@@ -28,9 +27,3 @@ frequency_penalty: options?.frequency_penalty || 0,

instructions: options?.instructions || `You are ChatGPT, a language model developed by OpenAI. You are designed to respond to user input in a conversational manner, Answer as concisely as possible. Your training data comes from a diverse range of internet text and You have been trained to generate human-like responses to various questions and prompts. You can provide information on a wide range of topics, but your knowledge is limited to what was present in your training data, which has a cutoff date of 2021. You strive to provide accurate and helpful information to the best of your ability.\nKnowledge cutoff: 2021-09`,
stop: options?.stop || "<|im_end|>",
aiName: options?.aiName || "ChatGPT",
moderation: options?.moderation || false,
revProxy: options?.revProxy,
};
this.openAi = new OpenAIApi(new Configuration({ apiKey: this.key }));
if (!this.key.startsWith("sk-")) if (!this.accessToken || !this.validateToken(this.accessToken)) this.getTokens();
}

@@ -67,7 +60,5 @@

private getInstructions(username: string): string {
return `[START_INSTRUCTIONS]
${this.options.instructions}
return `${this.options.instructions}
Current date: ${this.getToday()}
Current time: ${this.getTime()}${username !== "User" ? `\nName of the user talking to: ${username}` : ""}
[END_INSTRUCTIONS]${this.options.stop}\n`;
Current time: ${this.getTime()}${username !== "User" ? `\nName of the user talking to: ${username}` : ""}`;
}

@@ -110,140 +101,15 @@

public async ask(prompt: string, conversationId: string = "default", userName: string = "User") {
if (!this.key.startsWith("sk-")) if (!this.accessToken || !this.validateToken(this.accessToken)) await this.getTokens();
let conversation = this.getConversation(conversationId, userName);
let promptStr = this.generatePrompt(conversation, prompt);
if (this.options.moderation && this.key.startsWith("sk-")) {
let flagged = await this.moderate(promptStr);
if (flagged) {
return "Your message was flagged as inappropriate and was not sent.";
}
}
try {
let responseStr: string;
if (!this.options.revProxy) {
const response = await this.openAi.createCompletion({
model: this.options.model,
prompt: promptStr,
temperature: this.options.temperature,
max_tokens: this.options.max_tokens,
top_p: this.options.top_p,
frequency_penalty: this.options.frequency_penalty,
presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
});
responseStr = response.data.choices[0].text;
} else {
responseStr = await this.aksRevProxy(promptStr);
}
responseStr = responseStr
.replace(new RegExp(`\n${conversation.userName}:.*`, "gs"), "")
.replace(new RegExp(`${conversation.userName}:.*`, "gs"), "")
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${this.options.aiName}: `, "")
.trim();
conversation.messages.push({
content: responseStr,
type: MessageType.AI,
date: Date.now(),
});
return responseStr;
} catch (error: any) {
throw new Error(error?.response?.data?.error?.message);
}
return await this.askStream((data) => {}, prompt, conversationId, userName);
}
public async askStream(data: (arg0: string) => void, prompt: string, conversationId: string = "default", userName: string = "User") {
if (!this.key.startsWith("sk-")) if (!this.accessToken || !this.validateToken(this.accessToken)) await this.getTokens();
let conversation = this.getConversation(conversationId, userName);
if (this.options.moderation && this.key.startsWith("sk-")) {
let flagged = await this.moderate(prompt);
if (flagged) {
for (let chunk in "Your message was flagged as inappropriate and was not sent.".split("")) {
data(chunk);
await this.wait(100);
}
return "Your message was flagged as inappropriate and was not sent.";
}
}
let promptStr = this.generatePrompt(conversation, prompt);
try {
let responseStr: string = "";
if (!this.options.revProxy) {
const response = await this.openAi.createCompletion(
{
model: this.options.model,
prompt: promptStr,
temperature: this.options.temperature,
max_tokens: this.options.max_tokens,
top_p: this.options.top_p,
frequency_penalty: this.options.frequency_penalty,
presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
stream: true,
},
{ responseType: "stream" },
);
for await (const message of this.streamCompletion(response.data)) {
try {
const parsed = JSON.parse(message);
const { text } = parsed.choices[0];
responseStr += text;
data(text);
} catch (error) {
console.error("Could not JSON parse stream message", message, error);
}
}
} else {
responseStr = await this.aksRevProxy(promptStr, data);
}
responseStr = responseStr
.replace(new RegExp(`\n${conversation.userName}:.*`, "gs"), "")
.replace(new RegExp(`${conversation.userName}:.*`, "gs"), "")
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${this.options.aiName}: `, "")
.trim();
conversation.messages.push({
content: responseStr,
type: MessageType.AI,
date: Date.now(),
});
return responseStr;
} catch (error: any) {
if (error.response?.status) {
console.error(error.response.status, error.message);
error.response.data.on("data", (data: any) => {
const message = data.toString();
try {
const parsed = JSON.parse(message);
console.error("An error occurred during OpenAI request: ", parsed);
} catch (error) {
console.error("An error occurred during OpenAI request: ", message);
}
});
} else {
console.error("An error occurred during OpenAI request", error);
}
}
}
private async aksRevProxy(prompt: string, data: (arg0: string) => void = (_) => {}) {
if (!this.key.startsWith("sk-")) if (!this.accessToken || !this.validateToken(this.accessToken)) await this.getTokens();
try {
const response = await axios.post(
this.options.revProxy,
`https://api.openai.com/v1/chat/completions`,
{
model: this.options.model,
prompt: prompt,
messages: promptStr,
temperature: this.options.temperature,

@@ -254,4 +120,3 @@ max_tokens: this.options.max_tokens,

presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
// stream: true,
stream: true,
},

@@ -263,3 +128,3 @@ {

"Content-Type": "application/json",
Authorization: `Bearer ${this.key.startsWith("sk-") ? this.key : this.accessToken}`,
Authorization: `Bearer ${this.key}`,
},

@@ -271,31 +136,33 @@ },

response.data.on("data", (chunk: string) => {
responseStr += chunk;
data(chunk);
});
for await (const message of this.streamCompletion(response.data)) {
try {
const parsed = JSON.parse(message);
const { content } = parsed.choices[0].delta;
if (content) {
responseStr += content;
data(content);
}
} catch (error) {
console.error("Could not JSON parse stream message", message, error);
}
}
await new Promise((resolve) => response.data.on("end", resolve));
responseStr = responseStr.trim();
if (this.isJSON(responseStr)) {
let jsonData = JSON.parse(responseStr);
let response = jsonData?.choices[0]?.text;
return response ?? "";
} else return responseStr;
return responseStr;
} catch (error: any) {
throw new Error(error?.response?.data?.error?.message);
}
}
if (error.response && error.response.data && error.response.headers["content-type"] === "application/json") {
let errorResponseStr = "";
private isJSON(str: string) {
try {
JSON.parse(str);
return true;
} catch (e) {
return false;
for await (const message of error.response.data) {
errorResponseStr += message;
}
const errorResponseJson = JSON.parse(errorResponseStr);
throw new Error(errorResponseJson.error.message);
} else {
throw new Error(error.message);
}
}
}
private generatePrompt(conversation: Conversation, prompt: string) {
prompt = [",", "!", "?", "."].includes(prompt[prompt.length - 1]) ? prompt : `${prompt}.`; // Thanks to https://github.com/optionsx
private generatePrompt(conversation: Conversation, prompt: string): Message[] {
conversation.messages.push({

@@ -307,10 +174,10 @@ content: prompt,

let promptStr = this.convToString(conversation);
let promptEncodedLength = encode(promptStr).length;
let messages = this.generateMessages(conversation);
let promptEncodedLength = this.countTokens(messages);
let totalLength = promptEncodedLength + this.options.max_tokens;
while (totalLength > 4096) {
while (totalLength > 99999999999999999999999999999) {
conversation.messages.shift();
promptStr = this.convToString(conversation);
promptEncodedLength = encode(promptStr).length;
messages = this.generateMessages(conversation);
promptEncodedLength = this.countTokens(messages);
totalLength = promptEncodedLength + this.options.max_tokens;

@@ -320,26 +187,32 @@ }

conversation.lastActive = Date.now();
return promptStr;
return messages;
}
public async moderate(prompt: string) {
let response = await this.openAi.createModeration({
input: prompt,
});
return response.data.results[0].flagged;
}
private convToString(conversation: Conversation) {
let messages: string[] = [];
private generateMessages(conversation: Conversation): Message[] {
let messages: Message[] = [];
for (let i = 0; i < conversation.messages.length; i++) {
let message = conversation.messages[i];
if (i === 0) {
messages.push(this.getInstructions(conversation.userName));
messages.push({
role: "system",
content: this.getInstructions(conversation.userName),
});
}
messages.push(`${message.type === MessageType.User ? conversation.userName : this.options.aiName}: ${conversation.messages[i].content}${this.options.stop}`);
messages.push({
role: message.type === MessageType.User ? "user" : "assistant",
content: message.content,
});
}
messages.push(`${this.options.aiName}: `);
let result = messages.join("\n");
return result;
return messages;
}
private countTokens(messages: Message[]): number {
let tokens: number = 0;
for (let i = 0; i < messages.length; i++) {
let message = messages[i];
tokens += encode(message.content).length;
}
return tokens;
}
private getToday() {

@@ -363,40 +236,4 @@ let today = new Date();

}
private wait(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
private validateToken(token: string) {
if (!token) return false;
const parsed = JSON.parse(Buffer.from(token.split(".")[1], "base64").toString());
return Date.now() <= parsed.exp * 1000;
}
async getTokens() {
if (!this.key) {
throw new Error("No session token provided");
}
const response = await axios.request({
method: "GET",
url: Buffer.from("aHR0cHM6Ly9leHBsb3Jlci5hcGkub3BlbmFpLmNvbS9hcGkvYXV0aC9zZXNzaW9u", "base64").toString("ascii"),
headers: {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/109.0",
Cookie: `__Secure-next-auth.session-token=${this.key}`,
},
});
try {
const cookies = response.headers["set-cookie"];
const sessionCookie = cookies.find((cookie) => cookie.startsWith("__Secure-next-auth.session-token"));
this.key = sessionCookie.split("=")[1];
this.accessToken = response.data.accessToken;
} catch (err) {
throw new Error(`Failed to fetch new session tokens due to: ${err}`);
}
}
}
export default ChatGPT;
enum MessageType {
System,
User,
AI,
Assistant,
}
export default MessageType;

@@ -0,3 +1,5 @@

import OpenAI from "./classes/openai.js";
import ChatGPT from "./classes/chatgpt.js";
export default ChatGPT;
export default OpenAI;
export { ChatGPT };

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc