chatgpt-official
Comparing version 1.0.8 to 1.0.9
@@ -7,14 +7,18 @@ import options from "../models/options.js";
options: options;
private openAi;
instructionTokens: number;
constructor(key: string, options?: options);
addConversation(conversationId: string, userName?: string, aiName?: string): {
addConversation(conversationId: string, userName?: string): {
id: string;
userName: string;
aiName: string;
messages: any[];
};
private chunksToLines;
private linesToMessages;
private streamCompletion;
private getInstructions;
getConversation(conversationId: string, userName?: string, aiName?: string): conversation;
getConversation(conversationId: string, userName?: string): conversation;
resetConversation(conversationId: string): conversation;
ask(prompt: string, conversationId?: string, userName?: string, aiName?: string): Promise<any>;
ask(prompt: string, conversationId?: string, userName?: string): Promise<string>;
askStream(data: (arg0: string) => void, prompt: string, conversationId?: string, userName?: string): Promise<string>;
private generatePrompt;
@@ -21,0 +25,0 @@ private getToday;
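The declaration hunk above is the clearest summary of the 1.0.9 API change: the per-call aiName parameter is gone from addConversation, getConversation and ask (it becomes a client-wide option, see the options hunks further down), ask is now typed as Promise<string>, and a new askStream method takes a callback that receives partial text as it arrives. A minimal usage sketch under those assumptions, assuming the default export is the ChatGPT class shown in these diffs; the API key, conversation id and user name are placeholders, not values from the package:

```ts
import ChatGPT from "chatgpt-official";

async function demo() {
  // Placeholder key and names; aiName is now set once on the client instead of per call.
  const bot = new ChatGPT("sk-...", { aiName: "HelperBot" });

  // ask() resolves to the reply text (Promise<string> in 1.0.9).
  const reply = await bot.ask("What changed in this release?", "demo-conversation", "Alice");
  console.log(reply);

  // askStream() calls the callback with each partial chunk and still returns the full text.
  const full = await bot.askStream(
    (chunk) => process.stdout.write(chunk),
    "Summarize it in one sentence.",
    "demo-conversation",
    "Alice",
  );
  console.log("\nFull response:", full);
}

demo().catch(console.error);
```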
@@ -1,3 +0,3 @@
import axios from "axios";
import { encode } from "gpt-3-encoder";
import { Configuration, OpenAIApi } from "openai";
class ChatGPT {
@@ -7,2 +7,3 @@ key;
options;
openAi;
instructionTokens;
@@ -13,4 +14,4 @@ constructor(key, options) {
this.options = {
model: options?.model || "text-chat-davinci-002-20230126",
temperature: options?.temperature || 0.1,
model: options?.model || "text-chat-davinci-002-20221122",
temperature: options?.temperature || 0.7,
max_tokens: options?.max_tokens || 1024,
@@ -26,18 +27,44 @@ top_p: options?.top_p || 0.9,
stop: options?.stop || "<|im_end|>",
aiName: options?.aiName || "ChatGPT",
};
this.openAi = new OpenAIApi(new Configuration({ apiKey: this.key }));
this.instructionTokens = encode(this.options.instructions).length;
}
addConversation(conversationId, userName = "User", aiName = "ChatGPT") {
addConversation(conversationId, userName = "User") {
let conversation = {
id: conversationId,
userName: userName,
aiName: aiName,
messages: [],
};
conversation.messages.push(this.getInstructions(aiName));
conversation.messages.push(this.getInstructions());
this.conversations.push(conversation);
return conversation;
}
getInstructions(aiName) {
return `${aiName !== null ? this.options.instructions.replace("You are ChatGPT", `You are ${aiName}`) : this.options.instructions}
async *chunksToLines(chunksAsync) {
let previous = "";
for await (const chunk of chunksAsync) {
const bufferChunk = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
previous += bufferChunk;
let eolIndex;
while ((eolIndex = previous.indexOf("\n")) >= 0) {
const line = previous.slice(0, eolIndex + 1).trimEnd();
if (line === "data: [DONE]")
break;
if (line.startsWith("data: "))
yield line;
previous = previous.slice(eolIndex + 1);
}
}
}
async *linesToMessages(linesAsync) {
for await (const line of linesAsync) {
const message = line.substring("data :".length);
yield message;
}
}
async *streamCompletion(data) {
yield* this.linesToMessages(this.chunksToLines(data));
}
getInstructions() {
return `${this.options.instructions.replace("You are ChatGPT", `You are ${this.options.aiName}`)}
you do not have the capability to retain information from previous interactions. Every time a user interacts with you, it is treated as a standalone session and you do not have the ability to store any information or recall past conversations
@@ -47,6 +74,6 @@ Respond conversationally.
}
getConversation(conversationId, userName = "User", aiName = "ChatGPT") {
getConversation(conversationId, userName = "User") {
let conversation = this.conversations.find((conversation) => conversation.id === conversationId);
if (!conversation) {
conversation = this.addConversation(conversationId, userName, aiName);
conversation = this.addConversation(conversationId, userName);
}
@@ -56,2 +83,3 @@ else {
}
conversation.userName = userName;
return conversation;
@@ -63,3 +91,3 @@ }
conversation.messages = [];
conversation.messages.push(this.getInstructions(conversation.aiName));
conversation.messages.push(this.getInstructions());
conversation.lastActive = Date.now();
@@ -69,33 +97,102 @@ }
}
async ask(prompt, conversationId = "default", userName = "User", aiName = "ChatGPT") {
let conversation = this.getConversation(conversationId, userName, aiName);
async ask(prompt, conversationId = "default", userName = "User") {
let conversation = this.getConversation(conversationId, userName);
let promptStr = this.generatePrompt(conversation, prompt);
const response = await axios.post("https://api.openai.com/v1/completions", {
model: this.options.model,
prompt: promptStr,
temperature: this.options.temperature,
max_tokens: this.options.max_tokens,
top_p: this.options.top_p,
frequency_penalty: this.options.frequency_penalty,
presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
}, {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${this.key}`,
},
});
let responseStr = response.data.choices[0].text
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${conversation.aiName}: `, "")
.trim();
conversation.messages.push(`${responseStr}${this.options.stop}\n`);
return responseStr;
try {
const response = await this.openAi.createCompletion({
model: this.options.model,
prompt: promptStr,
temperature: this.options.temperature,
max_tokens: this.options.max_tokens,
top_p: this.options.top_p,
frequency_penalty: this.options.frequency_penalty,
presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
});
let responseStr = response.data.choices[0].text
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${this.options.aiName}: `, "")
.trim();
conversation.messages.push(`${responseStr}${this.options.stop}\n`);
return responseStr;
}
catch (error) {
if (error.response?.status) {
console.error(error.response.status, error.message);
error.response.data.on("data", (data) => {
const message = data.toString();
try {
const parsed = JSON.parse(message);
console.error("An error occurred during OpenAI request: ", parsed);
}
catch (error) {
console.error("An error occurred during OpenAI request: ", message);
}
});
}
else {
console.error("An error occurred during OpenAI request", error);
}
}
}
async askStream(data, prompt, conversationId = "default", userName = "User") {
let conversation = this.getConversation(conversationId, userName);
let promptStr = this.generatePrompt(conversation, prompt);
try {
const response = await this.openAi.createCompletion({
model: this.options.model,
prompt: promptStr,
temperature: this.options.temperature,
max_tokens: this.options.max_tokens,
top_p: this.options.top_p,
frequency_penalty: this.options.frequency_penalty,
presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
stream: true,
}, { responseType: "stream" });
let responseStr = "";
for await (const message of this.streamCompletion(response.data)) {
try {
const parsed = JSON.parse(message);
const { text } = parsed.choices[0];
responseStr += text;
data(text);
}
catch (error) {
console.error("Could not JSON parse stream message", message, error);
}
}
responseStr = responseStr
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${this.options.aiName}: `, "")
.trim();
conversation.messages.push(`${responseStr}${this.options.stop}\n`);
return responseStr;
}
catch (error) {
if (error.response?.status) {
console.error(error.response.status, error.message);
error.response.data.on("data", (data) => {
const message = data.toString();
try {
const parsed = JSON.parse(message);
console.error("An error occurred during OpenAI request: ", parsed);
}
catch (error) {
console.error("An error occurred during OpenAI request: ", message);
}
});
}
else {
console.error("An error occurred during OpenAI request", error);
}
}
}
generatePrompt(conversation, prompt) {
prompt = [",", "!", "?", "."].includes(prompt[prompt.length - 1]) ? prompt : `${prompt}.`;
conversation.messages.push(`${conversation.userName}":${prompt}\n${conversation.aiName}:`);
conversation.messages.push(`${conversation.userName}":${prompt}\n${this.options.aiName}:`);
if (!conversation.messages[0].includes("Current date:"))
conversation.messages[0] = this.getInstructions(conversation.aiName);
conversation.messages[0] = this.getInstructions();
let promptStr = conversation.messages.join();
@@ -107,3 +204,3 @@ let promptEncodedLength = encode(promptStr).length;
if (!conversation.messages[0].includes("Current date:"))
conversation.messages[0] = this.getInstructions(conversation.aiName);
conversation.messages[0] = this.getInstructions();
promptStr = conversation.messages.join();
@@ -110,0 +207,0 @@ promptEncodedLength = encode(promptStr).length;
@@ -5,5 +5,4 @@ interface conversation {
userName: string;
aiName: string;
lastActive?: number;
}
export default conversation;
@@ -10,3 +10,4 @@ interface options {
stop?: string;
aiName?: string;
}
export default options;
{
"name": "chatgpt-official",
"version": "1.0.8",
"version": "1.0.9",
"description": "ChatGPT Client using official OpenAI API",
@@ -55,4 +55,4 @@ "main": "dist/index.js",
"dependencies": {
"axios": "1.2.2",
"gpt-3-encoder": "1.1.4"
"gpt-3-encoder": "1.1.4",
"openai": "3.1.0"
},
@@ -59,0 +59,0 @@ "bugs": {
@@ -1,3 +0,3 @@
import axios from "axios";
import { encode } from "gpt-3-encoder";
import { Configuration, OpenAIApi } from "openai";
@@ -11,2 +11,3 @@ import options from "../models/options.js";
public options: options;
private openAi: OpenAIApi;
public instructionTokens: number;
@@ -17,4 +18,4 @@ constructor(key: string, options?: options) {
this.options = {
model: options?.model || "text-chat-davinci-002-20230126",
temperature: options?.temperature || 0.1,
model: options?.model || "text-chat-davinci-002-20221122", // default model updated to an older model (2022-11-22) found by @canfam - Discord:pig#8932 // you can use the newest model (2023-01-26) using my private API https://gist.github.com/PawanOsman/be803be44caed2449927860956b240ad
temperature: options?.temperature || 0.7,
max_tokens: options?.max_tokens || 1024,
@@ -31,14 +32,15 @@ top_p: options?.top_p || 0.9,
stop: options?.stop || "<|im_end|>",
aiName: options?.aiName || "ChatGPT",
};
this.openAi = new OpenAIApi(new Configuration({ apiKey: this.key }));
this.instructionTokens = encode(this.options.instructions).length;
}
public addConversation(conversationId: string, userName: string = "User", aiName = "ChatGPT") {
public addConversation(conversationId: string, userName: string = "User") {
let conversation = {
id: conversationId,
userName: userName,
aiName: aiName,
messages: [],
};
conversation.messages.push(this.getInstructions(aiName));
conversation.messages.push(this.getInstructions());
this.conversations.push(conversation);
@@ -49,4 +51,32 @@
private getInstructions(aiName?: string): string {
return `${aiName !== null ? this.options.instructions.replace("You are ChatGPT", `You are ${aiName}`) : this.options.instructions}
private async *chunksToLines(chunksAsync: any) {
let previous = "";
for await (const chunk of chunksAsync) {
const bufferChunk = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
previous += bufferChunk;
let eolIndex;
while ((eolIndex = previous.indexOf("\n")) >= 0) {
// line includes the EOL
const line = previous.slice(0, eolIndex + 1).trimEnd();
if (line === "data: [DONE]") break;
if (line.startsWith("data: ")) yield line;
previous = previous.slice(eolIndex + 1);
}
}
}
private async *linesToMessages(linesAsync) {
for await (const line of linesAsync) {
const message = line.substring("data :".length);
yield message;
}
}
private async *streamCompletion(data: any) {
yield* this.linesToMessages(this.chunksToLines(data));
}
private getInstructions(): string {
return `${this.options.instructions.replace("You are ChatGPT", `You are ${this.options.aiName}`)}
you do not have the capability to retain information from previous interactions. Every time a user interacts with you, it is treated as a standalone session and you do not have the ability to store any information or recall past conversations
@@ -57,6 +87,6 @@ Respond conversationally.
public getConversation(conversationId: string, userName: string = "User", aiName = "ChatGPT") {
public getConversation(conversationId: string, userName: string = "User") {
let conversation = this.conversations.find((conversation) => conversation.id === conversationId);
if (!conversation) {
conversation = this.addConversation(conversationId, userName, aiName);
conversation = this.addConversation(conversationId, userName);
} else {
@@ -66,2 +96,4 @@ conversation.lastActive = Date.now();
conversation.userName = userName;
return conversation;
@@ -74,3 +106,3 @@ }
conversation.messages = [];
conversation.messages.push(this.getInstructions(conversation.aiName));
conversation.messages.push(this.getInstructions());
conversation.lastActive = Date.now();
@@ -82,9 +114,8 @@ }
public async ask(prompt: string, conversationId: string = "default", userName: string = "User", aiName = "ChatGPT") {
let conversation = this.getConversation(conversationId, userName, aiName);
public async ask(prompt: string, conversationId: string = "default", userName: string = "User") {
let conversation = this.getConversation(conversationId, userName);
let promptStr = this.generatePrompt(conversation, prompt);
const response = await axios.post(
"https://api.openai.com/v1/completions",
{
try {
const response = await this.openAi.createCompletion({
model: this.options.model,
@@ -98,18 +129,85 @@ prompt: promptStr,
stop: [this.options.stop],
},
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${this.key}`,
});
let responseStr = response.data.choices[0].text
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${this.options.aiName}: `, "")
.trim();
conversation.messages.push(`${responseStr}${this.options.stop}\n`);
return responseStr;
} catch (error: any) {
if (error.response?.status) {
console.error(error.response.status, error.message);
error.response.data.on("data", (data: any) => {
const message = data.toString();
try {
const parsed = JSON.parse(message);
console.error("An error occurred during OpenAI request: ", parsed);
} catch (error) {
console.error("An error occurred during OpenAI request: ", message);
}
});
} else {
console.error("An error occurred during OpenAI request", error);
}
}
}
public async askStream(data: (arg0: string) => void, prompt: string, conversationId: string = "default", userName: string = "User") {
let conversation = this.getConversation(conversationId, userName);
let promptStr = this.generatePrompt(conversation, prompt);
try {
const response = await this.openAi.createCompletion(
{
model: this.options.model,
prompt: promptStr,
temperature: this.options.temperature,
max_tokens: this.options.max_tokens,
top_p: this.options.top_p,
frequency_penalty: this.options.frequency_penalty,
presence_penalty: this.options.presence_penalty,
stop: [this.options.stop],
stream: true,
},
},
);
{ responseType: "stream" },
);
let responseStr = response.data.choices[0].text
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${conversation.aiName}: `, "")
.trim();
conversation.messages.push(`${responseStr}${this.options.stop}\n`);
return responseStr;
let responseStr = "";
for await (const message of this.streamCompletion(response.data)) {
try {
const parsed = JSON.parse(message);
const { text } = parsed.choices[0];
responseStr += text;
data(text);
} catch (error) {
console.error("Could not JSON parse stream message", message, error);
}
}
responseStr = responseStr
.replace(/<\|im_end\|>/g, "")
.replace(this.options.stop, "")
.replace(`${this.options.aiName}: `, "")
.trim();
conversation.messages.push(`${responseStr}${this.options.stop}\n`);
return responseStr;
} catch (error: any) {
if (error.response?.status) {
console.error(error.response.status, error.message);
error.response.data.on("data", (data: any) => {
const message = data.toString();
try {
const parsed = JSON.parse(message);
console.error("An error occurred during OpenAI request: ", parsed);
} catch (error) {
console.error("An error occurred during OpenAI request: ", message);
}
});
} else {
console.error("An error occurred during OpenAI request", error);
}
}
}
@@ -119,5 +217,5 @@
prompt = [",", "!", "?", "."].includes(prompt[prompt.length - 1]) ? prompt : `${prompt}.`; // Thanks to https://github.com/optionsx
conversation.messages.push(`${conversation.userName}":${prompt}\n${conversation.aiName}:`);
conversation.messages.push(`${conversation.userName}":${prompt}\n${this.options.aiName}:`);
if (!conversation.messages[0].includes("Current date:")) conversation.messages[0] = this.getInstructions(conversation.aiName);
if (!conversation.messages[0].includes("Current date:")) conversation.messages[0] = this.getInstructions();
@@ -130,3 +228,3 @@ let promptStr = conversation.messages.join();
conversation.messages.shift();
if (!conversation.messages[0].includes("Current date:")) conversation.messages[0] = this.getInstructions(conversation.aiName);
if (!conversation.messages[0].includes("Current date:")) conversation.messages[0] = this.getInstructions();
promptStr = conversation.messages.join();
@@ -133,0 +231,0 @@ promptEncodedLength = encode(promptStr).length;
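The new streaming path above hinges on the chunksToLines / linesToMessages / streamCompletion generators: with stream: true and responseType: "stream", the completions endpoint returns server-sent events, and these generators re-split the raw chunks into "data: ..." lines before askStream JSON-parses each payload. A self-contained sketch of the same splitting idea, run against hard-coded chunks so it needs no network call (the chunk contents are made up for illustration, and a real response stream would be consumed the same way):

```ts
// Sketch of the SSE line-splitting idea used by chunksToLines/linesToMessages above.
// It buffers partial lines across chunks, stops at the "data: [DONE]" sentinel and
// yields only the JSON payload after the "data: " prefix.
async function* ssePayloads(chunks: Iterable<string>) {
  let previous = "";
  for await (const chunk of chunks) {
    previous += chunk;
    let eolIndex: number;
    while ((eolIndex = previous.indexOf("\n")) >= 0) {
      const line = previous.slice(0, eolIndex).trimEnd();
      previous = previous.slice(eolIndex + 1);
      if (line === "data: [DONE]") return;
      if (line.startsWith("data: ")) yield line.slice("data: ".length);
    }
  }
}

async function demoStream() {
  // Fabricated chunks, deliberately split mid-line the way HTTP chunks often are.
  const chunks = [
    'data: {"choices":[{"text":"Hel"}]}\ndata: {"choi',
    'ces":[{"text":"lo"}]}\ndata: [DONE]\n',
  ];
  for await (const payload of ssePayloads(chunks)) {
    process.stdout.write(JSON.parse(payload).choices[0].text); // prints "Hello"
  }
}

demoStream().catch(console.error);
```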
@@ -5,3 +5,2 @@ interface conversation {
userName: string;
aiName: string;
lastActive?: number;
@@ -8,0 +7,0 @@ }
@@ -10,4 +10,5 @@ interface options {
stop?: string;
aiName?: string;
}
export default options;
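Taken together, the two model hunks show where aiName ended up: it is removed from the conversation interface and added to options, so the assistant's display name is configured once per client. Below is a reconstructed sketch of the 1.0.9 options shape, inferred from this hunk plus the constructor defaults earlier in the diff; field names beyond stop and aiName are taken from how the code reads this.options and may not match the published typings exactly:

```ts
// Inferred 1.0.9 options shape; only stop and aiName are visible in the hunk above,
// the remaining fields are the ones the constructor and ask()/askStream() read.
interface options {
  instructions?: string;
  model?: string;             // default "text-chat-davinci-002-20221122"
  temperature?: number;       // default 0.7
  max_tokens?: number;        // default 1024
  top_p?: number;             // default 0.9
  frequency_penalty?: number;
  presence_penalty?: number;
  stop?: string;              // default "<|im_end|>"
  aiName?: string;            // new in 1.0.9, default "ChatGPT"
}

export default options;
```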
+ Added openai@3.1.0
+ Added axios@0.26.1 (transitive)
+ Added openai@3.1.0 (transitive)
- Removed axios@1.2.2
- Removed axios@1.2.2 (transitive)
- Removed proxy-from-env@1.1.0 (transitive)