Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

chatgpt-official

Package Overview
Dependencies
Maintainers
1
Versions
20
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

chatgpt-official - npm Package Compare versions

Comparing version 1.1.5 to 1.1.6

dist/models/chatgpt-usage.d.ts

4

dist/classes/chatgpt.d.ts

@@ -0,1 +1,2 @@

import Usage from "../models/chatgpt-usage.js";
import Options from "../models/chatgpt-options.js";

@@ -8,2 +9,3 @@ import Conversation from "../models/conversation.js";

options: Options;
onUsage: (usage: Usage) => void;
constructor(key: string, options?: Options);

@@ -18,3 +20,3 @@ private chunksToLines;

ask(prompt: string, conversationId?: string, userName?: string): Promise<string>;
askStream(data: (arg0: string) => void, prompt: string, conversationId?: string, userName?: string): Promise<string>;
askStream(data: (arg0: string) => void, usage: (usage: Usage) => void, prompt: string, conversationId?: string, userName?: string): Promise<string>;
private generatePrompt;

@@ -21,0 +23,0 @@ private generateMessages;

@@ -9,2 +9,3 @@ import { encode } from "gpt-3-encoder";

options;
onUsage;
constructor(key, options) {

@@ -21,2 +22,3 @@ this.key = key;

instructions: options?.instructions || `You are ChatGPT, a language model developed by OpenAI. You are designed to respond to user input in a conversational manner, Answer as concisely as possible. Your training data comes from a diverse range of internet text and You have been trained to generate human-like responses to various questions and prompts. You can provide information on a wide range of topics, but your knowledge is limited to what was present in your training data, which has a cutoff date of 2021. You strive to provide accurate and helpful information to the best of your ability.\nKnowledge cutoff: 2021-09`,
max_conversation_tokens: options?.max_conversation_tokens || 4097,
};

@@ -83,7 +85,8 @@ }

async ask(prompt, conversationId = "default", userName = "User") {
return await this.askStream((data) => { }, prompt, conversationId, userName);
return await this.askStream((data) => { }, (data) => { }, prompt, conversationId, userName);
}
async askStream(data, prompt, conversationId = "default", userName = "User") {
async askStream(data, usage, prompt, conversationId = "default", userName = "User") {
let conversation = this.getConversation(conversationId, userName);
let promptStr = this.generatePrompt(conversation, prompt);
let prompt_tokens = this.countTokens(promptStr);
try {

@@ -121,2 +124,11 @@ const response = await axios.post(`https://api.openai.com/v1/chat/completions`, {

}
let completion_tokens = encode(responseStr).length;
let usageData = {
prompt_tokens: prompt_tokens,
completion_tokens: completion_tokens,
total_tokens: prompt_tokens + completion_tokens,
};
usage(usageData);
if (this.onUsage)
this.onUsage(usageData);
return responseStr;

@@ -147,3 +159,3 @@ }

let totalLength = promptEncodedLength + this.options.max_tokens;
while (totalLength > 99999999999999999999999999999) {
while (totalLength > this.options.max_conversation_tokens) {
conversation.messages.shift();

@@ -159,10 +171,8 @@ messages = this.generateMessages(conversation);

let messages = [];
messages.push({
role: "system",
content: this.getInstructions(conversation.userName),
});
for (let i = 0; i < conversation.messages.length; i++) {
let message = conversation.messages[i];
if (i === 0) {
messages.push({
role: "system",
content: this.getInstructions(conversation.userName),
});
}
messages.push({

@@ -169,0 +179,0 @@ role: message.type === MessageType.User ? "user" : "assistant",

@@ -9,3 +9,4 @@ interface ChatGPTOptions {

instructions?: string;
max_conversation_tokens?: number;
}
export default ChatGPTOptions;

@@ -11,2 +11,4 @@ import { ChatGPT } from "../dist/index.js";

// bot.onUsage = console.log;
async function main() {

@@ -23,3 +25,3 @@ while (true) {

process.stdout.write(res.toString());
}, prompt);
}, _ => { }, prompt);
console.log();

@@ -26,0 +28,0 @@ }

{
"name": "chatgpt-official",
"version": "1.1.5",
"version": "1.1.6",
"description": "ChatGPT Client using official OpenAI API",

@@ -9,2 +9,3 @@ "main": "dist/index.js",

"start": "tsc;node examples/chat.js",
"startcli": "tsc;node examples/cli.js",
"build": "tsc"

@@ -11,0 +12,0 @@ },

import { encode } from "gpt-3-encoder";
import axios from "axios";
import Usage from "../models/chatgpt-usage.js";
import Options from "../models/chatgpt-options.js";

@@ -14,2 +15,3 @@ import Conversation from "../models/conversation.js";

public options: Options;
public onUsage: (usage: Usage) => void;
constructor(key: string, options?: Options) {

@@ -26,2 +28,3 @@ this.key = key;

instructions: options?.instructions || `You are ChatGPT, a language model developed by OpenAI. You are designed to respond to user input in a conversational manner, Answer as concisely as possible. Your training data comes from a diverse range of internet text and You have been trained to generate human-like responses to various questions and prompts. You can provide information on a wide range of topics, but your knowledge is limited to what was present in your training data, which has a cutoff date of 2021. You strive to provide accurate and helpful information to the best of your ability.\nKnowledge cutoff: 2021-09`,
max_conversation_tokens: options?.max_conversation_tokens || 4097,
};

@@ -99,9 +102,15 @@ }

public async ask(prompt: string, conversationId: string = "default", userName: string = "User") {
return await this.askStream((data) => {}, prompt, conversationId, userName);
return await this.askStream(
(data) => {},
(data) => {},
prompt,
conversationId,
userName,
);
}
public async askStream(data: (arg0: string) => void, prompt: string, conversationId: string = "default", userName: string = "User") {
public async askStream(data: (arg0: string) => void, usage: (usage: Usage) => void, prompt: string, conversationId: string = "default", userName: string = "User") {
let conversation = this.getConversation(conversationId, userName);
let promptStr = this.generatePrompt(conversation, prompt);
let prompt_tokens = this.countTokens(promptStr);
try {

@@ -145,2 +154,12 @@ const response = await axios.post(

let completion_tokens = encode(responseStr).length;
let usageData = {
prompt_tokens: prompt_tokens,
completion_tokens: completion_tokens,
total_tokens: prompt_tokens + completion_tokens,
};
usage(usageData);
if(this.onUsage) this.onUsage(usageData);
return responseStr;

@@ -174,3 +193,3 @@ } catch (error: any) {

while (totalLength > 99999999999999999999999999999) {
while (totalLength > this.options.max_conversation_tokens) {
conversation.messages.shift();

@@ -188,10 +207,8 @@ messages = this.generateMessages(conversation);

let messages: Message[] = [];
messages.push({
role: "system",
content: this.getInstructions(conversation.userName),
});
for (let i = 0; i < conversation.messages.length; i++) {
let message = conversation.messages[i];
if (i === 0) {
messages.push({
role: "system",
content: this.getInstructions(conversation.userName),
});
}
messages.push({

@@ -198,0 +215,0 @@ role: message.type === MessageType.User ? "user" : "assistant",

@@ -9,4 +9,5 @@ interface ChatGPTOptions {

instructions?: string;
max_conversation_tokens?: number;
}
export default ChatGPTOptions;

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc