Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

gpt-tokens

Package Overview
Dependencies
Maintainers
1
Versions
30
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

gpt-tokens - npm Package Compare versions

Comparing version 1.0.2 to 1.0.3

52

index.js

@@ -65,7 +65,9 @@ "use strict";

].includes(this.model)) {
const promptUSD = new decimal_js_1.default(this.promptUsedTokens)
.mul(this.gpt4_8kPromptTokenUnit);
const completionUSD = new decimal_js_1.default(this.completionUsedTokens)
.mul(this.gpt4_8kCompletionTokenUnit);
return promptUSD.sub(completionUSD).toNumber();
// const promptUSD = new Decimal(this.promptUsedTokens)
// .mul(this.gpt4_8kPromptTokenUnit)
// const completionUSD = new Decimal(this.completionUsedTokens)
// .mul(this.gpt4_8kCompletionTokenUnit)
//
// return promptUSD.add(completionUSD).toNumber()
return new decimal_js_1.default(this.usedTokens).mul(new decimal_js_1.default(0.00003)).toNumber();
}

@@ -76,7 +78,9 @@ if ([

].includes(this.model)) {
const promptUSD = new decimal_js_1.default(this.promptUsedTokens)
.mul(this.gpt4_32kPromptTokenUnit);
const completionUSD = new decimal_js_1.default(this.completionUsedTokens)
.mul(this.gpt4_32kCompletionTokenUnit);
return promptUSD.sub(completionUSD).toNumber();
// const promptUSD = new Decimal(this.promptUsedTokens)
// .mul(this.gpt4_32kPromptTokenUnit)
// const completionUSD = new Decimal(this.completionUsedTokens)
// .mul(this.gpt4_32kCompletionTokenUnit)
//
// return promptUSD.add(completionUSD).toNumber()
return new decimal_js_1.default(this.usedTokens).mul(new decimal_js_1.default(0.00003)).toNumber();
}

@@ -198,5 +202,31 @@ throw new Error('Model not supported.');

});
const test3 = new GPTTokens({
model: 'gpt-4',
messages: [
{
role: 'assistant',
content: 'Hello for the fifth time! I\'m still here and ready to assist you with any questions or concerns you may have. Don\'t hesitate to ask!',
},
{ role: 'user', content: 'hello 6' },
{
role: 'assistant',
content: 'Hello for the sixth time! I\'m still here, eager to help and answer any questions you may have. Just let me know how I can assist you.',
},
{ role: 'user', content: 'hello 7' },
{
role: 'assistant',
content: 'Hello for the seventh time! I\'m still here and happy to help with any questions or concerns you may have. Please feel free to ask anything you\'d like.',
},
{ role: 'user', content: 'hello 8' },
{
role: 'assistant',
content: 'Hello for the eighth time! I\'m still here and ready to assist you with any questions or information you may need. Don\'t hesitate to ask!',
},
],
});
(0, assert_1.default)(test1.usedTokens === 18
&& test1.usedUSD === 0.000036
&& test2.usedTokens === 16
&& test2.usedUSD === 0.00048, 'Error: TiktokenLite test failed');
&& test2.usedUSD === 0.00048
&& test3.usedTokens === 165
&& test3.usedUSD === 0.00495, 'Error: TiktokenLite test failed');

@@ -110,9 +110,10 @@ import { encoding_for_model, get_encoding, Tiktoken } from '@dqbd/tiktoken'

].includes(this.model)) {
const promptUSD = new Decimal(this.promptUsedTokens)
.mul(this.gpt4_8kPromptTokenUnit)
// const promptUSD = new Decimal(this.promptUsedTokens)
// .mul(this.gpt4_8kPromptTokenUnit)
// const completionUSD = new Decimal(this.completionUsedTokens)
// .mul(this.gpt4_8kCompletionTokenUnit)
//
// return promptUSD.add(completionUSD).toNumber()
const completionUSD = new Decimal(this.completionUsedTokens)
.mul(this.gpt4_8kCompletionTokenUnit)
return promptUSD.sub(completionUSD).toNumber()
return new Decimal(this.usedTokens).mul(new Decimal(0.00003)).toNumber()
}

@@ -124,8 +125,10 @@

].includes(this.model)) {
const promptUSD = new Decimal(this.promptUsedTokens)
.mul(this.gpt4_32kPromptTokenUnit)
const completionUSD = new Decimal(this.completionUsedTokens)
.mul(this.gpt4_32kCompletionTokenUnit)
// const promptUSD = new Decimal(this.promptUsedTokens)
// .mul(this.gpt4_32kPromptTokenUnit)
// const completionUSD = new Decimal(this.completionUsedTokens)
// .mul(this.gpt4_32kCompletionTokenUnit)
//
// return promptUSD.add(completionUSD).toNumber()
return promptUSD.sub(completionUSD).toNumber()
return new Decimal(this.usedTokens).mul(new Decimal(0.00003)).toNumber()
}

@@ -267,5 +270,33 @@

const test3 = new GPTTokens({
model : 'gpt-4',
messages: [
{
role : 'assistant',
content: 'Hello for the fifth time! I\'m still here and ready to assist you with any questions or concerns you may have. Don\'t hesitate to ask!',
},
{ role: 'user', content: 'hello 6' },
{
role : 'assistant',
content: 'Hello for the sixth time! I\'m still here, eager to help and answer any questions you may have. Just let me know how I can assist you.',
},
{ role: 'user', content: 'hello 7' },
{
role : 'assistant',
content: 'Hello for the seventh time! I\'m still here and happy to help with any questions or concerns you may have. Please feel free to ask anything you\'d like.',
},
{ role: 'user', content: 'hello 8' },
{
role : 'assistant',
content: 'Hello for the eighth time! I\'m still here and ready to assist you with any questions or information you may need. Don\'t hesitate to ask!',
},
],
})
assert(test1.usedTokens === 18
&& test1.usedUSD === 0.000036
&& test2.usedTokens === 16
&& test2.usedUSD === 0.00048, 'Error: TiktokenLite test failed')
&& test2.usedUSD === 0.00048
&& test3.usedTokens === 165
&& test3.usedUSD === 0.00495
, 'Error: TiktokenLite test failed')
{
"name": "gpt-tokens",
"version": "1.0.2",
"version": "1.0.3",
"description": "Calculate the token consumption and amount of openai gpt message",

@@ -16,3 +16,4 @@ "keywords": [

"scripts": {
"build": "tsc"
"build": "yarn && tsc",
"publish": "yarn build && npm publish"
},

@@ -19,0 +20,0 @@ "main": "index.js",

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc