gpt-tokens - npm Package Compare versions

Comparing version 1.3.6 to 1.3.7


dist/index.d.ts

@@ -10,4 +10,7 @@ import { TokenPrice } from './tokenPrice';
 }
-export declare function getEncodingForModelCached(model: supportModelType): Tiktoken;
 export declare class GPTTokens extends TokenPrice {
+    protected static modelEncodingCache: {
+        [key in supportModelType]?: Tiktoken;
+    };
+    protected static getEncodingForModelCached(model: supportModelType): Tiktoken;
     constructor(options: {
@@ -14,0 +17,0 @@ model?: supportModelType;
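The new declaration introduces a per-model encoder cache typed as a mapped type over supportModelType, and moves the cached lookup onto the class as a protected static. A minimal sketch of what that shape allows (the abbreviated union and the variable name here are illustrative, not the package's source):

    import type { Tiktoken } from 'js-tiktoken'

    // Abbreviated stand-in for the package's full union (the complete list appears later in this diff).
    type supportModelType = 'gpt-4o' | 'gpt-4o-mini' | 'gpt-3.5-turbo'

    // `[key in supportModelType]?: Tiktoken` means: keys are restricted to supported model
    // names, values are Tiktoken encoders, and every entry is optional, so encoders can be
    // created lazily the first time a given model is actually used.
    const modelEncodingCache: { [key in supportModelType]?: Tiktoken } = {}

    modelEncodingCache['gpt-4o-mini'] = undefined // allowed: entries are optional
    // modelEncodingCache['gpt-5'] = undefined    // rejected: not part of supportModelType

Because getEncodingForModelCached is now a protected static rather than a top-level export, it is no longer part of the package's public typings.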


dist/index.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GPTTokens = exports.getEncodingForModelCached = void 0;
exports.GPTTokens = void 0;
const js_tiktoken_1 = require("js-tiktoken");
const openai_chat_tokens_1 = require("openai-chat-tokens");
const tokenPrice_1 = require("./tokenPrice");
const modelEncodingCache = {};
function getEncodingForModelCached(model) {
if (!modelEncodingCache[model]) {
try {
modelEncodingCache[model] = (0, js_tiktoken_1.encodingForModel)(model);
class GPTTokens extends tokenPrice_1.TokenPrice {
static getEncodingForModelCached(model) {
const modelEncodingCache = GPTTokens.modelEncodingCache;
if (!modelEncodingCache[model]) {
try {
modelEncodingCache[model] = (0, js_tiktoken_1.encodingForModel)(model);
}
catch (e) {
console.error('Model not found. Using cl100k_base encoding.');
modelEncodingCache[model] = (0, js_tiktoken_1.getEncoding)('cl100k_base');
}
}
catch (e) {
console.error('Model not found. Using cl100k_base encoding.');
modelEncodingCache[model] = (0, js_tiktoken_1.getEncoding)('cl100k_base');
}
return modelEncodingCache[model];
}
return modelEncodingCache[model];
}
exports.getEncodingForModelCached = getEncodingForModelCached;
class GPTTokens extends tokenPrice_1.TokenPrice {
constructor(options) {
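Two consumer-visible effects follow from this hunk: the package root now exports only GPTTokens (the standalone getEncodingForModelCached export is gone), and unrecognized model names still fall back to the cl100k_base encoding. For code that imported the removed helper, a hedged migration sketch using js-tiktoken directly (already a direct dependency; resolveEncoding is our own name, not a package API):

    import { encodingForModel, getEncoding, Tiktoken } from 'js-tiktoken'

    // Same lookup-with-fallback the library performs internally.
    function resolveEncoding(model: string): Tiktoken {
        try {
            // encodingForModel throws when js-tiktoken does not recognize the model name
            return encodingForModel(model as Parameters<typeof encodingForModel>[0])
        } catch {
            return getEncoding('cl100k_base')
        }
    }

    console.log(resolveEncoding('gpt-4o').encode('Hello world').length)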

@@ -37,3 +36,3 @@ super();
         if (!this.messages && !this.training && !this.tools)
-            throw new Error('Must set on of messages | training | function')
+            throw new Error('Must set one of messages | training | function')
         if (this.fineTuneModel && !this.fineTuneModel.startsWith('ft:gpt'))

@@ -91,3 +90,3 @@ throw new Error(`Fine-tuning is not supported for ${this.fineTuneModel}`);
         let encoding;
-        encoding = getEncodingForModelCached(model);
+        encoding = GPTTokens.getEncodingForModelCached(model);
         return encoding.encode(content).length;

@@ -136,3 +135,3 @@ }
         }
-        encoding = getEncodingForModelCached(model);
+        encoding = GPTTokens.getEncodingForModelCached(model);
         // This is a port of the Python code from

@@ -161,1 +160,2 @@ //
 exports.GPTTokens = GPTTokens;
+GPTTokens.modelEncodingCache = {};

@@ -1,2 +0,2 @@
-export type supportModelType = 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-4' | 'gpt-4-32k' | 'gpt-4-turbo-preview' | 'gpt-3.5-turbo-0301' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-4-1106-preview' | 'gpt-4-0125-preview' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-turbo' | 'gpt-4o' | 'gpt-4o-2024-05-13';
+export type supportModelType = 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-4' | 'gpt-4-32k' | 'gpt-4-turbo-preview' | 'gpt-3.5-turbo-0301' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-4-1106-preview' | 'gpt-4-0125-preview' | 'gpt-4-turbo-2024-04-09' | 'gpt-4-turbo' | 'gpt-4o' | 'gpt-4o-2024-05-13' | 'gpt-4o-mini' | 'gpt-4o-mini-2024-07-18';
 /**
@@ -3,0 +3,0 @@ * Pricing
@@ -25,2 +25,3 @@ "use strict";
     'gpt-4o': 'gpt-4o-2024-05-13',
+    'gpt-4o-mini': 'gpt-4o-mini-2024-07-18',
 };

@@ -33,14 +34,15 @@ /**
 Pricing.incrementalModels = {
-    'gpt-4o-2024-05-13': [0.005, 0.015], // 2024-05-13
-    'gpt-4-turbo-2024-04-09': [0.01, 0.03], // 2024-04-09
-    'gpt-4-0314': [0.03, 0.06], // 2023-03-14
-    'gpt-4-32k-0314': [0.06, 0.12], // 2023-03-14
-    'gpt-4-0613': [0.03, 0.06, 0.0080], // 2023-06-13 (Fine-tuning experimental)
-    'gpt-4-32k-0613': [0.06, 0.12], // 2023-06-13
-    'gpt-4-1106-preview': [0.01, 0.03], // 2023-11-06
-    'gpt-4-0125-preview': [0.01, 0.03], // 2024-01-25
-    'gpt-3.5-turbo-0301': [0.0015, 0.0020], // 2023-03-01
-    'gpt-3.5-turbo-0613': [0.0015, 0.0020, 0.0080], // 2023-06-13
-    'gpt-3.5-turbo-16k-0613': [0.0030, 0.0040], // 2023-06-13
-    'gpt-3.5-turbo-1106': [0.0010, 0.0020, 0.0080], // 2023-11-06 (Fine-tuning recommended)
+    'gpt-4o-2024-05-13': [0.005, 0.015],
+    'gpt-4o-mini-2024-07-18': [0.00015, 0.0006],
+    'gpt-4-turbo-2024-04-09': [0.01, 0.03],
+    'gpt-4-0314': [0.03, 0.06],
+    'gpt-4-32k-0314': [0.06, 0.12],
+    'gpt-4-0613': [0.03, 0.06, 0.0080],
+    'gpt-4-32k-0613': [0.06, 0.12],
+    'gpt-4-1106-preview': [0.01, 0.03],
+    'gpt-4-0125-preview': [0.01, 0.03],
+    'gpt-3.5-turbo-0301': [0.0015, 0.0020],
+    'gpt-3.5-turbo-0613': [0.0015, 0.0020, 0.0080],
+    'gpt-3.5-turbo-16k-0613': [0.0030, 0.0040],
+    'gpt-3.5-turbo-1106': [0.0010, 0.0020, 0.0080],
     'gpt-3.5-turbo-0125': [0.0005, 0.0015], // 2024-01-25 (Fine-tuning is coming soon)

@@ -47,0 +49,0 @@ };
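The bracketed values read as USD per 1,000 tokens, ordered [prompt, completion] with an optional third fine-tuning training rate; that interpretation is inferred from the figures (gpt-4o-mini's published $0.15 / $0.60 per million tokens becomes 0.00015 / 0.0006 per thousand). A small arithmetic sketch of how such a table prices one request, using plain numbers for illustration:

    // [prompt USD per 1k, completion USD per 1k], copied from the table above
    const gpt4oMiniRate = [0.00015, 0.0006] as const

    const promptTokens = 1_200
    const completionTokens = 400

    const usd =
        (promptTokens / 1000) * gpt4oMiniRate[0] +
        (completionTokens / 1000) * gpt4oMiniRate[1]

    console.log(usd.toFixed(6)) // ≈ 0.000420

The package also depends on decimal.js (see the package.json hunk below), presumably so this kind of arithmetic avoids floating-point drift; treat the plain-number version here as a sketch only.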

package.json

 {
   "name": "gpt-tokens",
-  "version": "1.3.6",
+  "version": "1.3.7",
   "description": "Calculate the token consumption and amount of openai gpt message",
@@ -16,3 +16,4 @@ "keywords": [
   "scripts": {
-    "test": "node test.js",
+    "test": "node tests/index.js",
+    "test:perf": "node tests/perf.js",
     "build": "npm i && rm -rf dist && tsc && npm run test"
@@ -22,5 +23,5 @@ },
     ".": {
-      "types": "./dist/index.d.ts",
-      "import": "./dist/index.js",
-      "require": "./dist/index.js"
+      "types": "./dist/index.d.ts",
+      "import": "./dist/index.js",
+      "require": "./dist/index.js"
     }
@@ -36,3 +37,3 @@ },
     "decimal.js": "^10.4.3",
-    "js-tiktoken": "^1.0.10",
+    "js-tiktoken": "^1.0.12",
     "openai-chat-tokens": "^0.2.8"
@@ -42,4 +43,4 @@ },
     "@types/node": "^12.20.0",
-    "typescript": "^5.0.4",
-    "openai": "^4.28.0"
+    "openai": "^4.28.0",
+    "typescript": "^5.0.4"
   },
@@ -46,0 +47,0 @@ "files": [

README.md

@@ -40,2 +40,4 @@ # gpt-tokens
 * gpt-4o-2024-05-13
+* gpt-4o-mini
+* gpt-4o-mini-2024-07-18
@@ -42,0 +44,0 @@ ### Fine Tune Models
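With gpt-4o-mini now listed among the supported models, a typical call against 1.3.7 looks like the sketch below. The usedTokens / usedUSD accessors follow the package's README; if your installed version differs, treat them as assumptions and check the README shipped with it.

    import { GPTTokens } from 'gpt-tokens'

    const usage = new GPTTokens({
        model   : 'gpt-4o-mini',
        messages: [
            { role: 'system', content: 'You are a helpful assistant' },
            { role: 'user', content: 'Hello, how are you?' },
        ],
    })

    console.log('Tokens used:', usage.usedTokens)
    console.log('Estimated USD:', usage.usedUSD)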
