gpt-tokens
Comparing version 1.0.6 to 1.0.7
index.js
@@ -120,13 +120,2 @@ "use strict";
    num_tokens_from_messages(messages, model) {
        let encoding;
        let tokens_per_message;
        let tokens_per_name;
        let num_tokens = 0;
        try {
            encoding = (0, tiktoken_1.encoding_for_model)(model);
        }
        catch (e) {
            this.warning('model not found. Using cl100k_base encoding.');
            encoding = (0, tiktoken_1.get_encoding)('cl100k_base');
        }
        if (model === 'gpt-3.5-turbo') {
@@ -158,2 +147,13 @@ this.warning('gpt-3.5-turbo may change over time. Returning num tokens assuming gpt-3.5-turbo-0301.');
        }
        let encoding;
        let tokens_per_message;
        let tokens_per_name;
        let num_tokens = 0;
        try {
            encoding = (0, tiktoken_1.encoding_for_model)(model);
        }
        catch (e) {
            this.warning('model not found. Using cl100k_base encoding.');
            encoding = (0, tiktoken_1.get_encoding)('cl100k_base');
        }
        if (model === 'gpt-3.5-turbo-0301') {
@@ -160,0 +160,0 @@ tokens_per_message = 4;
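Taken together, the two hunks are a pure reordering: the encoding lookup and the token counters were moved from above the 'gpt-3.5-turbo' check to below it, so in 1.0.7 encoding_for_model only runs after the 'gpt-3.5-turbo' alias has been handled and the -0301 warning emitted. A minimal sketch of the resulting order of operations; resolveEncoding is an illustrative helper, not code from the package:

import { encoding_for_model, get_encoding, Tiktoken } from '@dqbd/tiktoken'

// Sketch of the ordering 1.0.7 appears to use: resolve the 'gpt-3.5-turbo'
// alias first, then pick an encoding, falling back to cl100k_base for
// models tiktoken does not recognize.
function resolveEncoding (model: string): Tiktoken {
    if (model === 'gpt-3.5-turbo') {
        // gpt-tokens warns here and counts as if the -0301 snapshot were used.
        model = 'gpt-3.5-turbo-0301'
    }
    try {
        return encoding_for_model(model as Parameters<typeof encoding_for_model>[0])
    } catch (e) {
        // Same fallback both versions use for unknown models.
        return get_encoding('cl100k_base')
    }
}

The cl100k_base fallback itself is unchanged; the practical difference is which model name reaches encoding_for_model.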
index.ts
@@ -172,15 +172,2 @@ import { encoding_for_model, get_encoding, Tiktoken } from '@dqbd/tiktoken'
    private num_tokens_from_messages (messages: MessageItem[], model: supportModelType): number {
        let encoding!: Tiktoken
        let tokens_per_message!: number
        let tokens_per_name!: number
        let num_tokens = 0
        try {
            encoding = encoding_for_model(model)
        } catch (e) {
            this.warning('model not found. Using cl100k_base encoding.')
            encoding = get_encoding('cl100k_base')
        }
        if (model === 'gpt-3.5-turbo') {
@@ -218,2 +205,15 @@ this.warning('gpt-3.5-turbo may change over time. Returning num tokens assuming gpt-3.5-turbo-0301.')
        let encoding!: Tiktoken
        let tokens_per_message!: number
        let tokens_per_name!: number
        let num_tokens = 0
        try {
            encoding = encoding_for_model(model)
        } catch (e) {
            this.warning('model not found. Using cl100k_base encoding.')
            encoding = get_encoding('cl100k_base')
        }
        if (model === 'gpt-3.5-turbo-0301') {
@@ -220,0 +220,0 @@ tokens_per_message = 4
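The TypeScript source shows the same move, and the trailing context line shows tokens_per_message being set to 4 for gpt-3.5-turbo-0301. Those constants feed the per-message loop that the relocated declarations support. A rough sketch of that style of counting, following OpenAI's published counting recipe rather than this package's exact loop; countTokens, the -1 name adjustment, and the +3 reply priming are assumptions:

import { get_encoding } from '@dqbd/tiktoken'

interface MessageItem { role: string; content: string; name?: string }

// Hypothetical helper: fixed overhead per message, plus the encoded length
// of every string field, with an adjustment when a name field is present.
function countTokens (messages: MessageItem[]): number {
    const encoding = get_encoding('cl100k_base')
    const tokens_per_message = 4  // matches the diff's gpt-3.5-turbo-0301 value
    const tokens_per_name = -1    // assumed value for the -0301 snapshot
    let num_tokens = 0
    for (const message of messages) {
        num_tokens += tokens_per_message
        for (const [key, value] of Object.entries(message)) {
            if (typeof value !== 'string') continue
            num_tokens += encoding.encode(value).length
            if (key === 'name') num_tokens += tokens_per_name
        }
    }
    encoding.free()
    return num_tokens + 3  // assumed priming for the assistant's reply
}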
package.json
{
  "name": "gpt-tokens",
  "version": "1.0.6",
  "version": "1.0.7",
  "description": "Calculate the token consumption and amount of openai gpt message",
@@ -5,0 +5,0 @@   "keywords": [
@@ -0,0 +0,0 @@ # gpt-tokens
@@ -0,0 +0,0 @@ const { GPTTokens } = require('./index')
Sorry, the diff of this file is not supported yet
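Two more files appear without a readable diff: one starting with `# gpt-tokens` (the README) and one starting with `const { GPTTokens } = require('./index')`. For orientation, calling the package at this point looked roughly like the sketch below; the option object and the usedTokens/usedUSD property names follow the README of this era and should be read as assumptions rather than a verified API:

import { GPTTokens } from 'gpt-tokens'

// Assumed constructor shape and property names; a sketch, not a verified
// API surface.
const usage = new GPTTokens({
    model   : 'gpt-3.5-turbo-0301',
    messages: [
        { role: 'user', content: 'Hello world' },
    ],
})

console.log('Used tokens:', usage.usedTokens) // total prompt tokens
console.log('Used USD   :', usage.usedUSD)    // estimated cost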
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Deprecated
Maintenance: The maintainer of the package marked it as deprecated. This could indicate that a single version should not be used, or that the package is no longer maintained and any new vulnerabilities will not be fixed.
Found 1 instance in 1 package