Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

gpt-tokens

Package Overview
Dependencies
Maintainers
1
Versions
30
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

gpt-tokens - npm Package Compare versions

Comparing version 1.0.6 to 1.0.7

22

index.js

@@ -120,13 +120,2 @@ "use strict";

num_tokens_from_messages(messages, model) {
let encoding;
let tokens_per_message;
let tokens_per_name;
let num_tokens = 0;
try {
encoding = (0, tiktoken_1.encoding_for_model)(model);
}
catch (e) {
this.warning('model not found. Using cl100k_base encoding.');
encoding = (0, tiktoken_1.get_encoding)('cl100k_base');
}
if (model === 'gpt-3.5-turbo') {

@@ -158,2 +147,13 @@ this.warning('gpt-3.5-turbo may change over time. Returning num tokens assuming gpt-3.5-turbo-0301.');

}
let encoding;
let tokens_per_message;
let tokens_per_name;
let num_tokens = 0;
try {
encoding = (0, tiktoken_1.encoding_for_model)(model);
}
catch (e) {
this.warning('model not found. Using cl100k_base encoding.');
encoding = (0, tiktoken_1.get_encoding)('cl100k_base');
}
if (model === 'gpt-3.5-turbo-0301') {

@@ -160,0 +160,0 @@ tokens_per_message = 4;

@@ -172,15 +172,2 @@ import { encoding_for_model, get_encoding, Tiktoken } from '@dqbd/tiktoken'

private num_tokens_from_messages (messages: MessageItem [], model: supportModelType): number {
let encoding!: Tiktoken
let tokens_per_message!: number
let tokens_per_name !: number
let num_tokens = 0
try {
encoding = encoding_for_model(model)
} catch (e) {
this.warning('model not found. Using cl100k_base encoding.')
encoding = get_encoding('cl100k_base')
}
if (model === 'gpt-3.5-turbo') {

@@ -218,2 +205,15 @@ this.warning('gpt-3.5-turbo may change over time. Returning num tokens assuming gpt-3.5-turbo-0301.')

let encoding!: Tiktoken
let tokens_per_message!: number
let tokens_per_name !: number
let num_tokens = 0
try {
encoding = encoding_for_model(model)
} catch (e) {
this.warning('model not found. Using cl100k_base encoding.')
encoding = get_encoding('cl100k_base')
}
if (model === 'gpt-3.5-turbo-0301') {

@@ -220,0 +220,0 @@ tokens_per_message = 4

{
"name": "gpt-tokens",
"version": "1.0.6",
"version": "1.0.7",
"description": "Calculate the token consumption and amount of openai gpt message",

@@ -5,0 +5,0 @@ "keywords": [

@@ -0,0 +0,0 @@ # gpt-tokens

@@ -0,0 +0,0 @@ const { GPTTokens } = require('./index')

Sorry, the diff of this file is not supported yet

Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc