gpt-tokens - npm Package Compare versions

Comparing version 1.2.0 to 1.3.0

dist/index.d.ts (25 changed lines; diff not shown)

package.json
 {
   "name": "gpt-tokens",
-  "version": "1.2.0",
+  "version": "1.3.0",
   "description": "Calculate the token consumption and amount of openai gpt message",

@@ -16,7 +16,14 @@ "keywords": [

"scripts": {
"build": "yarn && tsc && node test.js"
"test": "node test.js",
"build": "npm i && rm -rf dist && tsc && npm run test"
},
"main": "index.js",
"types": "index.d.ts",
"module": "index.ts",
"exports": {
".": {
"import": "./dist/index.ts",
"require": "./dist/index.js"
}
},
"main": "dist/index.js",
"types": "dist/index.d.ts",
"module": "dist/index.ts",
"repository": "https://github.com/Cainier/gpt-tokens",

@@ -27,4 +34,3 @@ "author": "Cainier <xingrong.dev@gmail.com>",

"decimal.js": "^10.4.3",
"js-tiktoken": "^1.0.7",
"openai": "^4.6.1",
"js-tiktoken": "^1.0.10",
"openai-chat-tokens": "^0.2.8"

@@ -34,6 +40,7 @@ },

"@types/node": "^12.20.0",
"typescript": "^5.0.4"
"typescript": "^5.0.4",
"openai": "^4.28.0"
},
"files": [
"index.ts",
"src/index.ts",
"index.d.ts",

@@ -40,0 +47,0 @@ "index.js",
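
Note: the package.json hunks above move the published entry points from the project root into dist/ and add an "exports" map, openai moves from dependencies to devDependencies (at ^4.28.0), and js-tiktoken is bumped to ^1.0.10. A minimal consumer-side sketch of how the new fields should resolve, assuming the package is installed from npm as gpt-tokens:

// CommonJS consumers hit the "require" condition of the new "exports" map
const { GPTTokens } = require('gpt-tokens') // resolves to ./dist/index.js

// ESM / TypeScript consumers hit the "import" condition instead
// import { GPTTokens } from 'gpt-tokens'   // resolves to ./dist/index.ts, typings from ./dist/index.d.ts ("types")

The remaining hunks below appear to come from the package's test script (test.js, which the new build script runs via npm run test).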

@@ -1,4 +0,4 @@

-const fs = require('fs')
-const OpenAI = require('openai')
-const { GPTTokens, testGPTTokens } = require('./index')
+const fs = require('fs')
+const OpenAI = require('openai')
+const { GPTTokens } = require('./dist/index')

@@ -14,6 +14,66 @@ const [apiKey = process.env.OPENAI_API_KEY] = process.argv.slice(2)

+async function testGPTTokens(prompt) {
+    const messages = [
+        { role: 'user', content: prompt },
+    ]
+    const supportModels = GPTTokens.supportModels
+        .filter(model => !model.startsWith('ft:'))
+    const { length: modelsNum } = supportModels
+    for (let i = 0; i < modelsNum; i += 1) {
+        const model = supportModels[i]
+        console.info(`[${i + 1}/${modelsNum}]: Testing ${model}...`)
+        let ignoreModel = false
+        const chatCompletion = await openai.chat.completions.create({
+            model,
+            messages,
+        })
+            .catch(err => {
+                ignoreModel = true
+                console.info(`Ignore model ${model}:`)
+                console.info(err.message)
+            })
+        const openaiUsage = chatCompletion?.usage
+        const gptTokens = new GPTTokens({
+            model,
+            messages: [
+                ...messages,
+                ...[chatCompletion?.choices[0].message],
+            ],
+        })
+        if (ignoreModel) continue
+        if (!openaiUsage) {
+            console.error(`Test ${model} failed (openai return usage is null)`)
+            continue
+        }
+        if (gptTokens.promptUsedTokens !== openaiUsage.prompt_tokens)
+            throw new Error(`Test ${model} promptUsedTokens failed (openai: ${openaiUsage.prompt_tokens}/ gpt-tokens: ${gptTokens.promptUsedTokens})`)
+        if (gptTokens.completionUsedTokens !== openaiUsage.completion_tokens)
+            throw new Error(`Test ${model} completionUsedTokens failed (openai: ${openaiUsage.completion_tokens}/ gpt-tokens: ${gptTokens.completionUsedTokens})`)
+        if (gptTokens.usedTokens !== openaiUsage?.total_tokens)
+            throw new Error(`Test ${model} usedTokens failed (openai: ${openaiUsage?.total_tokens}/ gpt-tokens: ${gptTokens.usedTokens})`)
+        console.info('Pass!')
+    }
+    console.info('Test success!')
+}
 async function testBasic(prompt) {
     console.info('Testing GPT...')
-    await testGPTTokens(openai, prompt)
+    await testGPTTokens(prompt)
 }

@@ -80,3 +140,3 @@

 // In production, this could be your backend API or an external API
-function getCurrentWeather(location, unit = 'fahrenheit') {
+function getCurrentWeather(location) {
     if (location.toLowerCase().includes('tokyo')) {

@@ -83,0 +143,0 @@ return JSON.stringify({ location: 'Tokyo', temperature: '10', unit: 'celsius' })
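
Taken together, the new test code exercises the library's public surface: a GPTTokens instance is built from a model name plus a messages array, and its token counts are compared against the usage object returned by the OpenAI API. A minimal standalone sketch of that surface, based only on what this diff shows; the model name and message contents are placeholders, and 'gpt-3.5-turbo' is assumed to appear in GPTTokens.supportModels:

// Count tokens locally with gpt-tokens, mirroring the fields the test script checks.
const { GPTTokens } = require('gpt-tokens')

const usage = new GPTTokens({
    model   : 'gpt-3.5-turbo', // assumption: a model listed in GPTTokens.supportModels
    messages: [
        { role: 'user', content: 'Hello, world' },             // prompt side
        { role: 'assistant', content: 'Hi, how can I help?' }, // completion side
    ],
})

console.info('promptUsedTokens    :', usage.promptUsedTokens)     // checked against prompt_tokens in test.js
console.info('completionUsedTokens:', usage.completionUsedTokens) // checked against completion_tokens
console.info('usedTokens          :', usage.usedTokens)           // checked against total_tokens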
