Socket
Socket
Sign in | Demo | Install

openai

Package Overview
Dependencies
Maintainers
5
Versions
190
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

openai - npm Package Compare versions

Comparing version 4.53.1 to 4.53.2

2

package.json
{
"name": "openai",
"version": "4.53.1",
"version": "4.53.2",
"description": "The official TypeScript library for the OpenAI API",

@@ -5,0 +5,0 @@ "author": "OpenAI <support@openai.com>",

@@ -22,3 +22,3 @@ # OpenAI Node API Library

```ts
import OpenAI from 'https://deno.land/x/openai@v4.53.1/mod.ts';
import OpenAI from 'https://deno.land/x/openai@v4.53.2/mod.ts';
```

@@ -41,3 +41,3 @@

async function main() {
const chatCompletion = await openai.chat.completions.create({
const chatCompletion = await client.chat.completions.create({
messages: [{ role: 'user', content: 'Say this is a test' }],

@@ -61,3 +61,3 @@ model: 'gpt-3.5-turbo',

async function main() {
const stream = await openai.chat.completions.create({
const stream = await client.chat.completions.create({
model: 'gpt-4',

@@ -95,3 +95,3 @@ messages: [{ role: 'user', content: 'Say this is a test' }],

};
const chatCompletion: OpenAI.Chat.ChatCompletion = await openai.chat.completions.create(params);
const chatCompletion: OpenAI.Chat.ChatCompletion = await client.chat.completions.create(params);
}

@@ -311,16 +311,16 @@

// If you have access to Node `fs` we recommend using `fs.createReadStream()`:
await openai.files.create({ file: fs.createReadStream('input.jsonl'), purpose: 'fine-tune' });
await client.files.create({ file: fs.createReadStream('input.jsonl'), purpose: 'fine-tune' });
// Or if you have the web `File` API you can pass a `File` instance:
await openai.files.create({ file: new File(['my bytes'], 'input.jsonl'), purpose: 'fine-tune' });
await client.files.create({ file: new File(['my bytes'], 'input.jsonl'), purpose: 'fine-tune' });
// You can also pass a `fetch` `Response`:
await openai.files.create({ file: await fetch('https://somesite/input.jsonl'), purpose: 'fine-tune' });
await client.files.create({ file: await fetch('https://somesite/input.jsonl'), purpose: 'fine-tune' });
// Finally, if none of the above are convenient, you can use our `toFile` helper:
await openai.files.create({
await client.files.create({
file: await toFile(Buffer.from('my bytes'), 'input.jsonl'),
purpose: 'fine-tune',
});
await openai.files.create({
await client.files.create({
file: await toFile(new Uint8Array([0, 1, 2]), 'input.jsonl'),

@@ -340,3 +340,3 @@ purpose: 'fine-tune',

async function main() {
const job = await openai.fineTuning.jobs
const job = await client.fineTuning.jobs
.create({ model: 'gpt-3.5-turbo', training_file: 'file-abc123' })

@@ -413,3 +413,3 @@ .catch(async (err) => {

// Or, configure per-request:
await openai.chat.completions.create({ messages: [{ role: 'user', content: 'How can I get the name of the current day in Node.js?' }], model: 'gpt-3.5-turbo' }, {
await client.chat.completions.create({ messages: [{ role: 'user', content: 'How can I get the name of the current day in Node.js?' }], model: 'gpt-3.5-turbo' }, {
maxRetries: 5,

@@ -431,3 +431,3 @@ });

// Override per-request:
await openai.chat.completions.create({ messages: [{ role: 'user', content: 'How can I list all files in a directory using Python?' }], model: 'gpt-3.5-turbo' }, {
await client.chat.completions.create({ messages: [{ role: 'user', content: 'How can I list all files in a directory using Python?' }], model: 'gpt-3.5-turbo' }, {
timeout: 5 * 1000,

@@ -450,3 +450,3 @@ });

// Automatically fetches more pages as needed.
for await (const fineTuningJob of openai.fineTuning.jobs.list({ limit: 20 })) {
for await (const fineTuningJob of client.fineTuning.jobs.list({ limit: 20 })) {
allFineTuningJobs.push(fineTuningJob);

@@ -461,3 +461,3 @@ }

```ts
let page = await openai.fineTuning.jobs.list({ limit: 20 });
let page = await client.fineTuning.jobs.list({ limit: 20 });
for (const fineTuningJob of page.data) {

@@ -486,3 +486,3 @@ console.log(fineTuningJob);

const response = await openai.chat.completions
const response = await client.chat.completions
.create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-3.5-turbo' })

@@ -493,3 +493,3 @@ .asResponse();

const { data: chatCompletion, response: raw } = await openai.chat.completions
const { data: chatCompletion, response: raw } = await client.chat.completions
.create({ messages: [{ role: 'user', content: 'Say this is a test' }], model: 'gpt-3.5-turbo' })

@@ -602,3 +602,3 @@ .withResponse();

// Override per-request:
await openai.models.list({
await client.models.list({
httpAgent: new http.Agent({ keepAlive: false }),

@@ -605,0 +605,0 @@ });

@@ -1,1 +0,1 @@

export const VERSION = '4.53.1'; // x-release-please-version
export const VERSION = '4.53.2'; // x-release-please-version

@@ -1,2 +0,2 @@

export declare const VERSION = "4.53.1";
export declare const VERSION = "4.53.2";
//# sourceMappingURL=version.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VERSION = void 0;
exports.VERSION = '4.53.1'; // x-release-please-version
exports.VERSION = '4.53.2'; // x-release-please-version
//# sourceMappingURL=version.js.map

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Socket — Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc