@ai-sdk/openai - npm package version comparison

Comparing version 0.0.2 to 0.0.3

./dist/index.js (24)

@@ -24,2 +24,3 @@ "use strict";

   OpenAI: () => OpenAI,
+  createOpenAI: () => createOpenAI,
   openai: () => openai

@@ -796,8 +797,29 @@ });

 };
-var openai = new OpenAI();
+// src/openai-provider.ts
+function createOpenAI(options = {}) {
+  const openai2 = new OpenAI(options);
+  const provider = function(modelId, settings) {
+    if (new.target) {
+      throw new Error(
+        "The OpenAI model function cannot be called with the new keyword."
+      );
+    }
+    if (modelId === "gpt-3.5-turbo-instruct") {
+      return openai2.completion(modelId, settings);
+    } else {
+      return openai2.chat(modelId, settings);
+    }
+  };
+  provider.chat = openai2.chat.bind(openai2);
+  provider.completion = openai2.completion.bind(openai2);
+  return provider;
+}
+var openai = createOpenAI();
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   OpenAI,
+  createOpenAI,
   openai
 });
 //# sourceMappingURL=index.js.map
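Based on the compiled output above, the new `createOpenAI` factory returns a provider that is itself callable and routes by model id. A minimal usage sketch (the model ids are illustrative; the API-key fallback to `OPENAI_API_KEY` is the one the README mentions):

```ts
import { createOpenAI } from '@ai-sdk/openai';

// All options are optional; the API key falls back to the OPENAI_API_KEY
// environment variable per the README.
const openai = createOpenAI({ apiKey: process.env.OPENAI_API_KEY });

// Calling the provider as a function routes by model id:
// 'gpt-3.5-turbo-instruct' goes to the completions API, other ids to the chat API.
const completionModel = openai('gpt-3.5-turbo-instruct');
const chatModel = openai('gpt-3.5-turbo');

// The bound helpers select an API explicitly.
const chat = openai.chat('gpt-4');
const completion = openai.completion('gpt-3.5-turbo-instruct');

// The new.target guard means the provider cannot be constructed:
// new openai('gpt-4'); // throws "The OpenAI model function cannot be called with the new keyword."
```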

@@ -96,3 +96,3 @@ import { LanguageModelV1 } from '@ai-sdk/provider';

 /**
- * OpenAI provider.
+ * @deprecated Use `createOpenAI` instead.
  */

@@ -131,7 +131,35 @@ declare class OpenAI {

 }
+interface OpenAIProvider {
+    (modelId: 'gpt-3.5-turbo-instruct', settings?: OpenAICompletionSettings): OpenAICompletionLanguageModel;
+    (modelId: OpenAIChatModelId, settings?: OpenAIChatSettings): OpenAIChatLanguageModel;
+    chat(modelId: OpenAIChatModelId, settings?: OpenAIChatSettings): OpenAIChatLanguageModel;
+    completion(modelId: OpenAICompletionModelId, settings?: OpenAICompletionSettings): OpenAICompletionLanguageModel;
+}
+/**
+ * Create an OpenAI provider.
+ */
+declare function createOpenAI(options?: {
+    /**
+     * Base URL for the OpenAI API calls.
+     */
+    baseURL?: string;
+    /**
+     * @deprecated Use `baseURL` instead.
+     */
+    baseUrl?: string;
+    /**
+     * API key for authenticating requests.
+     */
+    apiKey?: string;
+    /**
+     * Organization ID.
+     */
+    organization?: string;
+}): OpenAIProvider;
 /**
  * Default OpenAI provider instance.
  */
-declare const openai: OpenAI;
+declare const openai: OpenAIProvider;
-export { OpenAI, openai };
+export { OpenAI, type OpenAIProvider, createOpenAI, openai };
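The declaration changes above also document the options accepted by `createOpenAI`. A short sketch of configuring a provider through them; the URL and organization values are placeholders, not defaults shipped by the package:

```ts
import { createOpenAI, type OpenAIProvider } from '@ai-sdk/openai';

const provider: OpenAIProvider = createOpenAI({
  baseURL: 'https://my-proxy.example.com/v1', // placeholder proxy URL
  apiKey: process.env.OPENAI_API_KEY, // falls back to the env variable if omitted
  organization: 'org-xxxxxxxx', // placeholder organization ID
});

// The overloaded call signatures: the 'gpt-3.5-turbo-instruct' overload returns a
// completion language model, any chat model id returns a chat language model.
const instructModel = provider('gpt-3.5-turbo-instruct');
const chatModel = provider('gpt-4');
```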

package.json (2)
 {
   "name": "@ai-sdk/openai",
-  "version": "0.0.2",
+  "version": "0.0.3",
   "license": "Apache-2.0",

@@ -5,0 +5,0 @@ "sideEffects": false,

@@ -16,8 +16,8 @@ # Vercel AI SDK - OpenAI Provider

-You can import `OpenAI` from `ai/openai` and initialize a provider instance with various settings:
+You can import `createOpenAI` from `@ai-sdk/openai` and create a provider instance with various settings:
 ```ts
-import { OpenAI } from '@ai-sdk/openai'
+import { createOpenAI } from '@ai-sdk/openai'
-const openai = new OpenAI({
+const openai = createOpenAI({
   baseURL: '', // optional base URL for proxies etc.

@@ -35,4 +35,16 @@ apiKey: '' // optional API key, default to env property OPENAI_API_KEY

-## Chat Models
+## Models
+The OpenAI provider instance is a function that you can invoke to create a model:
+```ts
+const model = openai('gpt-3.5-turbo');
+```
+It automatically selects the correct API based on the model id.
+You can also provide model-specific parameters or select a model API by using `.chat` or `.completion`.
+### Chat Models
 You can create models that call the [OpenAI chat API](https://platform.openai.com/docs/api-reference/chat) using the `.chat()` factory method.
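The new `## Models` section of the README describes invoking the provider instance directly to obtain a model. A sketch of passing such a model to the AI SDK's `generateText` helper; the `generateText` import from the `ai` package is an assumption here and is not part of this diff:

```ts
import { openai } from '@ai-sdk/openai';
import { generateText } from 'ai'; // assumed companion package, not part of this diff

const { text } = await generateText({
  // The provider call picks the chat API for this model id automatically.
  model: openai('gpt-3.5-turbo'),
  prompt: 'Summarize the changes between 0.0.2 and 0.0.3 in one sentence.',
});

console.log(text);
```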

@@ -59,3 +71,3 @@ The first argument is the model id, e.g. `gpt-4`.

-## Completion Models
+### Completion Models

@@ -74,3 +86,3 @@ You can create models that call the [OpenAI completions API](https://platform.openai.com/docs/api-reference/completions) using the `.completion()` factory method.

 ```ts
-const model = openai.chat('gpt-3.5-turbo', {
+const model = openai.completion('gpt-3.5-turbo-instruct', {
   echo: true, // optional, echo the prompt in addition to the completion

@@ -77,0 +89,0 @@ logitBias: {
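The completion example in the README hunk is truncated at the `logitBias` context line. A fuller sketch of a completion model with those settings; the specific token id and bias value are illustrative, not taken from the diff:

```ts
import { openai } from '@ai-sdk/openai';

const model = openai.completion('gpt-3.5-turbo-instruct', {
  echo: true, // echo the prompt in addition to the completion (from the README)
  logitBias: {
    50256: -100, // illustrative only: bias against a specific token id
  },
});
```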

Sorry, the diffs of 4 other files are not supported yet.
