Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

openai-streams

Package Overview
Dependencies
Maintainers
1
Versions
105
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

openai-streams - npm Package Compare versions

Comparing version 1.0.22-canary.2 to 1.0.22-canary.3

2

dist/lib/openai/edge.js

@@ -1,1 +0,1 @@

// Edge-runtime OpenAI client. POSTs to the requested API endpoint and always
// yields the result as a WHATWG ReadableStream of Uint8Array chunks, even for
// non-streaming endpoints (which become a single-chunk stream).
import { streamArray as emitChunks } from "yield-stream";
import { ENCODER as textEncoder } from "../../globs/shared.js";
import { EventStream as rawEventStream, TokenStream as tokenOnlyStream } from "../streaming/index.js";

/**
 * Call an OpenAI endpoint and return its response as a byte stream.
 *
 * @param endpoint    API endpoint name, e.g. "completions" or "edits".
 * @param requestArgs JSON body forwarded to the endpoint.
 * @param options     `mode` ("tokens" | "raw") and `apiKey` (defaults to
 *                    the OPENAI_API_KEY environment variable).
 * @returns a ReadableStream<Uint8Array> over the response.
 * @throws if no API key is available, the response has no body, or `mode`
 *         is not one of "tokens" / "raw".
 */
const createStream = async (
  endpoint,
  requestArgs,
  { mode = "tokens", apiKey = process.env.OPENAI_API_KEY } = {}
) => {
  if (!apiKey) {
    throw new Error("No API key provided. Please set the OPENAI_API_KEY environment variable or pass the { apiKey } option.");
  }
  // Only the completions endpoint supports server-side streaming.
  const isCompletions = endpoint === "completions";
  const response = await fetch(`https://api.openai.com/v1/${endpoint}`, {
    method: "POST",
    body: JSON.stringify({ ...requestArgs, stream: isCompletions ? true : undefined }),
    headers: {
      Authorization: `Bearer ${apiKey}`,
      "Content-Type": "application/json",
      Accept: "application/json",
    },
  });
  if (!response.body) {
    throw new Error("No response body");
  }
  if (isCompletions) {
    // Streaming path: decode SSE chunks as bare tokens or raw events.
    switch (mode) {
      case "tokens":
        return tokenOnlyStream(response.body);
      case "raw":
        return rawEventStream(response.body);
      default:
        throw new Error(`Invalid mode: ${mode}`);
    }
  }
  // Non-streaming path: buffer the whole response, then re-emit it as a
  // one-chunk stream.
  const bodyText = await response.text();
  switch (mode) {
    case "tokens": {
      const parsed = JSON.parse(bodyText);
      const { text } = parsed.choices?.[0] ?? {};
      if (typeof text !== "string") {
        // Best-effort: log and yield an empty stream rather than throwing.
        console.error("No text choices received from OpenAI: " + bodyText);
        return emitChunks([]);
      }
      return emitChunks([textEncoder.encode(text)]);
    }
    case "raw":
      return emitChunks([textEncoder.encode(bodyText)]);
    default:
      throw new Error(`Invalid mode: ${mode}`);
  }
};

export { createStream as OpenAI };
// Edge-runtime OpenAI client plus a Node.js adapter (`OpenAI.Node`) that wraps
// the WHATWG stream in a classic NodeJS.ReadableStream.
import { streamArray as emitChunks, yieldStream } from "yield-stream";
import { ENCODER as textEncoder } from "../../globs/shared.js";
import { EventStream as rawEventStream, TokenStream as tokenOnlyStream } from "../streaming/index.js";

/**
 * Call an OpenAI endpoint and return its response as a byte stream.
 *
 * @param endpoint    API endpoint name, e.g. "completions" or "edits".
 * @param requestArgs JSON body forwarded to the endpoint.
 * @param options     `mode` ("tokens" | "raw") and `apiKey` (defaults to
 *                    the OPENAI_API_KEY environment variable).
 * @returns a ReadableStream<Uint8Array> over the response.
 * @throws if no API key is available, the response has no body, or `mode`
 *         is not one of "tokens" / "raw".
 */
const createStream = async (
  endpoint,
  requestArgs,
  { mode = "tokens", apiKey = process.env.OPENAI_API_KEY } = {}
) => {
  if (!apiKey) {
    throw new Error("No API key provided. Please set the OPENAI_API_KEY environment variable or pass the { apiKey } option.");
  }
  // Only the completions endpoint supports server-side streaming.
  const isCompletions = endpoint === "completions";
  const response = await fetch(`https://api.openai.com/v1/${endpoint}`, {
    method: "POST",
    body: JSON.stringify({ ...requestArgs, stream: isCompletions ? true : undefined }),
    headers: {
      Authorization: `Bearer ${apiKey}`,
      "Content-Type": "application/json",
      Accept: "application/json",
    },
  });
  if (!response.body) {
    throw new Error("No response body");
  }
  if (isCompletions) {
    // Streaming path: decode SSE chunks as bare tokens or raw events.
    switch (mode) {
      case "tokens":
        return tokenOnlyStream(response.body);
      case "raw":
        return rawEventStream(response.body);
      default:
        throw new Error(`Invalid mode: ${mode}`);
    }
  }
  // Non-streaming path: buffer the whole response, then re-emit it as a
  // one-chunk stream.
  const bodyText = await response.text();
  switch (mode) {
    case "tokens": {
      const parsed = JSON.parse(bodyText);
      const { text } = parsed.choices?.[0] ?? {};
      if (typeof text !== "string") {
        // Best-effort: log and yield an empty stream rather than throwing.
        console.error("No text choices received from OpenAI: " + bodyText);
        return emitChunks([]);
      }
      return emitChunks([textEncoder.encode(text)]);
    }
    case "raw":
      return emitChunks([textEncoder.encode(bodyText)]);
    default:
      throw new Error(`Invalid mode: ${mode}`);
  }
};

/**
 * Node.js variant: same arguments as the Edge client, but resolves to a
 * NodeJS.ReadableStream built from the underlying WHATWG stream.
 */
createStream.Node = async (endpoint, requestArgs, options) => {
  // Lazy-import "stream" so this module stays loadable in Edge runtimes.
  const { Readable } = await import("stream");
  const edgeStream = await createStream(endpoint, requestArgs, options);
  return Readable.from(yieldStream(edgeStream));
};

export { createStream as OpenAI };

@@ -1,1 +0,1 @@

// Barrel module: re-export both runtime entry points (Node.js and Edge
// clients) from a single import path.
export * from "./node";
export * from "./edge";

@@ -1,1 +0,1 @@

// Compiled ESM barrel: re-export the Node.js and Edge clients.
export*from"./node.js";
export*from"./edge.js";

@@ -17,4 +17,12 @@ /// <reference types="node" />

};
/**
 * NOTE(review): this span is a version diff, not a single coherent file —
 * the first `OpenAIEdge` declaration below is the old revision and the
 * second (intersection type carrying a `.Node` member) is its replacement.
 * They cannot both exist in one compiled .d.ts.
 */
export type OpenAIEdge = <T extends OpenAIAPIEndpoint>(endpoint: T, args: OpenAICreateArgs<T>, options?: OpenAIOptions) => Promise<ReadableStream<Uint8Array>>;
/**
 * The OpenAI API client for Edge runtime.
 */
export type OpenAIEdge = (<T extends OpenAIAPIEndpoint>(endpoint: T, args: OpenAICreateArgs<T>, options?: OpenAIOptions) => Promise<ReadableStream<Uint8Array>>) & {
/**
 * The OpenAI API client for Node.js runtime.
 */
Node: OpenAINode;
};
/**
 * The Node.js client type: same call signature as the Edge client, but
 * resolves to a classic `NodeJS.ReadableStream` instead of a WHATWG
 * `ReadableStream<Uint8Array>`.
 */
export type OpenAINode = <T extends OpenAIAPIEndpoint>(endpoint: T, args: OpenAICreateArgs<T>, options?: OpenAIOptions) => Promise<NodeJS.ReadableStream>;
export * from "./pinned";
{
"name": "openai-streams",
"description": "Tools for working with OpenAI streams in Node.js and TypeScript.",
"version": "1.0.22-canary.2",
"version": "1.0.22-canary.3",
"license": "MIT",

@@ -6,0 +6,0 @@ "type": "module",

@@ -8,4 +8,6 @@ # OpenAI Streams

This library returns OpenAI API responses as streams only. Non-stream endpoints
like `edits` etc. are simply a stream with only one chunk update.
like `edits` etc. are simply a stream with only one chunk update.
Now supports WHATWG *and* Node.js streams via `OpenAI()` and `OpenAI.Node()`.
It simplifies the following:

@@ -56,2 +58,20 @@

### Example: Consuming streams using Next.js API Route (Node)
```ts
import { OpenAI } from "openai-streams";
export default async function handler(req, res) {
const stream = await OpenAI.Node(
"completions",
{
model: "text-davinci-003",
prompt: "Write a sentence.\n\n",
max_tokens: 100
},
);
stream.pipe(res);
}
```
#### Example: Consuming streams in Next.js Edge functions

@@ -58,0 +78,0 @@

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc