
openai-streams

openai-streams - npm Package Compare versions

Comparing version 5.9.0 to 5.9.1-cjs.0

Every hunk below follows the same pattern: the first minified line is the module as built for 5.9.0 (ES module output) and the second line is the same module re-emitted as CommonJS for 5.9.1-cjs.0. The logic of each module is unchanged; the release only swaps the export format, switches package.json "type" from "module" to "commonjs", and bumps the version.


dist/globs/shared.js

@@ -1,1 +1,1 @@

const e=globalThis.process?.versions?.node?"node":"edge",o=new TextEncoder,n=new TextDecoder;export{n as DECODER,o as ENCODER,e as RUNTIME};
"use strict";var t=Object.defineProperty;var c=Object.getOwnPropertyDescriptor;var E=Object.getOwnPropertyNames;var d=Object.prototype.hasOwnProperty;var x=(o,e)=>{for(var s in e)t(o,s,{get:e[s],enumerable:!0})},p=(o,e,s,r)=>{if(e&&typeof e=="object"||typeof e=="function")for(let n of E(e))!d.call(o,n)&&n!==s&&t(o,n,{get:()=>e[n],enumerable:!(r=c(e,n))||r.enumerable});return o};var D=o=>p(t({},"__esModule",{value:!0}),o);var i={};x(i,{DECODER:()=>g,ENCODER:()=>R,RUNTIME:()=>T});module.exports=D(i);const T=globalThis.process?.versions?.node?"node":"edge",R=new TextEncoder,g=new TextDecoder;0&&(module.exports={DECODER,ENCODER,RUNTIME});

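Both sides of this hunk carry the same three exports; only the module format changes. De-minified, the 5.9.0 line reads roughly as below (local identifier names are reconstructions; RUNTIME, ENCODER and DECODER are the exported names in the source):

// globs/shared.ts — runtime detection plus shared encoder/decoder instances.
// "node" when a Node.js process object is present, otherwise "edge".
export const RUNTIME: "node" | "edge" =
  globalThis.process?.versions?.node ? "node" : "edge";

// Reused across the package to convert between strings and Uint8Array chunks.
export const ENCODER = new TextEncoder();
export const DECODER = new TextDecoder();
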
@@ -1,1 +1,1 @@

export*from"./lib/index.js";
"use strict";var a=Object.defineProperty;var b=Object.getOwnPropertyDescriptor;var c=Object.getOwnPropertyNames;var d=Object.prototype.hasOwnProperty;var p=(r,o,f,x)=>{if(o&&typeof o=="object"||typeof o=="function")for(let e of c(o))!d.call(r,e)&&e!==f&&a(r,e,{get:()=>o[e],enumerable:!(x=b(o,e))||x.enumerable});return r},t=(r,o,f)=>(p(r,o,"default"),f&&p(f,o,"default"));var g=r=>p(a({},"__esModule",{value:!0}),r);var m={};module.exports=g(m);t(m,require("./lib"),module.exports);0&&(module.exports={...require("./lib")});

@@ -1,1 +1,1 @@

const s=async(n,i,{delay:r,maxRetries:o}={delay:500,maxRetries:7})=>{for(let t=0;t<=o;t++)try{const e=await fetch(n,i);if(!e.ok&&(await e.json()).type==="RATE_LIMIT_REACHED")throw new Error("RATE_LIMIT_REACHED");return e}catch(e){if(e.message==="RATE_LIMIT_REACHED"&&t<o)console.log("Rate limit reached. Retrying in "+r+"ms"),await new Promise(a=>setTimeout(a,r)),r*=2;else throw e}throw new Error("Max retries reached.")};export{s as fetchWithBackoff};
"use strict";var i=Object.defineProperty;var c=Object.getOwnPropertyDescriptor;var f=Object.getOwnPropertyNames;var E=Object.prototype.hasOwnProperty;var R=(r,e)=>{for(var o in e)i(r,o,{get:e[o],enumerable:!0})},h=(r,e,o,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let t of f(e))!E.call(r,t)&&t!==o&&i(r,t,{get:()=>e[t],enumerable:!(n=c(e,t))||n.enumerable});return r};var w=r=>h(i({},"__esModule",{value:!0}),r);var I={};R(I,{fetchWithBackoff:()=>m});module.exports=w(I);const m=async(r,e,{delay:o,maxRetries:n}={delay:500,maxRetries:7})=>{for(let t=0;t<=n;t++)try{const a=await fetch(r,e);if(!a.ok&&(await a.json()).type==="RATE_LIMIT_REACHED")throw new Error("RATE_LIMIT_REACHED");return a}catch(a){if(a.message==="RATE_LIMIT_REACHED"&&t<n)console.log("Rate limit reached. Retrying in "+o+"ms"),await new Promise(s=>setTimeout(s,o)),o*=2;else throw a}throw new Error("Max retries reached.")};0&&(module.exports={fetchWithBackoff});

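This hunk is the retry helper. A readable sketch of fetchWithBackoff, reconstructed from the minified 5.9.0 line (parameter names such as input and attempt are guesses; the 500 ms starting delay, the doubling, and the 7-retry cap come straight from the source):

// backoff.ts — retry fetch() when OpenAI reports RATE_LIMIT_REACHED, doubling the delay.
export const fetchWithBackoff = async (
  input: RequestInfo | URL,
  init?: RequestInit,
  // As in the published code, the defaults apply only when the whole options
  // object is omitted, not per field.
  { delay, maxRetries } = { delay: 500, maxRetries: 7 }
): Promise<Response> => {
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      const response = await fetch(input, init);
      // A non-OK response whose JSON body is typed RATE_LIMIT_REACHED triggers a retry.
      if (!response.ok && (await response.json()).type === "RATE_LIMIT_REACHED") {
        throw new Error("RATE_LIMIT_REACHED");
      }
      return response;
    } catch (error) {
      if ((error as Error).message === "RATE_LIMIT_REACHED" && attempt < maxRetries) {
        console.log("Rate limit reached. Retrying in " + delay + "ms");
        await new Promise((resolve) => setTimeout(resolve, delay));
        delay *= 2;
      } else {
        throw error;
      }
    }
  }
  throw new Error("Max retries reached.");
};
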
@@ -1,1 +1,1 @@

const t={NO_API_KEY:"No API key provided. Please set the OPENAI_API_KEY environment variable or pass the { apiKey } option.",MAX_TOKENS:"Maximum number of tokens reached.",UNKNOWN:"An unknown error occurred.",INVALID_API_KEY:"Incorrect API key provided. You can find your API key at https://platform.openai.com/account/api-keys.",INVALID_MODEL:"The model does not exist",RATE_LIMIT_REACHED:"You are sending requests too quickly. Pace your requests. Read the Rate limit guide.",EXCEEDED_QUOTA:"You have hit your maximum monthly spend (hard limit) which you can view in the account billing section. Apply for a quota increase.",ENGINE_OVERLOAD:"Our servers are experiencing high traffic. Please retry your requests after a brief wait.",SERVER_ERROR:"Issue on our servers. Retry your request after a brief wait and contact us if the issue persists. Check the status page."};class o extends Error{type;message;constructor(e){const r=t[e];super(r),this.message=r,this.type=e}toJSON(){return{type:this.type,message:this.message}}}export{o as OpenAIError,t as OpenAIErrors};
"use strict";var o=Object.defineProperty;var i=Object.getOwnPropertyDescriptor;var p=Object.getOwnPropertyNames;var u=Object.prototype.hasOwnProperty;var E=(r,e)=>{for(var t in e)o(r,t,{get:e[t],enumerable:!0})},c=(r,e,t,a)=>{if(e&&typeof e=="object"||typeof e=="function")for(let s of p(e))!u.call(r,s)&&s!==t&&o(r,s,{get:()=>e[s],enumerable:!(a=i(e,s))||a.enumerable});return r};var y=r=>c(o({},"__esModule",{value:!0}),r);var I={};E(I,{OpenAIError:()=>A,OpenAIErrors:()=>n});module.exports=y(I);const n={NO_API_KEY:"No API key provided. Please set the OPENAI_API_KEY environment variable or pass the { apiKey } option.",MAX_TOKENS:"Maximum number of tokens reached.",UNKNOWN:"An unknown error occurred.",INVALID_API_KEY:"Incorrect API key provided. You can find your API key at https://platform.openai.com/account/api-keys.",INVALID_MODEL:"The model does not exist",RATE_LIMIT_REACHED:"You are sending requests too quickly. Pace your requests. Read the Rate limit guide.",EXCEEDED_QUOTA:"You have hit your maximum monthly spend (hard limit) which you can view in the account billing section. Apply for a quota increase.",ENGINE_OVERLOAD:"Our servers are experiencing high traffic. Please retry your requests after a brief wait.",SERVER_ERROR:"Issue on our servers. Retry your request after a brief wait and contact us if the issue persists. Check the status page."};class A extends Error{type;message;constructor(e){const t=n[e];super(t),this.message=t,this.type=e}toJSON(){return{type:this.type,message:this.message}}}0&&(module.exports={OpenAIError,OpenAIErrors});

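This hunk is the error module: a table of error messages plus an Error subclass that tags instances with their table key. Reconstructed from the minified line (the OpenAIErrorType alias and the as const annotation are additions for typing; the strings and class shape are copied from the source):

// errors.ts — fixed set of error types mapped to human-readable messages.
export const OpenAIErrors = {
  NO_API_KEY:
    "No API key provided. Please set the OPENAI_API_KEY environment variable or pass the { apiKey } option.",
  MAX_TOKENS: "Maximum number of tokens reached.",
  UNKNOWN: "An unknown error occurred.",
  INVALID_API_KEY:
    "Incorrect API key provided. You can find your API key at https://platform.openai.com/account/api-keys.",
  INVALID_MODEL: "The model does not exist",
  RATE_LIMIT_REACHED:
    "You are sending requests too quickly. Pace your requests. Read the Rate limit guide.",
  EXCEEDED_QUOTA:
    "You have hit your maximum monthly spend (hard limit) which you can view in the account billing section. Apply for a quota increase.",
  ENGINE_OVERLOAD:
    "Our servers are experiencing high traffic. Please retry your requests after a brief wait.",
  SERVER_ERROR:
    "Issue on our servers. Retry your request after a brief wait and contact us if the issue persists. Check the status page.",
} as const;

export type OpenAIErrorType = keyof typeof OpenAIErrors;

// Error subclass that records which table entry it represents and
// serializes to { type, message } via toJSON().
export class OpenAIError extends Error {
  type: OpenAIErrorType;
  message: string;

  constructor(type: OpenAIErrorType) {
    const message = OpenAIErrors[type];
    super(message);
    this.message = message;
    this.type = type;
  }

  toJSON() {
    return { type: this.type, message: this.message };
  }
}
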
@@ -1,1 +1,1 @@

export*from"./openai/index.js";export*from"./types.js";export*from"./streaming/index.js";export*from"./errors.js";
"use strict";var a=Object.defineProperty;var b=Object.getOwnPropertyDescriptor;var c=Object.getOwnPropertyNames;var d=Object.prototype.hasOwnProperty;var t=(f,e,p,x)=>{if(e&&typeof e=="object"||typeof e=="function")for(let m of c(e))!d.call(f,m)&&m!==p&&a(f,m,{get:()=>e[m],enumerable:!(x=b(e,m))||x.enumerable});return f},r=(f,e,p)=>(t(f,e,"default"),p&&t(p,e,"default"));var g=f=>t(a({},"__esModule",{value:!0}),f);var o={};module.exports=g(o);r(o,require("./openai"),module.exports);r(o,require("./types"),module.exports);r(o,require("./streaming"),module.exports);r(o,require("./errors"),module.exports);0&&(module.exports={...require("./openai"),...require("./types"),...require("./streaming"),...require("./errors")});

@@ -1,1 +1,1 @@

import{streamArray as c}from"yield-stream";import{OpenAIAPIEndpoints as b}from"../types.js";import{ENCODER as m}from"../../globs/shared.js";import{OpenAIError as e}from"../errors.js";import{fetchWithBackoff as u}from"../backoff.js";import{ChatStream as R,EventStream as g,getTokensFromResponse as y,TokenStream as S}from"../streaming/index.js";const $=async(n,h,{mode:r="tokens",apiBase:f="https://api.openai.com/v1",apiKey:i=process.env.OPENAI_API_KEY,apiHeaders:d={},controller:l,onDone:A,onParse:E}={})=>{if(!i)throw new e("NO_API_KEY");const p=n==="completions"||n==="chat",I=b[n],t=await u(`${f}/${I}`,{method:"POST",body:JSON.stringify({...h,stream:p?!0:void 0}),headers:{Authorization:`Bearer ${i}`,"Content-Type":"application/json",Accept:"application/json",...d},signal:l?.signal});switch(t.status){case 401:throw new e("INVALID_API_KEY");case 404:throw new e("INVALID_MODEL");case 429:throw new e("RATE_LIMIT_REACHED");case 500:throw new e("SERVER_ERROR");default:if(!t.body)throw new e("UNKNOWN")}let o;const s={mode:r,onDone:A,onParse:E};if(p)switch(r){case"raw":o=g(t.body,s);break;case"tokens":switch(n){case"chat":o=R(t.body,s);break;default:o=S(t.body,s);break}break;default:throw console.error(`Unknown mode: ${r} for streaming response.`),new e("UNKNOWN")}else{const a=await t.text();switch(r){case"tokens":const N=JSON.parse(a),w=y(N);if(typeof w!="string"){console.error("No text choices received from OpenAI: "+a),o=c([]);break}const O=m.encode(w);o=c([O]);break;case"raw":const k=m.encode(a);o=c([k]);break;default:throw console.error(`Unknown mode: ${r} for non-streaming response.`),new e("UNKNOWN")}}return o};export{$ as OpenAI};
"use strict";var h=Object.defineProperty;var y=Object.getOwnPropertyDescriptor;var S=Object.getOwnPropertyNames;var _=Object.prototype.hasOwnProperty;var C=(e,o)=>{for(var t in o)h(e,t,{get:o[t],enumerable:!0})},P=(e,o,t,i)=>{if(o&&typeof o=="object"||typeof o=="function")for(let n of S(o))!_.call(e,n)&&n!==t&&h(e,n,{get:()=>o[n],enumerable:!(i=y(o,n))||i.enumerable});return e};var T=e=>P(h({},"__esModule",{value:!0}),e);var D={};C(D,{OpenAI:()=>U});module.exports=T(D);var p=require("yield-stream"),A=require("../types"),f=require("../../globs/shared"),r=require("../errors"),E=require("../backoff"),s=require("../streaming");const U=async(e,o,{mode:t="tokens",apiBase:i="https://api.openai.com/v1",apiKey:n=process.env.OPENAI_API_KEY,apiHeaders:I={},controller:N,onDone:O,onParse:k}={})=>{if(!n)throw new r.OpenAIError("NO_API_KEY");const d=e==="completions"||e==="chat",b=A.OpenAIAPIEndpoints[e],c=await(0,E.fetchWithBackoff)(`${i}/${b}`,{method:"POST",body:JSON.stringify({...o,stream:d?!0:void 0}),headers:{Authorization:`Bearer ${n}`,"Content-Type":"application/json",Accept:"application/json",...I},signal:N?.signal});switch(c.status){case 401:throw new r.OpenAIError("INVALID_API_KEY");case 404:throw new r.OpenAIError("INVALID_MODEL");case 429:throw new r.OpenAIError("RATE_LIMIT_REACHED");case 500:throw new r.OpenAIError("SERVER_ERROR");default:if(!c.body)throw new r.OpenAIError("UNKNOWN")}let a;const w={mode:t,onDone:O,onParse:k};if(d)switch(t){case"raw":a=(0,s.EventStream)(c.body,w);break;case"tokens":switch(e){case"chat":a=(0,s.ChatStream)(c.body,w);break;default:a=(0,s.TokenStream)(c.body,w);break}break;default:throw console.error(`Unknown mode: ${t} for streaming response.`),new r.OpenAIError("UNKNOWN")}else{const m=await c.text();switch(t){case"tokens":const u=JSON.parse(m),l=(0,s.getTokensFromResponse)(u);if(typeof l!="string"){console.error("No text choices received from OpenAI: "+m),a=(0,p.streamArray)([]);break}const R=f.ENCODER.encode(l);a=(0,p.streamArray)([R]);break;case"raw":const g=f.ENCODER.encode(m);a=(0,p.streamArray)([g]);break;default:throw console.error(`Unknown mode: ${t} for non-streaming response.`),new r.OpenAIError("UNKNOWN")}}return a};0&&(module.exports={OpenAI});

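This is the core edge/fetch implementation of OpenAI(). A de-minified sketch of the 5.9.0 line (the endpoint and args parameter names and all type annotations are reconstructions; the option names mode, apiBase, apiKey, apiHeaders, controller, onDone and onParse, the status-code mapping, and the streaming/non-streaming branches are taken from the source):

// openai/edge.ts — the fetch-based implementation shared by edge runtimes.
import { streamArray } from "yield-stream";
import { OpenAIAPIEndpoints } from "../types";
import { ENCODER } from "../../globs/shared";
import { OpenAIError } from "../errors";
import { fetchWithBackoff } from "../backoff";
import { ChatStream, EventStream, getTokensFromResponse, TokenStream } from "../streaming";

export const OpenAI = async (
  endpoint: keyof typeof OpenAIAPIEndpoints,
  args: Record<string, unknown>,
  {
    mode = "tokens",
    apiBase = "https://api.openai.com/v1",
    apiKey = process.env.OPENAI_API_KEY,
    apiHeaders = {},
    controller,
    onDone,
    onParse,
  }: {
    mode?: "tokens" | "raw"; apiBase?: string; apiKey?: string;
    apiHeaders?: Record<string, string>; controller?: AbortController;
    onDone?: () => void | Promise<void>; onParse?: (chunk: string) => void;
  } = {}
) => {
  if (!apiKey) throw new OpenAIError("NO_API_KEY");

  // Only chat and completions are requested with stream: true.
  const shouldStream = endpoint === "completions" || endpoint === "chat";

  const response = await fetchWithBackoff(`${apiBase}/${OpenAIAPIEndpoints[endpoint]}`, {
    method: "POST",
    body: JSON.stringify({ ...args, stream: shouldStream ? true : undefined }),
    headers: {
      Authorization: `Bearer ${apiKey}`,
      "Content-Type": "application/json",
      Accept: "application/json",
      ...apiHeaders,
    },
    signal: controller?.signal,
  });

  // Map error status codes onto the error table; anything else must have a body.
  switch (response.status) {
    case 401: throw new OpenAIError("INVALID_API_KEY");
    case 404: throw new OpenAIError("INVALID_MODEL");
    case 429: throw new OpenAIError("RATE_LIMIT_REACHED");
    case 500: throw new OpenAIError("SERVER_ERROR");
  }
  const body = response.body;
  if (!body) throw new OpenAIError("UNKNOWN");

  const streamOptions = { mode, onDone, onParse };

  if (shouldStream) {
    // Streaming endpoints: wrap the SSE body in the matching transform pipeline.
    switch (mode) {
      case "raw":
        return EventStream(body, streamOptions);
      case "tokens":
        return endpoint === "chat"
          ? ChatStream(body, streamOptions)
          : TokenStream(body, streamOptions);
      default:
        console.error(`Unknown mode: ${mode} for streaming response.`);
        throw new OpenAIError("UNKNOWN");
    }
  }

  // Non-streaming endpoints: read the whole response and emit a one-chunk stream.
  const text = await response.text();
  switch (mode) {
    case "tokens": {
      const tokens = getTokensFromResponse(JSON.parse(text));
      if (typeof tokens !== "string") {
        console.error("No text choices received from OpenAI: " + text);
        return streamArray([]);
      }
      return streamArray([ENCODER.encode(tokens)]);
    }
    case "raw":
      return streamArray([ENCODER.encode(text)]);
    default:
      console.error(`Unknown mode: ${mode} for non-streaming response.`);
      throw new OpenAIError("UNKNOWN");
  }
};

The CommonJS line in this hunk wraps the same function in require()/module.exports interop helpers; the behavior is identical.
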
@@ -1,1 +1,1 @@

export*from"./edge.js";
"use strict";var a=Object.defineProperty;var b=Object.getOwnPropertyDescriptor;var c=Object.getOwnPropertyNames;var d=Object.prototype.hasOwnProperty;var p=(r,o,f,x)=>{if(o&&typeof o=="object"||typeof o=="function")for(let e of c(o))!d.call(r,e)&&e!==f&&a(r,e,{get:()=>o[e],enumerable:!(x=b(o,e))||x.enumerable});return r},t=(r,o,f)=>(p(r,o,"default"),f&&p(f,o,"default"));var g=r=>p(a({},"__esModule",{value:!0}),r);var m={};module.exports=g(m);t(m,require("./edge"),module.exports);0&&(module.exports={...require("./edge")});

@@ -1,1 +1,1 @@

import{Readable as n}from"stream";import{yieldStream as m}from"yield-stream";import{OpenAI as a}from"./edge.js";const I=async(e,o,r)=>{const t=await a(e,o,r);return n.from(m(t))};export{I as OpenAI};
"use strict";var m=Object.defineProperty;var d=Object.getOwnPropertyDescriptor;var s=Object.getOwnPropertyNames;var f=Object.prototype.hasOwnProperty;var A=(o,e)=>{for(var r in e)m(o,r,{get:e[r],enumerable:!0})},I=(o,e,r,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let t of s(e))!f.call(o,t)&&t!==r&&m(o,t,{get:()=>e[t],enumerable:!(n=d(e,t))||n.enumerable});return o};var O=o=>I(m({},"__esModule",{value:!0}),o);var l={};A(l,{OpenAI:()=>c});module.exports=O(l);var a=require("stream"),p=require("yield-stream"),i=require("./edge");const c=async(o,e,r)=>{const n=await(0,i.OpenAI)(o,e,r);return a.Readable.from((0,p.yieldStream)(n))};0&&(module.exports={OpenAI});

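The Node build is a thin wrapper: it calls the edge implementation and converts the resulting ReadableStream into a Node Readable. Sketch (the OpenAIEdge alias and the Parameters-based typing are reconstructions):

// openai/node.ts — Node entry point.
import { Readable } from "stream";
import { yieldStream } from "yield-stream";
import { OpenAI as OpenAIEdge } from "./edge";

export const OpenAI = async (
  endpoint: Parameters<typeof OpenAIEdge>[0],
  args: Parameters<typeof OpenAIEdge>[1],
  options?: Parameters<typeof OpenAIEdge>[2]
): Promise<Readable> => {
  const stream = await OpenAIEdge(endpoint, args, options);
  // yieldStream turns the web ReadableStream into an async iterator,
  // which Readable.from then wraps for Node consumers.
  return Readable.from(yieldStream(stream));
};
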
@@ -1,1 +1,1 @@

export*from"./streams.js";export*from"./transforms.js";
"use strict";var a=Object.defineProperty;var b=Object.getOwnPropertyDescriptor;var c=Object.getOwnPropertyNames;var d=Object.prototype.hasOwnProperty;var t=(r,o,p,x)=>{if(o&&typeof o=="object"||typeof o=="function")for(let m of c(o))!d.call(r,m)&&m!==p&&a(r,m,{get:()=>o[m],enumerable:!(x=b(o,m))||x.enumerable});return r},f=(r,o,p)=>(t(r,o,"default"),p&&t(p,o,"default"));var g=r=>t(a({},"__esModule",{value:!0}),r);var e={};module.exports=g(e);f(e,require("./streams"),module.exports);f(e,require("./transforms"),module.exports);0&&(module.exports={...require("./streams"),...require("./transforms")});

@@ -1,1 +1,1 @@

import{ENCODER as i,DECODER as c}from"../../globs/shared.js";import{ChatParser as h,TokenParser as y}from"./transforms.js";import{createParser as l}from"eventsource-parser";import{pipeline as d,yieldStream as A}from"yield-stream";import{OpenAIError as k}from"../errors.js";const m=(o,{mode:t="tokens",onDone:s})=>new ReadableStream({async start(e){const f=l(async a=>{if(a.type==="event"){const{data:n}=a;if(n==="[DONE]"){e.desiredSize===null||e.close(),await s?.();return}try{const r=JSON.parse(n);if(e.enqueue(i.encode(n)),t==="tokens"&&r?.choices){const{choices:S}=r;for(const O of S)if(O?.finish_reason==="length")throw new k("MAX_TOKENS")}}catch(r){e.error(r)}}});for await(const a of A(o)){const n=c.decode(a);try{const r=JSON.parse(n);r.hasOwnProperty("error")&&e.error(new Error(r.error.message))}catch{}f.feed(n)}}}),p=({onParse:o})=>async function*(s){const e=c.decode(s);o?.(e),e&&(yield i.encode(e))},x=(o,t={mode:"tokens"})=>d(m(o,t),y,p(t)),v=(o,t={mode:"tokens"})=>d(m(o,t),h,p(t));export{v as ChatStream,m as EventStream,x as TokenStream};
"use strict";var p=Object.defineProperty;var A=Object.getOwnPropertyDescriptor;var k=Object.getOwnPropertyNames;var u=Object.prototype.hasOwnProperty;var I=(r,e)=>{for(var n in e)p(r,n,{get:e[n],enumerable:!0})},E=(r,e,n,t)=>{if(e&&typeof e=="object"||typeof e=="function")for(let a of k(e))!u.call(r,a)&&a!==n&&p(r,a,{get:()=>e[a],enumerable:!(t=A(e,a))||t.enumerable});return r};var w=r=>E(p({},"__esModule",{value:!0}),r);var x={};I(x,{ChatStream:()=>P,EventStream:()=>f,TokenStream:()=>C});module.exports=w(x);var i=require("../../globs/shared"),m=require("./transforms"),O=require("eventsource-parser"),c=require("yield-stream"),h=require("../errors");const f=(r,{mode:e="tokens",onDone:n})=>new ReadableStream({async start(t){const a=(0,O.createParser)(async d=>{if(d.type==="event"){const{data:s}=d;if(s==="[DONE]"){t.desiredSize===null||t.close(),await n?.();return}try{const o=JSON.parse(s);if(t.enqueue(i.ENCODER.encode(s)),e==="tokens"&&o?.choices){const{choices:y}=o;for(const l of y)if(l?.finish_reason==="length")throw new h.OpenAIError("MAX_TOKENS")}}catch(o){t.error(o)}}});for await(const d of(0,c.yieldStream)(r)){const s=i.DECODER.decode(d);try{const o=JSON.parse(s);o.hasOwnProperty("error")&&t.error(new Error(o.error.message))}catch{}a.feed(s)}}}),S=({onParse:r})=>async function*(n){const t=i.DECODER.decode(n);r?.(t),t&&(yield i.ENCODER.encode(t))},C=(r,e={mode:"tokens"})=>(0,c.pipeline)(f(r,e),m.TokenParser,S(e)),P=(r,e={mode:"tokens"})=>(0,c.pipeline)(f(r,e),m.ChatParser,S(e));0&&(module.exports={ChatStream,EventStream,TokenStream});

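This hunk holds the three stream constructors. De-minified sketch (the StreamOptions interface and the parseTap helper name are reconstructions of minified identifiers; the control flow mirrors the source):

// streaming/streams.ts — turns OpenAI's SSE response body into byte streams.
import { createParser } from "eventsource-parser";
import { pipeline, yieldStream } from "yield-stream";
import { DECODER, ENCODER } from "../../globs/shared";
import { ChatParser, TokenParser } from "./transforms";
import { OpenAIError } from "../errors";

interface StreamOptions {
  mode?: "raw" | "tokens";
  onDone?: () => void | Promise<void>;
  onParse?: (chunk: string) => void;
}

// Emits each SSE "data:" payload as encoded bytes until the [DONE] sentinel, then
// closes the stream and calls onDone. A finish_reason of "length" becomes MAX_TOKENS.
export const EventStream = (
  stream: ReadableStream<Uint8Array>,
  { mode = "tokens", onDone }: StreamOptions = {}
) =>
  new ReadableStream<Uint8Array>({
    async start(controller) {
      const parser = createParser(async (event) => {
        if (event.type !== "event") return;
        const { data } = event;
        if (data === "[DONE]") {
          if (controller.desiredSize !== null) controller.close();
          await onDone?.();
          return;
        }
        try {
          const parsed = JSON.parse(data);
          controller.enqueue(ENCODER.encode(data));
          if (mode === "tokens" && parsed?.choices) {
            for (const choice of parsed.choices) {
              if (choice?.finish_reason === "length") throw new OpenAIError("MAX_TOKENS");
            }
          }
        } catch (error) {
          controller.error(error);
        }
      });

      for await (const chunk of yieldStream(stream)) {
        const text = DECODER.decode(chunk);
        // Error payloads arrive as plain JSON rather than SSE events; surface them.
        try {
          const parsed = JSON.parse(text);
          if (parsed.hasOwnProperty("error")) controller.error(new Error(parsed.error.message));
        } catch {}
        parser.feed(text);
      }
    },
  });

// Decodes each chunk, reports it to onParse, and re-encodes non-empty text.
const parseTap = ({ onParse }: StreamOptions = {}) =>
  async function* (chunk: Uint8Array) {
    const decoded = DECODER.decode(chunk);
    onParse?.(decoded);
    if (decoded) yield ENCODER.encode(decoded);
  };

// Token-mode pipelines: EventStream -> {Token,Chat}Parser -> onParse tap.
export const TokenStream = (stream: ReadableStream<Uint8Array>, options: StreamOptions = { mode: "tokens" }) =>
  pipeline(EventStream(stream, options), TokenParser, parseTap(options));

export const ChatStream = (stream: ReadableStream<Uint8Array>, options: StreamOptions = { mode: "tokens" }) =>
  pipeline(EventStream(stream, options), ChatParser, parseTap(options));
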
@@ -1,1 +1,1 @@

import{ENCODER as i,DECODER as d}from"../../globs/shared.js";import{OpenAIError as c}from"../errors.js";const f=o=>{const e=o?.choices?.[0];if(!e)throw console.error("No choices received from OpenAI"),new c("UNKNOWN");const n=e?.text??e?.message;if(typeof n!="string")throw console.error("No text received from OpenAI choice"),new c("UNKNOWN");return n},O=async function*(o){const e=d.decode(o),r=JSON.parse(e)?.choices?.[0],{delta:t}=r??{};if(typeof t!="object")throw console.error("Received invalid delta from OpenAI in ChatParser."),new c("UNKNOWN");const{content:s}=t;s&&(yield i.encode(s))},E=async function*(o){const e=d.decode(o),n=JSON.parse(e),r=f(n);yield i.encode(r)},l=async function*(o){const e=new TextEncoder,r=new TextDecoder().decode(o),t=JSON.parse(r),{logprobs:s}=t?.choices?.[0];s&&(yield e.encode(JSON.stringify(s)))};export{O as ChatParser,l as LogprobsParser,E as TokenParser,f as getTokensFromResponse};
"use strict";var d=Object.defineProperty;var p=Object.getOwnPropertyDescriptor;var a=Object.getOwnPropertyNames;var N=Object.prototype.hasOwnProperty;var m=(o,e)=>{for(var n in e)d(o,n,{get:e[n],enumerable:!0})},O=(o,e,n,s)=>{if(e&&typeof e=="object"||typeof e=="function")for(let r of a(e))!N.call(o,r)&&r!==n&&d(o,r,{get:()=>e[r],enumerable:!(s=p(e,r))||s.enumerable});return o};var E=o=>O(d({},"__esModule",{value:!0}),o);var x={};m(x,{ChatParser:()=>l,LogprobsParser:()=>y,TokenParser:()=>h,getTokensFromResponse:()=>f});module.exports=E(x);var t=require("../../globs/shared"),i=require("../errors");const f=o=>{const e=o?.choices?.[0];if(!e)throw console.error("No choices received from OpenAI"),new i.OpenAIError("UNKNOWN");const n=e?.text??e?.message;if(typeof n!="string")throw console.error("No text received from OpenAI choice"),new i.OpenAIError("UNKNOWN");return n},l=async function*(o){const e=t.DECODER.decode(o),s=JSON.parse(e)?.choices?.[0],{delta:r}=s??{};if(typeof r!="object")throw console.error("Received invalid delta from OpenAI in ChatParser."),new i.OpenAIError("UNKNOWN");const{content:c}=r;c&&(yield t.ENCODER.encode(c))},h=async function*(o){const e=t.DECODER.decode(o),n=JSON.parse(e),s=f(n);yield t.ENCODER.encode(s)},y=async function*(o){const e=new TextEncoder,s=new TextDecoder().decode(o),r=JSON.parse(s),{logprobs:c}=r?.choices?.[0];c&&(yield e.encode(JSON.stringify(c)))};0&&(module.exports={ChatParser,LogprobsParser,TokenParser,getTokensFromResponse});

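This hunk holds the per-chunk transforms behind the token pipelines, plus getTokensFromResponse for non-streaming responses. Sketch (local variable names are guesses; behavior follows the minified line):

// streaming/transforms.ts — per-chunk transforms used by the token-mode pipelines.
import { DECODER, ENCODER } from "../../globs/shared";
import { OpenAIError } from "../errors";

// Pulls the text (completions) or message (chat) out of a non-streaming response.
export const getTokensFromResponse = (response: any): string => {
  const choice = response?.choices?.[0];
  if (!choice) {
    console.error("No choices received from OpenAI");
    throw new OpenAIError("UNKNOWN");
  }
  const text = choice?.text ?? choice?.message;
  if (typeof text !== "string") {
    console.error("No text received from OpenAI choice");
    throw new OpenAIError("UNKNOWN");
  }
  return text;
};

// Chat streaming: each payload carries choices[0].delta; yield only delta.content.
export const ChatParser = async function* (chunk: Uint8Array) {
  const decoded = DECODER.decode(chunk);
  const choice = JSON.parse(decoded)?.choices?.[0];
  const { delta } = choice ?? {};
  if (typeof delta !== "object") {
    console.error("Received invalid delta from OpenAI in ChatParser.");
    throw new OpenAIError("UNKNOWN");
  }
  const { content } = delta;
  if (content) yield ENCODER.encode(content);
};

// Completions streaming: yield the choice text of each payload.
export const TokenParser = async function* (chunk: Uint8Array) {
  const decoded = DECODER.decode(chunk);
  yield ENCODER.encode(getTokensFromResponse(JSON.parse(decoded)));
};

// Yields the logprobs object of choices[0], if present, as JSON bytes.
export const LogprobsParser = async function* (chunk: Uint8Array) {
  const decoded = new TextDecoder().decode(chunk);
  const { logprobs } = JSON.parse(decoded)?.choices?.[0];
  if (logprobs) yield new TextEncoder().encode(JSON.stringify(logprobs));
};
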
@@ -1,1 +1,1 @@

const n={chat:"chat/completions",completions:"completions",edits:"edits",embeddings:"embeddings",images:"images","fine-tunes":"fine-tunes"};export*from"./pinned.js";export{n as OpenAIAPIEndpoints};
"use strict";var p=Object.defineProperty;var d=Object.getOwnPropertyDescriptor;var m=Object.getOwnPropertyNames;var A=Object.prototype.hasOwnProperty;var C=(t,e)=>{for(var n in e)p(t,n,{get:e[n],enumerable:!0})},i=(t,e,n,a)=>{if(e&&typeof e=="object"||typeof e=="function")for(let s of m(e))!A.call(t,s)&&s!==n&&p(t,s,{get:()=>e[s],enumerable:!(a=d(e,s))||a.enumerable});return t},r=(t,e,n)=>(i(t,e,"default"),n&&i(n,e,"default"));var I=t=>i(p({},"__esModule",{value:!0}),t);var o={};C(o,{OpenAIAPIEndpoints:()=>O});module.exports=I(o);r(o,require("./pinned"),module.exports);const O={chat:"chat/completions",completions:"completions",edits:"edits",embeddings:"embeddings",images:"images","fine-tunes":"fine-tunes"};0&&(module.exports={OpenAIAPIEndpoints,...require("./pinned")});
 {
-  "type": "module"
+  "type": "commonjs"
 }

@@ -6,3 +6,3 @@ {

 "homepage": "https://github.com/SpellcraftAI/openai-streams",
-"version": "5.9.0",
+"version": "5.9.1-cjs.0",
 "license": "MIT",

