llm-prompt-stream
Comparing versions 1.0.1 and 1.0.2
dist/index.d.ts
@@ -1,3 +0,2 @@
declare const streamPrompt: (completion: any) => ReadableStream<any> | undefined;
export { streamPrompt };
export * from "./utils";
//# sourceMappingURL=index.d.ts.map
dist/index.js
@@ -1,71 +0,2 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// src/index.ts
var index_exports = {};
__export(index_exports, {
  streamPrompt: () => streamPrompt
});
module.exports = __toCommonJS(index_exports);

// src/utils.ts
var streamPrompt = (completion) => {
  let buffer = "";
  let rawTotalResponse = "";
  let totalFormattedResponse = "";
  try {
    const stream = new ReadableStream({
      async start(controller) {
        const encoder = new TextEncoder();
        for await (const chunk of completion) {
          let content = chunk.choices[0]?.delta?.content;
          if (content) {
            rawTotalResponse += content;
            buffer += content;
            let lines = buffer.split(/(?=\n|^#{1,4}|\s-\s|\n\s\*\s|\n\d+\.\s)/);
            buffer = "";
            lines.forEach((line, index) => {
              if (index === lines.length - 1 && !line.endsWith("\n")) {
                buffer = line;
              } else {
                totalFormattedResponse += line;
                controller.enqueue(encoder.encode(line));
              }
            });
          }
        }
        if (buffer) {
          totalFormattedResponse += buffer;
          controller.enqueue(encoder.encode(buffer));
        }
        controller.enqueue(encoder.encode(`event: done
`));
        controller.close();
      }
    });
    return stream;
  } catch (e) {
    console.log(e);
  }
};

// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  streamPrompt
});
export * from "./utils";
//# sourceMappingURL=index.js.map
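The interesting part of this build output is streamPrompt's line buffering: deltas from the completion are accumulated in buffer, split at markdown boundaries (newlines, one to four # heading markers, dash or star bullets, numbered items), and only complete lines are enqueued; a trailing partial line is held back until the next chunk, and an event: done sentinel is appended before the stream closes. A minimal sketch of that behavior, not from the package itself; it assumes the published package name and fakes the OpenAI chunk shape:

// Sketch: feed streamPrompt an OpenAI-shaped fake completion and observe
// that deltas split mid-line are re-assembled before being enqueued.
import { streamPrompt } from "llm-prompt-stream";

// Async iterable mimicking `openai.chat.completions.create({ stream: true })`
// output; the heading is deliberately split across two deltas.
async function* fakeCompletion() {
  yield { choices: [{ delta: { content: "## Ti" } }] };
  yield { choices: [{ delta: { content: "tle\n- first item\n- sec" } }] };
  yield { choices: [{ delta: { content: "ond item\n" } }] };
}

async function main() {
  const stream = streamPrompt(fakeCompletion()); // ReadableStream | undefined
  if (!stream) return;
  const reader = stream.getReader();
  const decoder = new TextDecoder();
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    // Logs complete markdown units such as "## Title", then the
    // "event: done" sentinel appended before close.
    console.log(JSON.stringify(decoder.decode(value)));
  }
}

main();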
package.json
{
  "name": "llm-prompt-stream",
- "version": "1.0.1",
+ "version": "1.0.2",
  "description": "",
  "scripts": {
    "build": "tsup src/index.ts --format esm,cjs --dts",
-   "clean": "rm -rf dist"
+   "clean": "rm -rf dist",
+   "test": "vitest",
+   "test:watch": "vitest --watch"
  },
@@ -14,4 +16,8 @@ "keywords": [],
    "@types/node": "^22.13.9",
    "dotenv": "^16.4.7",
+   "happy-dom": "^17.2.2",
    "ts-node": "^10.9.2",
    "tsup": "^8.4.0",
-   "typescript": "^5.8.2"
+   "typescript": "^5.8.2",
+   "vitest": "^3.0.7"
  },
@@ -23,3 +29,4 @@ "types": "dist/index.d.ts",
    "openai": "^4.86.1"
  }
- }
+ },
+ "type": "module"
}
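The new test and test:watch scripts, together with the vitest and happy-dom devDependencies, point to a Vitest setup. A hypothetical test along those lines, assuming it sits next to src/utils.ts and uses parseMarkdownToCompletions (added below in src/utils.ts) to fake a completion without an API key:

// Hypothetical Vitest test; parseMarkdownToCompletions fakes an OpenAI
// stream, so no network access or key is required.
import { describe, expect, it } from "vitest";
import { parseMarkdownToCompletions, readStream, streamPrompt } from "./utils";

describe("streamPrompt", () => {
  it("re-emits the fed markdown and appends the done sentinel", async () => {
    const markdown = "# Title\n- one\n- two";
    // for await ... of in streamPrompt also accepts a sync generator.
    const stream = streamPrompt(parseMarkdownToCompletions(markdown));
    expect(stream).toBeDefined();
    const text = await readStream(stream!); // no file output by default
    expect(text).toContain("# Title");
    expect(text).toContain("event: done");
  });
});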
src/utils.ts
import OpenAI from "openai";
import * as dotenv from "dotenv";
import fs from "fs";
import path from "path";

dotenv.config();

export const openAIKey = process.env.OPENAI_API_KEY;

export const streamPrompt = (completion: any) => {
@@ -49,7 +56,10 @@ let buffer = "";
-export const setUpCompletionForStream = async (env: any) => {
-  const openai = new OpenAI({ apiKey: env.OPENAI_API_KEY });
+export const setUpCompletionForStream = async (
+  OPENAI_API_KEY: string,
+  messages: any = []
+) => {
+  const openai = new OpenAI({ apiKey: OPENAI_API_KEY });
   const completion = await openai.chat.completions.create({
     stream: true,
-    messages: [],
+    messages: messages,
     model: "gpt-4o-mini",
@@ -59,1 +69,57 @@ });
 };
+export function* parseMarkdownToCompletions(markdown: string) {
+  const lines = markdown.split("\n");
+  for (const line of lines) {
+    if (line.trim()) {
+      yield { choices: [{ delta: { content: line + "\n" } }] };
+    }
+  }
+  yield { choices: [{ delta: { content: "" } }] };
+}
+export async function readStream(
+  stream: ReadableStream,
+  createFile: boolean = false,
+  outputFilename: string = "response.md"
+): Promise<string> {
+  if (!stream || !(stream instanceof ReadableStream)) {
+    throw new Error("❌ Invalid stream provided to readStream.");
+  }
+  const reader = stream.getReader();
+  const decoder = new TextDecoder();
+  let fullResponse = "";
+  const filePath = path.resolve(outputFilename);
+  let fileStream = null;
+  if (createFile) {
+    fileStream = fs.createWriteStream(filePath, { encoding: "utf-8" });
+  }
+  let done = false;
+  while (!done) {
+    const { value, done: readerDone } = await reader.read();
+    done = readerDone;
+    if (value) {
+      const chunk = decoder.decode(value, { stream: true });
+      fullResponse += chunk;
+      if (createFile && fileStream) {
+        fileStream.write(chunk);
+      }
+    }
+  }
+  if (fileStream) {
+    fileStream.end();
+  }
+  return fullResponse;
+}
+export const createCompletionAndStream = async (
+  openAIKey: string,
+  messages: any
+) => {
+  const completion = await setUpCompletionForStream(openAIKey, messages);
+  return streamPrompt(completion);
+};
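Putting the new 1.0.2 surface together, a usage sketch; this is an assumption about intended use rather than documented behavior, it performs a real OpenAI call and expects OPENAI_API_KEY to be set in the environment or a .env file:

// End-to-end sketch: create a streamed completion and drain it to disk.
import { createCompletionAndStream, readStream } from "llm-prompt-stream";

async function main() {
  const stream = await createCompletionAndStream(process.env.OPENAI_API_KEY!, [
    { role: "user", content: "Explain streaming responses in two sentences." },
  ]);
  if (!stream) return; // streamPrompt returns undefined if setup fails

  // Drain the stream and mirror each chunk to response.md on disk.
  const text = await readStream(stream, true, "response.md");
  console.log(text);
}

main();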
tsconfig.json
{
  "compilerOptions": {
-   "module": "CommonJS",
-   "target": "ESNext",
    "outDir": "dist",
    "declaration": true,
    "declarationMap": true,
+   "module": "ESNext",
+   "target": "ESNext",
+   "lib": ["ESNext", "DOM"],
    "sourceMap": true,
    "rootDir": "src",
    "strict": true,
-   "moduleResolution": "node"
+   "esModuleInterop": true
  },
- "include": ["src"],
- "exclude": ["node_modules", "dist"]
+ "include": ["src"]
}
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package

Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package