langbase - npm Package Compare versions

Comparing version 0.6.0 to 1.0.0

dist/index.d.ts

@@ -66,2 +66,4 @@ import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';

variables?: Variable[];
threadId?: string;
chat?: boolean;
}

@@ -71,4 +73,6 @@ interface StreamOptions {

variables?: Variable[];
threadId?: string | null;
chat?: boolean;
}
interface ChoiceNonStream {
interface ChoiceGenerate {
index: number;

@@ -95,16 +99,19 @@ message: Message;

}
interface GenerateNonStreamResponse {
interface GenerateResponse {
completion: string;
raw: {
id: string;
object: string;
created: number;
model: string;
choices: ChoiceNonStream[];
usage: Usage;
system_fingerprint: string | null;
};
threadId?: string;
id: string;
object: string;
created: number;
model: string;
choices: ChoiceGenerate[];
usage: Usage;
system_fingerprint: string | null;
}
type GenerateStreamResponse = Stream<GenerateStreamChunk>;
interface GenerateStreamChunk {
type StreamText = Stream<StreamChunk>;
interface StreamResponse {
stream: StreamText;
threadId: string | null;
}
interface StreamChunk {
id: string;

@@ -123,6 +130,6 @@ object: string;

constructor(options: PipeOptions);
generateText(options: GenerateOptions): Promise<GenerateNonStreamResponse>;
streamText(options: StreamOptions): Promise<GenerateStreamResponse>;
generateText(options: GenerateOptions): Promise<GenerateResponse>;
streamText(options: StreamOptions): Promise<StreamResponse>;
}
export { Pipe, type PipeOptions, fromReadableStream };
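
The declaration changes above rename the response types and alter what the two public methods resolve to: `generateText()` now returns a flattened `GenerateResponse` (the fields that 0.6.0 nested under `raw` now sit at the top level), and `streamText()` resolves to a `StreamResponse` wrapper carrying the stream together with `threadId`, instead of a bare `Stream`. A minimal usage sketch based only on these declarations (the prompt and the chunk handling follow the README example further below):

```ts
import 'dotenv/config';
import {Pipe} from 'langbase';

const pipe = new Pipe({apiKey: process.env.LANGBASE_PIPE_API_KEY!});

// 1.0.0: GenerateResponse exposes completion, choices, usage, etc. at the top level.
const generated = await pipe.generateText({
	messages: [{role: 'user', content: 'Who is an AI Engineer?'}],
});
console.log(generated.completion, generated.usage);

// 1.0.0: streamText() resolves to {stream, threadId} rather than a bare stream.
const {stream, threadId} = await pipe.streamText({
	messages: [{role: 'user', content: 'Who is an AI Engineer?'}],
});
console.log('Thread:', threadId);
for await (const chunk of stream) {
	process.stdout.write(chunk.choices[0]?.delta?.content || '');
}
```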

@@ -568,17 +568,9 @@ "use strict";

}
async send(options) {
const url = `${this.config.baseUrl}${options.endpoint}`;
const headers = {
"Content-Type": "application/json",
Authorization: `Bearer ${this.config.apiKey}`,
...options.headers
};
// Main send function
async send({ endpoint, ...options }) {
const url = this.buildUrl({ endpoint });
const headers = this.buildHeaders({ headers: options.headers });
let response;
try {
response = await fetch(url, {
method: options.method,
headers,
body: JSON.stringify(options.body),
signal: AbortSignal.timeout(this.config.timeout || 3e4)
});
response = await this.makeRequest({ url, options, headers });
} catch (error) {

@@ -590,22 +582,77 @@ throw new APIConnectionError({

if (!response.ok) {
let errorBody;
try {
errorBody = await response.json();
} catch (e) {
errorBody = await response.text();
}
throw APIError.generate(
response.status,
errorBody,
response.statusText,
response.headers
);
await this.handleErrorResponse({ response });
}
if (options.stream) {
const controller = new AbortController();
return Stream.fromSSEResponse(response, controller);
} else {
return response.json();
const threadId = response.headers.get("lb-thread-id");
if (options.body.stream) {
return this.handleStreamResponse({ response });
}
return this.handleGenerateResponse({
response,
isChat: options.body.chat,
threadId
});
}
buildUrl({ endpoint }) {
return `${this.config.baseUrl}${endpoint}`;
}
buildHeaders({
headers
}) {
return {
"Content-Type": "application/json",
Authorization: `Bearer ${this.config.apiKey}`,
...headers
};
}
async makeRequest({
url,
options,
headers
}) {
return fetch(url, {
method: options.method,
headers,
body: JSON.stringify(options.body),
signal: AbortSignal.timeout(this.config.timeout || 3e4)
});
}
async handleErrorResponse({
response
}) {
let errorBody;
try {
errorBody = await response.json();
} catch (e) {
errorBody = await response.text();
}
throw APIError.generate(
response.status,
errorBody,
response.statusText,
response.headers
);
}
handleStreamResponse({ response }) {
const controller = new AbortController();
const stream = Stream.fromSSEResponse(response, controller);
return { stream, threadId: response.headers.get("lb-thread-id") };
}
async handleGenerateResponse({
response,
isChat,
threadId
}) {
const generateResponse = await response.json();
const buildResponse = generateResponse.raw ? {
completion: generateResponse.completion,
...generateResponse.raw
} : generateResponse;
if (isChat && threadId) {
return {
threadId,
...buildResponse
};
}
return buildResponse;
}
async post(options) {

@@ -633,5 +680,4 @@ return this.send({ ...options, method: "POST" });

return this.request.post({
endpoint: "/beta/generate",
body: { ...options, stream: false },
stream: false
endpoint: options.chat ? "/beta/chat" : "/beta/generate",
body: { ...options, stream: false }
});

@@ -641,6 +687,4 @@ }

return this.request.post({
endpoint: "/beta/generate",
body: { ...options, stream: true },
stream: true
// TODO: @ahmadbilaldev - why we need to add here as well?
endpoint: options.chat ? "/beta/chat" : "/beta/generate",
body: { ...options, stream: true }
});

@@ -647,0 +691,0 @@ }
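
Two behavioral changes in the request layer above are worth noting: both `generateText()` and `streamText()` now post to `/beta/chat` instead of `/beta/generate` whenever `options.chat` is set, and `send()` reads the `lb-thread-id` response header and attaches it to chat responses as `threadId`. A hypothetical sketch of a multi-turn chat, assuming the `threadId` option in `GenerateOptions` accepts the id returned by the previous call:

```ts
import 'dotenv/config';
import {Pipe} from 'langbase';

const pipe = new Pipe({apiKey: process.env.LANGBASE_PIPE_API_KEY!});

// chat: true routes the request to /beta/chat; the lb-thread-id response
// header is surfaced on the result as threadId.
const first = await pipe.generateText({
	chat: true,
	messages: [{role: 'user', content: 'My name is Ada.'}],
});

// Assumption: passing the returned threadId back in continues the same thread.
const followUp = await pipe.generateText({
	chat: true,
	threadId: first.threadId,
	messages: [{role: 'user', content: 'What did I just tell you my name was?'}],
});
console.log(followUp.completion);
```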

{
"name": "langbase",
"version": "0.6.0",
"version": "1.0.0",
"license": "Apache-2.0",

@@ -28,4 +28,4 @@ "sideEffects": false,

"vitest": "1.6.0",
"@langbase/eslint-config": "0.1.0",
"@langbase/tsconfig": "0.1.0"
"@langbase/eslint-config": "1.0.0",
"@langbase/tsconfig": "1.0.0"
},

@@ -73,3 +73,4 @@ "publishConfig": {

"prettier-check": "prettier --check \"./**/*.ts*\"",
"test": "pnpm test:node && pnpm test:edge && pnpm test:ui && pnpm test:e2e",
"test": "pnpm test:node && pnpm test:edge",
"#test": "pnpm test:node && pnpm test:edge && pnpm test:ui && pnpm test:e2e",
"test:edge": "vitest --config vitest.edge.config.js --run",

@@ -76,0 +77,0 @@ "test:node": "vitest --config vitest.node.config.js --run",

@@ -7,7 +7,8 @@ # Langbase SDK

Check the [Langbase SDK documentation](https://langbase.com/docs/langbase-sdk/overview) for more details.
> [!NOTE]
> Check the [Langbase SDK documentation](https://langbase.com/docs/langbase-sdk/overview) for more details.
The following examples are for reference only; prefer the docs for the latest information.
## Getting Started with `langbase` SDK
## Getting started with `langbase` SDK

@@ -34,89 +35,8 @@ ### Installation

### Usage
## Documentation
You can [`generateText`](https://langbase.com/docs/langbase-sdk/generate-text) or [`streamText`](https://langbase.com/docs/langbase-sdk/stream-text) based on the type of a pipe.
Please read the [SDK documentation](https://langbase.com/docs/langbase-sdk/overview) →
Check our [SDK documentation](https://langbase.com/docs/langbase-sdk/overview) for more details.
## Examples
### Example projects
Check the following examples:
- [Node: Generate Text](https://github.com/LangbaseInc/langbase-sdk/blob/main/examples/everything/generate-text.ts)
- [Node: Stream Text](https://github.com/LangbaseInc/langbase-sdk/blob/main/examples/everything/stream-text.ts)
- [Next.js Example](https://github.com/LangbaseInc/langbase-sdk/tree/main/examples/nextjs)
  - TypeScript code
  - [React component](https://github.com/LangbaseInc/langbase-sdk/tree/main/examples/nextjs/components/langbase) to display the response
  - [API Route handlers](https://github.com/LangbaseInc/langbase-sdk/tree/main/examples/nextjs/app/api/langbase/pipe) to send requests to ⌘ Langbase
### Node.js Example Code
## Node.js Examples
### Add a `.env` file with your Pipe API key
```bash
# Add your Pipe API key here.
LANGBASE_PIPE_API_KEY="pipe_12345"
```
---
### Generate text [`generateText()`](https://langbase.com/docs/langbase-sdk/generate-text)
For more details, check the API reference of [`generateText()`](https://langbase.com/docs/langbase-sdk/generate-text).
```ts
import 'dotenv/config';
import {Pipe} from 'langbase';

// 1. Initiate the Pipe.
const pipe = new Pipe({
	// Make sure you have a .env file with the API key of the pipe you want to use.
	// As a demo, we're using a pipe that gives less wordy responses.
	apiKey: process.env.LANGBASE_PIPE_API_KEY!,
});

// 2. Generate the text by asking a question.
const result = await pipe.generateText({
	messages: [{role: 'user', content: 'Who is an AI Engineer?'}],
});

// 3. Done: You got the generated completion.
console.log(result.completion);
```
---
### Stream text [`streamText()`](https://langbase.com/docs/langbase-sdk/stream-text)
For more details, check the API reference of [`streamText()`](https://langbase.com/docs/langbase-sdk/stream-text).
```ts
import 'dotenv/config';
import {Pipe} from 'langbase';

// 1. Initiate the Pipe.
const pipe = new Pipe({
	// Make sure you have a .env file with the API key of the pipe you want to use.
	// As a demo, we're using a pipe that gives less wordy responses.
	apiKey: process.env.LANGBASE_PIPE_API_KEY!,
});

// 2. Generate a stream by asking a question.
const stream = await pipe.streamText({
	messages: [{role: 'user', content: 'Who is an AI Engineer?'}],
});

// 3. Print the stream.
for await (const chunk of stream) {
	// Streaming text part — a single word or several.
	const textPart = chunk.choices[0]?.delta?.content || '';
	// Demo: Print the stream — you can use it however.
	process.stdout.write(textPart);
}
```
Check out [more examples in the docs](https://langbase.com/docs/langbase-sdk/examples) →
