together-ai
Comparing version 0.6.0-alpha.8 to 0.6.0
@@ -10,3 +10,3 @@ /** | ||
export { type ReadStream as FsReadStream } from 'node:fs'; | ||
export { ReadableStream } from 'web-streams-polyfill'; | ||
export { ReadableStream } from 'node:stream/web'; | ||
@@ -13,0 +13,0 @@ export const fetch: typeof nf.default; |
# Changelog

## 0.6.0 (2024-10-22)

Full Changelog: [v0.6.0-alpha.8...v0.6.0](https://github.com/togethercomputer/together-typescript/compare/v0.6.0-alpha.8...v0.6.0)

### Features

* **api:** api update ([#67](https://github.com/togethercomputer/together-typescript/issues/67)) ([21e06d1](https://github.com/togethercomputer/together-typescript/commit/21e06d173bd0eda660241ebc54c3308b21c9b4ba))
* **api:** OpenAPI spec update via Stainless API ([#55](https://github.com/togethercomputer/together-typescript/issues/55)) ([ebe1c62](https://github.com/togethercomputer/together-typescript/commit/ebe1c628baac31d27aca53ef23531e8585b3c7af))
* **api:** OpenAPI spec update via Stainless API ([#62](https://github.com/togethercomputer/together-typescript/issues/62)) ([b6af173](https://github.com/togethercomputer/together-typescript/commit/b6af17302473f7ee3ed8eea2671d6920f6a6edc2))

### Bug Fixes

* **client:** correct File construction from node-fetch Responses ([#54](https://github.com/togethercomputer/together-typescript/issues/54)) ([e1d5c6b](https://github.com/togethercomputer/together-typescript/commit/e1d5c6bd0ef8985a9525a82a31f2ac3385e6e2bd))
* **errors:** pass message through to APIConnectionError ([#60](https://github.com/togethercomputer/together-typescript/issues/60)) ([0d0ede4](https://github.com/togethercomputer/together-typescript/commit/0d0ede46e8e34c64a65fe79790cecfca3277e6b3))
* **uploads:** avoid making redundant memory copies ([#57](https://github.com/togethercomputer/together-typescript/issues/57)) ([e88f744](https://github.com/togethercomputer/together-typescript/commit/e88f7449116fa4237a9601e1fd87f939b6678041))

### Chores

* better object fallback behaviour for casting errors ([#61](https://github.com/togethercomputer/together-typescript/issues/61)) ([bad19ff](https://github.com/togethercomputer/together-typescript/commit/bad19ffd41ab7223845b9763346e49e5364d8182))
* **ci:** install deps via ./script/bootstrap ([#52](https://github.com/togethercomputer/together-typescript/issues/52)) ([a22842a](https://github.com/togethercomputer/together-typescript/commit/a22842aaff23412ff69e4aea0ff99eb16bd8408c))
* **internal:** codegen related update ([#56](https://github.com/togethercomputer/together-typescript/issues/56)) ([8fd1782](https://github.com/togethercomputer/together-typescript/commit/8fd17821e4ac71372deebc1c08bea38735dbd1e7))
* **internal:** codegen related update ([#58](https://github.com/togethercomputer/together-typescript/issues/58)) ([e5b82e7](https://github.com/togethercomputer/together-typescript/commit/e5b82e7097f75420acf5b7c3d952ddfbd05a3832))
* **internal:** codegen related update ([#59](https://github.com/togethercomputer/together-typescript/issues/59)) ([962541e](https://github.com/togethercomputer/together-typescript/commit/962541e18d35c6c91e8710544ffa44693be7c2c6))
* **internal:** codegen related update ([#63](https://github.com/togethercomputer/together-typescript/issues/63)) ([6093fb9](https://github.com/togethercomputer/together-typescript/commit/6093fb9410ddc0580ffd7634e73cdb4075d94050))
* **internal:** move LineDecoder to a separate file ([#64](https://github.com/togethercomputer/together-typescript/issues/64)) ([9a5999d](https://github.com/togethercomputer/together-typescript/commit/9a5999d5fcd0dd92b18fd8a31e5db7c5f1eab326))
* **internal:** pass props through internal parser ([#65](https://github.com/togethercomputer/together-typescript/issues/65)) ([162bc3c](https://github.com/togethercomputer/together-typescript/commit/162bc3c9a02d7c0c878a6c14fc9d711c90282e89))

## 0.6.0-alpha.8 (2024-08-29)

@@ -4,0 +33,0 @@
@@ -25,3 +25,3 @@ | ||
constructor(responsePromise: Promise<APIResponseProps>, parseResponse?: (props: APIResponseProps) => PromiseOrValue<T>); | ||
_thenUnwrap<U>(transform: (data: T) => U): APIPromise<U>; | ||
_thenUnwrap<U>(transform: (data: T, props: APIResponseProps) => U): APIPromise<U>; | ||
/** | ||
@@ -102,3 +102,5 @@ * Gets the raw `Response` instance instead of parsing the response | ||
private calculateContentLength; | ||
buildRequest<Req>(options: FinalRequestOptions<Req>): { | ||
buildRequest<Req>(options: FinalRequestOptions<Req>, { retryCount }?: { | ||
retryCount?: number; | ||
}): { | ||
req: RequestInit; | ||
@@ -237,3 +239,4 @@ url: string; | ||
export declare const isHeadersProtocol: (headers: any) => headers is HeadersProtocol; | ||
export declare const getRequiredHeader: (headers: HeadersLike, header: string) => string; | ||
export declare const getRequiredHeader: (headers: HeadersLike | Headers, header: string) => string; | ||
export declare const getHeader: (headers: HeadersLike | Headers, header: string) => string | undefined; | ||
/** | ||
@@ -240,0 +243,0 @@ * Encodes a string to Base64 format. |
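The header helpers in `core` are now split: the new `getHeader` returns `undefined` when a header is absent, while `getRequiredHeader` keeps the old throwing behaviour and is built on top of it. A minimal sketch of the difference, assuming the helpers are imported from the package's internal `core` module (shown purely for illustration):

```ts
import { getHeader, getRequiredHeader } from 'together-ai/core';

const headers = { 'Content-Type': 'application/json' };

// Optional lookup: case-insensitive, returns undefined instead of throwing.
const retryCount = getHeader(headers, 'x-stainless-retry-count');
console.log(retryCount); // undefined

// Required lookup: still throws `Error: Could not find x-request-id header`.
const requestId = getRequiredHeader(headers, 'x-request-id'); // throws here
```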
core.js
@@ -15,3 +15,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.isObj = exports.toBase64 = exports.getRequiredHeader = exports.isHeadersProtocol = exports.isRunningInBrowser = exports.debug = exports.hasOwn = exports.isEmptyObj = exports.maybeCoerceBoolean = exports.maybeCoerceFloat = exports.maybeCoerceInteger = exports.coerceBoolean = exports.coerceFloat = exports.coerceInteger = exports.readEnv = exports.ensurePresent = exports.castToError = exports.sleep = exports.safeJSON = exports.isRequestOptions = exports.createResponseHeaders = exports.PagePromise = exports.AbstractPage = exports.APIClient = exports.APIPromise = exports.createForm = exports.multipartFormRequestOptions = exports.maybeMultipartFormRequestOptions = void 0; | ||
exports.isObj = exports.toBase64 = exports.getHeader = exports.getRequiredHeader = exports.isHeadersProtocol = exports.isRunningInBrowser = exports.debug = exports.hasOwn = exports.isEmptyObj = exports.maybeCoerceBoolean = exports.maybeCoerceFloat = exports.maybeCoerceInteger = exports.coerceBoolean = exports.coerceFloat = exports.coerceInteger = exports.readEnv = exports.ensurePresent = exports.castToError = exports.sleep = exports.safeJSON = exports.isRequestOptions = exports.createResponseHeaders = exports.PagePromise = exports.AbstractPage = exports.APIClient = exports.APIPromise = exports.createForm = exports.multipartFormRequestOptions = exports.maybeMultipartFormRequestOptions = void 0; | ||
const version_1 = require("./version.js"); | ||
@@ -72,3 +72,3 @@ const streaming_1 = require("./streaming.js"); | ||
_thenUnwrap(transform) { | ||
return new APIPromise(this.responsePromise, async (props) => transform(await this.parseResponse(props))); | ||
return new APIPromise(this.responsePromise, async (props) => transform(await this.parseResponse(props), props)); | ||
} | ||
@@ -205,3 +205,3 @@ /** | ||
} | ||
buildRequest(options) { | ||
buildRequest(options, { retryCount = 0 } = {}) { | ||
const { method, path, query, headers: headers = {} } = options; | ||
@@ -233,3 +233,3 @@ const body = ArrayBuffer.isView(options.body) || (options.__binaryRequest && typeof options.body === 'string') ? | ||
} | ||
const reqHeaders = this.buildHeaders({ options, headers, contentLength }); | ||
const reqHeaders = this.buildHeaders({ options, headers, contentLength, retryCount }); | ||
const req = { | ||
@@ -246,3 +246,3 @@ method, | ||
} | ||
buildHeaders({ options, headers, contentLength, }) { | ||
buildHeaders({ options, headers, contentLength, retryCount, }) { | ||
const reqHeaders = {}; | ||
@@ -259,2 +259,9 @@ if (contentLength) { | ||
} | ||
// Don't set the retry count header if it was already set or removed through default headers or by the | ||
// caller. We check `defaultHeaders` and `headers`, which can contain nulls, instead of `reqHeaders` to | ||
// account for the removal case. | ||
if ((0, exports.getHeader)(defaultHeaders, 'x-stainless-retry-count') === undefined && | ||
(0, exports.getHeader)(headers, 'x-stainless-retry-count') === undefined) { | ||
reqHeaders['x-stainless-retry-count'] = String(retryCount); | ||
} | ||
this.validateHeaders(reqHeaders, headers); | ||
@@ -288,7 +295,8 @@ return reqHeaders; | ||
const options = await optionsInput; | ||
const maxRetries = options.maxRetries ?? this.maxRetries; | ||
if (retriesRemaining == null) { | ||
retriesRemaining = options.maxRetries ?? this.maxRetries; | ||
retriesRemaining = maxRetries; | ||
} | ||
await this.prepareOptions(options); | ||
const { req, url, timeout } = this.buildRequest(options); | ||
const { req, url, timeout } = this.buildRequest(options, { retryCount: maxRetries - retriesRemaining }); | ||
await this.prepareRequest(req, { url, options }); | ||
@@ -716,2 +724,8 @@ debug('request', url, options, req.headers); | ||
return err; | ||
if (typeof err === 'object' && err !== null) { | ||
try { | ||
return new Error(JSON.stringify(err)); | ||
} | ||
catch { } | ||
} | ||
return new Error(err); | ||
@@ -855,2 +869,10 @@ }; | ||
const getRequiredHeader = (headers, header) => { | ||
const foundHeader = (0, exports.getHeader)(headers, header); | ||
if (foundHeader === undefined) { | ||
throw new Error(`Could not find ${header} header`); | ||
} | ||
return foundHeader; | ||
}; | ||
exports.getRequiredHeader = getRequiredHeader; | ||
const getHeader = (headers, header) => { | ||
const lowerCasedHeader = header.toLowerCase(); | ||
@@ -879,5 +901,5 @@ if ((0, exports.isHeadersProtocol)(headers)) { | ||
} | ||
throw new Error(`Could not find ${header} header`); | ||
return undefined; | ||
}; | ||
exports.getRequiredHeader = getRequiredHeader; | ||
exports.getHeader = getHeader; | ||
/** | ||
@@ -884,0 +906,0 @@ * Encodes a string to Base64 format. |
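Error casting also gains a fallback for thrown plain objects: instead of collapsing to `Error: [object Object]`, the object is JSON-stringified when possible. The snippet below is an illustrative restatement of the logic shown in the hunk above, not a public API:

```ts
// Illustrative sketch mirroring the updated castToError fallback in core.js.
function castToError(err: unknown): Error {
  if (err instanceof Error) return err;
  if (typeof err === 'object' && err !== null) {
    try {
      // e.g. a rejection carrying { code: 'ECONNRESET' }
      return new Error(JSON.stringify(err));
    } catch {
      // fall through for objects that cannot be stringified (circular refs, BigInt, ...)
    }
  }
  return new Error(String(err));
}

console.log(castToError({ code: 'ECONNRESET' }).message); // {"code":"ECONNRESET"}
```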
@@ -21,3 +21,3 @@ import { Headers } from "./core.js"; | ||
constructor({ message, cause }: { | ||
message?: string; | ||
message?: string | undefined; | ||
cause?: Error | undefined; | ||
@@ -24,0 +24,0 @@ }); |
@@ -36,3 +36,3 @@ "use strict"; | ||
if (!status) { | ||
return new APIConnectionError({ cause: (0, core_1.castToError)(errorResponse) }); | ||
return new APIConnectionError({ message, cause: (0, core_1.castToError)(errorResponse) }); | ||
} | ||
@@ -39,0 +39,0 @@ const error = errorResponse; |
{ | ||
"name": "together-ai", | ||
"version": "0.6.0-alpha.8", | ||
"version": "0.6.0", | ||
"description": "The official TypeScript library for the Together API", | ||
@@ -5,0 +5,0 @@ "author": "Together <dev-feedback@TogetherAI.com>", |
@@ -335,1 +335,5 @@ # Together Node API Library

If you are interested in other runtime environments, please open or upvote an issue on GitHub.

## Contributing

See [the contributing documentation](./CONTRIBUTING.md).
@@ -69,2 +69,3 @@ import { APIResource } from "../../resource.js"; | ||
logprobs?: number | null; | ||
seed?: number | null; | ||
} | ||
@@ -215,2 +216,6 @@ namespace Choice { | ||
/** | ||
* Seed value for reproducibility. | ||
*/ | ||
seed?: number; | ||
/** | ||
* A list of string sequences that will truncate (stop) inference text output. For | ||
@@ -217,0 +222,0 @@ * example, "</s>" will stop generation as soon as the model generates the given |
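Both the chat and plain completion endpoints gain a `seed` request parameter ("Seed value for reproducibility"), and the response typings now carry a `seed` field as well. A hedged usage sketch; the model id is a placeholder, and the location of the echoed seed follows the updated typings rather than anything stated here:

```ts
import Together from 'together-ai';

const client = new Together(); // picks up TOGETHER_API_KEY from the environment

async function main() {
  const completion = await client.chat.completions.create({
    model: 'meta-llama/Llama-3-8b-chat-hf', // placeholder model id
    messages: [{ role: 'user', content: 'Say hello.' }],
    seed: 42, // ask the backend for a reproducible sample
  });

  console.log(completion.choices[0]?.message?.content);
  // Per the new typings, the response now also reports the seed that was used.
}

main();
```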
@@ -28,2 +28,3 @@ import { APIResource } from "../resource.js"; | ||
logprobs?: CompletionsAPI.LogProbs; | ||
seed?: number; | ||
text?: string; | ||
@@ -134,2 +135,6 @@ } | ||
/** | ||
* Seed value for reproducibility. | ||
*/ | ||
seed?: number; | ||
/** | ||
* A list of string sequences that will truncate (stop) inference text output. For | ||
@@ -136,0 +141,0 @@ * example, "</s>" will stop generation as soon as the model generates the given |
@@ -40,7 +40,2 @@ import { APIResource } from "../resource.js"; | ||
learning_rate?: number; | ||
lora?: boolean; | ||
lora_alpha?: number; | ||
lora_dropout?: number; | ||
lora_r?: number; | ||
lora_trainable_modules?: string; | ||
model?: string; | ||
@@ -57,2 +52,3 @@ model_output_name?: string; | ||
training_file?: string; | ||
training_type?: FineTune.FullTrainingType | FineTune.LoRaTrainingType; | ||
trainingfile_numlines?: number; | ||
@@ -64,2 +60,3 @@ trainingfile_size?: number; | ||
wandb_url?: string; | ||
warmup_ratio?: number; | ||
} | ||
@@ -78,2 +75,12 @@ export declare namespace FineTune { | ||
} | ||
interface FullTrainingType { | ||
type: 'Full'; | ||
} | ||
interface LoRaTrainingType { | ||
lora_alpha: number; | ||
lora_r: number; | ||
type: 'Lora'; | ||
lora_dropout?: number; | ||
lora_trainable_modules?: string; | ||
} | ||
} | ||
@@ -129,23 +136,2 @@ export interface FineTuneEvent { | ||
/** | ||
* Whether to enable LoRA training. If not provided, full fine-tuning will be | ||
* applied. | ||
*/ | ||
lora?: boolean; | ||
/** | ||
* The alpha value for LoRA adapter training. | ||
*/ | ||
lora_alpha?: number; | ||
/** | ||
* The dropout probability for Lora layers. | ||
*/ | ||
lora_dropout?: number; | ||
/** | ||
* Rank for LoRA adapter weights | ||
*/ | ||
lora_r?: number; | ||
/** | ||
* A list of LoRA trainable modules, separated by a comma | ||
*/ | ||
lora_trainable_modules?: string; | ||
/** | ||
* Number of checkpoints to save during fine-tuning | ||
@@ -166,2 +152,3 @@ */ | ||
suffix?: string; | ||
training_type?: FineTuneCreateParams.FullTrainingType | FineTuneCreateParams.LoRaTrainingType; | ||
/** | ||
@@ -175,3 +162,20 @@ * File-ID of a validation file uploaded to the Together API | ||
wandb_api_key?: string; | ||
/** | ||
* The percent of steps at the start of training to linearly increase the | ||
* learning-rate. | ||
*/ | ||
warmup_ratio?: number; | ||
} | ||
export declare namespace FineTuneCreateParams { | ||
interface FullTrainingType { | ||
type: 'Full'; | ||
} | ||
interface LoRaTrainingType { | ||
lora_alpha: number; | ||
lora_r: number; | ||
type: 'Lora'; | ||
lora_dropout?: number; | ||
lora_trainable_modules?: string; | ||
} | ||
} | ||
export interface FineTuneDownloadParams { | ||
@@ -178,0 +182,0 @@ /** |
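The flat `lora`, `lora_alpha`, `lora_dropout`, `lora_r`, and `lora_trainable_modules` fields are replaced by a single `training_type` union: `{ type: 'Full' }` for full fine-tuning, or a `'Lora'` object in which `lora_alpha` and `lora_r` are required. A sketch of creating a LoRA job against the new shape; the resource accessor (`client.fineTune`), model id, and file id are assumptions made for illustration:

```ts
import Together from 'together-ai';

const client = new Together();

async function main() {
  const job = await client.fineTune.create({
    model: 'meta-llama/Meta-Llama-3-8B', // placeholder model id
    training_file: 'file-abc123',        // placeholder id of an uploaded training file
    training_type: {
      type: 'Lora',
      lora_r: 8,          // adapter rank (required for LoRA jobs)
      lora_alpha: 16,     // scaling factor (required for LoRA jobs)
      lora_dropout: 0.05, // optional
    },
    warmup_ratio: 0.03,   // new: fraction of steps spent linearly warming up the learning rate
  });

  console.log(job.id);
}

main();
```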
import * as types from "../_shims/node-types.js"; | ||
@@ -5,0 +6,0 @@ declare module '../_shims/manual-types' { |
@@ -10,3 +10,3 @@ /** | ||
export { type ReadStream as FsReadStream } from 'node:fs'; | ||
export { ReadableStream } from 'web-streams-polyfill'; | ||
export { ReadableStream } from 'node:stream/web'; | ||
@@ -13,0 +13,0 @@ export const fetch: typeof nf.default; |
@@ -101,4 +101,6 @@ import { VERSION } from './version'; | ||
_thenUnwrap<U>(transform: (data: T) => U): APIPromise<U> { | ||
return new APIPromise(this.responsePromise, async (props) => transform(await this.parseResponse(props))); | ||
_thenUnwrap<U>(transform: (data: T, props: APIResponseProps) => U): APIPromise<U> { | ||
return new APIPromise(this.responsePromise, async (props) => | ||
transform(await this.parseResponse(props), props), | ||
); | ||
} | ||
@@ -292,3 +294,6 @@ | ||
buildRequest<Req>(options: FinalRequestOptions<Req>): { req: RequestInit; url: string; timeout: number } { | ||
buildRequest<Req>( | ||
options: FinalRequestOptions<Req>, | ||
{ retryCount = 0 }: { retryCount?: number } = {}, | ||
): { req: RequestInit; url: string; timeout: number } { | ||
const { method, path, query, headers: headers = {} } = options; | ||
@@ -325,3 +330,3 @@ | ||
const reqHeaders = this.buildHeaders({ options, headers, contentLength }); | ||
const reqHeaders = this.buildHeaders({ options, headers, contentLength, retryCount }); | ||
@@ -345,2 +350,3 @@ const req: RequestInit = { | ||
contentLength, | ||
retryCount, | ||
}: { | ||
@@ -350,2 +356,3 @@ options: FinalRequestOptions; | ||
contentLength: string | null | undefined; | ||
retryCount: number; | ||
}): Record<string, string> { | ||
@@ -366,2 +373,12 @@ const reqHeaders: Record<string, string> = {}; | ||
// Don't set the retry count header if it was already set or removed through default headers or by the | ||
// caller. We check `defaultHeaders` and `headers`, which can contain nulls, instead of `reqHeaders` to | ||
// account for the removal case. | ||
if ( | ||
getHeader(defaultHeaders, 'x-stainless-retry-count') === undefined && | ||
getHeader(headers, 'x-stainless-retry-count') === undefined | ||
) { | ||
reqHeaders['x-stainless-retry-count'] = String(retryCount); | ||
} | ||
this.validateHeaders(reqHeaders, headers); | ||
@@ -418,4 +435,5 @@ | ||
const options = await optionsInput; | ||
const maxRetries = options.maxRetries ?? this.maxRetries; | ||
if (retriesRemaining == null) { | ||
retriesRemaining = options.maxRetries ?? this.maxRetries; | ||
retriesRemaining = maxRetries; | ||
} | ||
@@ -425,3 +443,3 @@ | ||
const { req, url, timeout } = this.buildRequest(options); | ||
const { req, url, timeout } = this.buildRequest(options, { retryCount: maxRetries - retriesRemaining }); | ||
@@ -1005,2 +1023,7 @@ await this.prepareRequest(req, { url, options }); | ||
if (err instanceof Error) return err; | ||
if (typeof err === 'object' && err !== null) { | ||
try { | ||
return new Error(JSON.stringify(err)); | ||
} catch {} | ||
} | ||
return new Error(err); | ||
@@ -1143,3 +1166,11 @@ }; | ||
export const getRequiredHeader = (headers: HeadersLike, header: string): string => { | ||
export const getRequiredHeader = (headers: HeadersLike | Headers, header: string): string => { | ||
const foundHeader = getHeader(headers, header); | ||
if (foundHeader === undefined) { | ||
throw new Error(`Could not find ${header} header`); | ||
} | ||
return foundHeader; | ||
}; | ||
export const getHeader = (headers: HeadersLike | Headers, header: string): string | undefined => { | ||
const lowerCasedHeader = header.toLowerCase(); | ||
@@ -1170,3 +1201,3 @@ if (isHeadersProtocol(headers)) { | ||
throw new Error(`Could not find ${header} header`); | ||
return undefined; | ||
}; | ||
@@ -1173,0 +1204,0 @@ |
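As the comment in the hunk explains, every attempt now sends an `x-stainless-retry-count` header unless the caller has already set the header or removed it; a `null` value in default headers removes a header in this client. A hedged sketch of both ways to take control of it, with option and method names following the generated client's usual surface:

```ts
import Together from 'together-ai';

// Opt out globally: a null default header removes it, so the SDK will not
// add its own x-stainless-retry-count value to any request.
const client = new Together({
  defaultHeaders: { 'x-stainless-retry-count': null },
});

// Or pin a value for a single call via per-request options.
async function main() {
  await client.chat.completions.create(
    {
      model: 'meta-llama/Llama-3-8b-chat-hf', // placeholder model id
      messages: [{ role: 'user', content: 'ping' }],
    },
    { headers: { 'x-stainless-retry-count': '0' } },
  );
}

main();
```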
@@ -52,3 +52,3 @@ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. | ||
if (!status) { | ||
return new APIConnectionError({ cause: castToError(errorResponse) }); | ||
return new APIConnectionError({ message, cause: castToError(errorResponse) }); | ||
} | ||
@@ -105,3 +105,3 @@ | ||
constructor({ message, cause }: { message?: string; cause?: Error | undefined }) { | ||
constructor({ message, cause }: { message?: string | undefined; cause?: Error | undefined }) { | ||
super(undefined, undefined, message || 'Connection error.', undefined); | ||
@@ -108,0 +108,0 @@ // in some environments the 'cause' property is already declared |
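With the `message` now passed through, connection failures surface the underlying cause instead of the bare 'Connection error.' string. A small sketch of inspecting it, assuming the error classes hang off the exported client class as elsewhere in this SDK family:

```ts
import Together from 'together-ai';

const client = new Together();

async function main() {
  try {
    await client.models.list();
  } catch (err) {
    if (err instanceof Together.APIConnectionError) {
      // err.message now reflects the underlying failure when one is available,
      // falling back to 'Connection error.'; err.cause keeps the original error.
      console.error('connection failed:', err.message, err.cause);
    } else {
      throw err;
    }
  }
}

main();
```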
@@ -118,2 +118,4 @@ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. | ||
logprobs?: number | null; | ||
seed?: number | null; | ||
} | ||
@@ -320,2 +322,7 @@ | ||
/** | ||
* Seed value for reproducibility. | ||
*/ | ||
seed?: number; | ||
/** | ||
* A list of string sequences that will truncate (stop) inference text output. For | ||
@@ -322,0 +329,0 @@ * example, "</s>" will stop generation as soon as the model generates the given |
@@ -55,2 +55,4 @@ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. | ||
seed?: number; | ||
text?: string; | ||
@@ -191,2 +193,7 @@ } | ||
/** | ||
* Seed value for reproducibility. | ||
*/ | ||
seed?: number; | ||
/** | ||
* A list of string sequences that will truncate (stop) inference text output. For | ||
@@ -193,0 +200,0 @@ * example, "</s>" will stop generation as soon as the model generates the given |
@@ -82,12 +82,2 @@ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. | ||
lora?: boolean; | ||
lora_alpha?: number; | ||
lora_dropout?: number; | ||
lora_r?: number; | ||
lora_trainable_modules?: string; | ||
model?: string; | ||
@@ -115,2 +105,4 @@ | ||
training_type?: FineTune.FullTrainingType | FineTune.LoRaTrainingType; | ||
trainingfile_numlines?: number; | ||
@@ -127,2 +119,4 @@ | ||
wandb_url?: string; | ||
warmup_ratio?: number; | ||
} | ||
@@ -175,2 +169,18 @@ | ||
} | ||
export interface FullTrainingType { | ||
type: 'Full'; | ||
} | ||
export interface LoRaTrainingType { | ||
lora_alpha: number; | ||
lora_r: number; | ||
type: 'Lora'; | ||
lora_dropout?: number; | ||
lora_trainable_modules?: string; | ||
} | ||
} | ||
@@ -277,28 +287,2 @@ | ||
/** | ||
* Whether to enable LoRA training. If not provided, full fine-tuning will be | ||
* applied. | ||
*/ | ||
lora?: boolean; | ||
/** | ||
* The alpha value for LoRA adapter training. | ||
*/ | ||
lora_alpha?: number; | ||
/** | ||
* The dropout probability for Lora layers. | ||
*/ | ||
lora_dropout?: number; | ||
/** | ||
* Rank for LoRA adapter weights | ||
*/ | ||
lora_r?: number; | ||
/** | ||
* A list of LoRA trainable modules, separated by a comma | ||
*/ | ||
lora_trainable_modules?: string; | ||
/** | ||
* Number of checkpoints to save during fine-tuning | ||
@@ -323,2 +307,4 @@ */ | ||
training_type?: FineTuneCreateParams.FullTrainingType | FineTuneCreateParams.LoRaTrainingType; | ||
/** | ||
@@ -333,4 +319,28 @@ * File-ID of a validation file uploaded to the Together API | ||
wandb_api_key?: string; | ||
/** | ||
* The percent of steps at the start of training to linearly increase the | ||
* learning-rate. | ||
*/ | ||
warmup_ratio?: number; | ||
} | ||
export namespace FineTuneCreateParams { | ||
export interface FullTrainingType { | ||
type: 'Full'; | ||
} | ||
export interface LoRaTrainingType { | ||
lora_alpha: number; | ||
lora_r: number; | ||
type: 'Lora'; | ||
lora_dropout?: number; | ||
lora_trainable_modules?: string; | ||
} | ||
} | ||
export interface FineTuneDownloadParams { | ||
@@ -337,0 +347,0 @@ /** |
import { ReadableStream, type Response } from './_shims/index'; | ||
import { TogetherError } from './error'; | ||
import { LineDecoder } from './internal/decoders/line'; | ||
@@ -332,113 +333,2 @@ import { APIError } from "./error"; | ||
/** | ||
* A re-implementation of httpx's `LineDecoder` in Python that handles incrementally | ||
* reading lines from text. | ||
* | ||
* https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258 | ||
*/ | ||
class LineDecoder { | ||
// prettier-ignore | ||
static NEWLINE_CHARS = new Set(['\n', '\r']); | ||
static NEWLINE_REGEXP = /\r\n|[\n\r]/g; | ||
buffer: string[]; | ||
trailingCR: boolean; | ||
textDecoder: any; // TextDecoder found in browsers; not typed to avoid pulling in either "dom" or "node" types. | ||
constructor() { | ||
this.buffer = []; | ||
this.trailingCR = false; | ||
} | ||
decode(chunk: Bytes): string[] { | ||
let text = this.decodeText(chunk); | ||
if (this.trailingCR) { | ||
text = '\r' + text; | ||
this.trailingCR = false; | ||
} | ||
if (text.endsWith('\r')) { | ||
this.trailingCR = true; | ||
text = text.slice(0, -1); | ||
} | ||
if (!text) { | ||
return []; | ||
} | ||
const trailingNewline = LineDecoder.NEWLINE_CHARS.has(text[text.length - 1] || ''); | ||
let lines = text.split(LineDecoder.NEWLINE_REGEXP); | ||
// if there is a trailing new line then the last entry will be an empty | ||
// string which we don't care about | ||
if (trailingNewline) { | ||
lines.pop(); | ||
} | ||
if (lines.length === 1 && !trailingNewline) { | ||
this.buffer.push(lines[0]!); | ||
return []; | ||
} | ||
if (this.buffer.length > 0) { | ||
lines = [this.buffer.join('') + lines[0], ...lines.slice(1)]; | ||
this.buffer = []; | ||
} | ||
if (!trailingNewline) { | ||
this.buffer = [lines.pop() || '']; | ||
} | ||
return lines; | ||
} | ||
decodeText(bytes: Bytes): string { | ||
if (bytes == null) return ''; | ||
if (typeof bytes === 'string') return bytes; | ||
// Node: | ||
if (typeof Buffer !== 'undefined') { | ||
if (bytes instanceof Buffer) { | ||
return bytes.toString(); | ||
} | ||
if (bytes instanceof Uint8Array) { | ||
return Buffer.from(bytes).toString(); | ||
} | ||
throw new TogetherError( | ||
`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.`, | ||
); | ||
} | ||
// Browser | ||
if (typeof TextDecoder !== 'undefined') { | ||
if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { | ||
this.textDecoder ??= new TextDecoder('utf8'); | ||
return this.textDecoder.decode(bytes); | ||
} | ||
throw new TogetherError( | ||
`Unexpected: received non-Uint8Array/ArrayBuffer (${ | ||
(bytes as any).constructor.name | ||
}) in a web platform. Please report this error.`, | ||
); | ||
} | ||
throw new TogetherError( | ||
`Unexpected: neither Buffer nor TextDecoder are available as globals. Please report this error.`, | ||
); | ||
} | ||
flush(): string[] { | ||
if (!this.buffer.length && !this.trailingCR) { | ||
return []; | ||
} | ||
const lines = [this.buffer.join('')]; | ||
this.buffer = []; | ||
this.trailingCR = false; | ||
return lines; | ||
} | ||
} | ||
/** This is an internal helper function that's just used for testing */ | ||
@@ -445,0 +335,0 @@ export function _decodeChunks(chunks: string[]): string[] { |
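`LineDecoder` itself is unchanged; it simply moved to `internal/decoders/line`. Its job is to reassemble complete lines from arbitrarily split stream chunks, including a `\r\n` pair that straddles two chunks. The `_decodeChunks` helper kept in this module (internal, test-only) makes the semantics easy to see; the import path is an assumption:

```ts
import { _decodeChunks } from 'together-ai/streaming';

// A line split across chunk boundaries is buffered until its newline arrives.
console.log(_decodeChunks(['data: hel', 'lo\n\n']));
// -> [ 'data: hello', '' ]   (the empty string is the blank line ending an SSE event)

// A CRLF split across two chunks counts as one line break, not two.
console.log(_decodeChunks(['foo\r', '\nbar\n']));
// -> [ 'foo', 'bar' ]
```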
@@ -110,4 +110,6 @@ import { type RequestOptions } from './core'; | ||
// Use the file's options if there isn't one provided | ||
options ??= isFileLike(value) ? { lastModified: value.lastModified, type: value.type } : {}; | ||
// If we've been given a `File` we don't need to do anything | ||
if (isFileLike(value)) { | ||
return value; | ||
} | ||
@@ -118,3 +120,8 @@ if (isResponseLike(value)) { | ||
return new File([blob as any], name, options); | ||
// we need to convert the `Blob` into an array buffer because the `Blob` class | ||
// that `node-fetch` defines is incompatible with the web standard which results | ||
// in `new File` interpreting it as a string instead of binary data. | ||
const data = isBlobLike(blob) ? [(await blob.arrayBuffer()) as any] : [blob]; | ||
return new File(data, name, options); | ||
} | ||
@@ -126,3 +133,3 @@ | ||
if (!options.type) { | ||
if (!options?.type) { | ||
const type = (bits[0] as any)?.type; | ||
@@ -129,0 +136,0 @@ if (typeof type === 'string') { |
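The upload fix means `toFile` now returns an already-constructed `File` untouched and reads a node-fetch `Blob` into an `ArrayBuffer` before building the `File`, so binary bodies are no longer coerced to strings. A hedged sketch of building a file from a fetched response; the URL is a placeholder, and `toFile` is assumed to be re-exported from the package root (otherwise it lives under `together-ai/uploads`):

```ts
import { toFile } from 'together-ai';

async function main() {
  // The file name falls back to the last path segment of the response URL
  // when one is not passed explicitly.
  const response = await fetch('https://example.com/train.jsonl'); // placeholder URL
  const file = await toFile(response, 'train.jsonl');

  console.log(file.name, file.size); // binary contents preserved byte-for-byte
}

main();
```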
@@ -1,1 +0,1 @@ | ||
export const VERSION = '0.6.0-alpha.8'; // x-release-please-version | ||
export const VERSION = '0.6.0'; // x-release-please-version |
@@ -6,2 +6,3 @@ "use strict"; | ||
const error_1 = require("./error.js"); | ||
const line_1 = require("./internal/decoders/line.js"); | ||
const error_2 = require("together-ai/error"); | ||
@@ -68,3 +69,3 @@ class Stream { | ||
async function* iterLines() { | ||
const lineDecoder = new LineDecoder(); | ||
const lineDecoder = new line_1.LineDecoder(); | ||
const iter = readableStreamAsyncIterable(readableStream); | ||
@@ -175,3 +176,3 @@ for await (const chunk of iter) { | ||
const sseDecoder = new SSEDecoder(); | ||
const lineDecoder = new LineDecoder(); | ||
const lineDecoder = new line_1.LineDecoder(); | ||
const iter = readableStreamAsyncIterable(response.body); | ||
@@ -286,87 +287,5 @@ for await (const sseChunk of iterSSEChunks(iter)) { | ||
} | ||
/** | ||
* A re-implementation of httpx's `LineDecoder` in Python that handles incrementally | ||
* reading lines from text. | ||
* | ||
* https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258 | ||
*/ | ||
class LineDecoder { | ||
constructor() { | ||
this.buffer = []; | ||
this.trailingCR = false; | ||
} | ||
decode(chunk) { | ||
let text = this.decodeText(chunk); | ||
if (this.trailingCR) { | ||
text = '\r' + text; | ||
this.trailingCR = false; | ||
} | ||
if (text.endsWith('\r')) { | ||
this.trailingCR = true; | ||
text = text.slice(0, -1); | ||
} | ||
if (!text) { | ||
return []; | ||
} | ||
const trailingNewline = LineDecoder.NEWLINE_CHARS.has(text[text.length - 1] || ''); | ||
let lines = text.split(LineDecoder.NEWLINE_REGEXP); | ||
// if there is a trailing new line then the last entry will be an empty | ||
// string which we don't care about | ||
if (trailingNewline) { | ||
lines.pop(); | ||
} | ||
if (lines.length === 1 && !trailingNewline) { | ||
this.buffer.push(lines[0]); | ||
return []; | ||
} | ||
if (this.buffer.length > 0) { | ||
lines = [this.buffer.join('') + lines[0], ...lines.slice(1)]; | ||
this.buffer = []; | ||
} | ||
if (!trailingNewline) { | ||
this.buffer = [lines.pop() || '']; | ||
} | ||
return lines; | ||
} | ||
decodeText(bytes) { | ||
if (bytes == null) | ||
return ''; | ||
if (typeof bytes === 'string') | ||
return bytes; | ||
// Node: | ||
if (typeof Buffer !== 'undefined') { | ||
if (bytes instanceof Buffer) { | ||
return bytes.toString(); | ||
} | ||
if (bytes instanceof Uint8Array) { | ||
return Buffer.from(bytes).toString(); | ||
} | ||
throw new error_1.TogetherError(`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.`); | ||
} | ||
// Browser | ||
if (typeof TextDecoder !== 'undefined') { | ||
if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { | ||
this.textDecoder ?? (this.textDecoder = new TextDecoder('utf8')); | ||
return this.textDecoder.decode(bytes); | ||
} | ||
throw new error_1.TogetherError(`Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. Please report this error.`); | ||
} | ||
throw new error_1.TogetherError(`Unexpected: neither Buffer nor TextDecoder are available as globals. Please report this error.`); | ||
} | ||
flush() { | ||
if (!this.buffer.length && !this.trailingCR) { | ||
return []; | ||
} | ||
const lines = [this.buffer.join('')]; | ||
this.buffer = []; | ||
this.trailingCR = false; | ||
return lines; | ||
} | ||
} | ||
// prettier-ignore | ||
LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r']); | ||
LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r]/g; | ||
/** This is an internal helper function that's just used for testing */ | ||
function _decodeChunks(chunks) { | ||
const decoder = new LineDecoder(); | ||
const decoder = new line_1.LineDecoder(); | ||
const lines = []; | ||
@@ -373,0 +292,0 @@ for (const chunk of chunks) { |
@@ -46,12 +46,18 @@ "use strict"; | ||
value = await value; | ||
// Use the file's options if there isn't one provided | ||
options ?? (options = (0, exports.isFileLike)(value) ? { lastModified: value.lastModified, type: value.type } : {}); | ||
// If we've been given a `File` we don't need to do anything | ||
if ((0, exports.isFileLike)(value)) { | ||
return value; | ||
} | ||
if ((0, exports.isResponseLike)(value)) { | ||
const blob = await value.blob(); | ||
name || (name = new URL(value.url).pathname.split(/[\\/]/).pop() ?? 'unknown_file'); | ||
return new index_1.File([blob], name, options); | ||
// we need to convert the `Blob` into an array buffer because the `Blob` class | ||
// that `node-fetch` defines is incompatible with the web standard which results | ||
// in `new File` interpreting it as a string instead of binary data. | ||
const data = (0, exports.isBlobLike)(blob) ? [(await blob.arrayBuffer())] : [blob]; | ||
return new index_1.File(data, name, options); | ||
} | ||
const bits = await getBytes(value); | ||
name || (name = getName(value) ?? 'unknown_file'); | ||
if (!options.type) { | ||
if (!options?.type) { | ||
const type = bits[0]?.type; | ||
@@ -58,0 +64,0 @@ if (typeof type === 'string') { |
@@ -1,2 +0,2 @@ | ||
export declare const VERSION = "0.6.0-alpha.8"; | ||
export declare const VERSION = "0.6.0"; | ||
//# sourceMappingURL=version.d.ts.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.VERSION = void 0; | ||
exports.VERSION = '0.6.0-alpha.8'; // x-release-please-version | ||
exports.VERSION = '0.6.0'; // x-release-please-version | ||
//# sourceMappingURL=version.js.map |
Diffs of the remaining changed files are not shown.