Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@turbopuffer/turbopuffer

Package Overview
Dependencies
Maintainers
0
Versions
22
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@turbopuffer/turbopuffer - npm Package Compare versions

Comparing version 0.5.6 to 0.5.7

5

dist/httpClient.d.ts

@@ -1,2 +0,1 @@

/// <reference types="node" />
export interface RequestParams {

@@ -13,2 +12,3 @@ method: string;

body_read_time: number;
decompress_time: number;
deserialize_time: number;

@@ -18,3 +18,3 @@ };

body?: T;
headers: Headers;
headers: Record<string, string>;
request_timing: RequestTiming;

@@ -38,2 +38,3 @@ }>;

private baseUrl;
private origin;
private apiKey;

@@ -40,0 +41,0 @@ readonly userAgent: string;

91

dist/httpClient.js
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.TurbopufferError = exports.createHTTPClient = void 0;
const undici_1 = require("undici");
const pako_1 = __importDefault(require("pako"));
const package_json_1 = require("../package.json");
const node_zlib_1 = require("node:zlib");
const node_util_1 = require("node:util");
const gunzipAsync = (0, node_util_1.promisify)(node_zlib_1.gunzip);
const gzipAsync = (0, node_util_1.promisify)(node_zlib_1.gzip);
/**

@@ -25,2 +25,4 @@ * This a helper function that returns a class for making fetch requests

this.baseUrl = baseUrl;
this.origin = new URL(baseUrl);
this.origin.pathname = "";
this.apiKey = apiKey;

@@ -53,2 +55,6 @@ this.agent = new undici_1.Agent({

}
path = url.pathname;
if (query) {
path += "?" + url.search;
}
const headers = {

@@ -68,3 +74,3 @@ // eslint-disable-next-line @typescript-eslint/naming-convention

headers["Content-Encoding"] = "gzip";
requestBody = pako_1.default.gzip(JSON.stringify(body));
requestBody = await gzipAsync(JSON.stringify(body));
}

@@ -83,7 +89,8 @@ else if (body) {

try {
response = await (0, undici_1.fetch)(url.toString(), {
method,
response = await this.agent.request({
origin: this.origin,
path,
method: method,
headers,
body: requestBody,
dispatcher: this.agent,
});

@@ -112,7 +119,8 @@ }

response_start = performance.now();
if (!error && response.status >= 400) {
if (!error && response.statusCode >= 400) {
let message = undefined;
if (response.headers.get("Content-Type") === "application/json") {
const { body_text } = await consumeResponseText(response);
if (response.headers["content-type"] === "application/json") {
try {
const body = (await response.json());
const body = JSON.parse(body_text);
if (body && body.status === "error") {

@@ -122,3 +130,3 @@ message = body.error;

else {
message = JSON.stringify(body);
message = body_text;
}

@@ -131,14 +139,6 @@ }

else {
try {
const body = await response.text();
if (body) {
message = body;
}
}
catch (_) {
/* empty */
}
message = body_text;
}
error = new TurbopufferError(message !== null && message !== void 0 ? message : response.statusText, {
status: response.status,
error = new TurbopufferError(message !== null && message !== void 0 ? message : `http error ${response.statusCode}`, {
status: response.statusCode,
});

@@ -159,10 +159,7 @@ }

return {
headers: response.headers,
headers: convertHeadersType(response.headers),
request_timing: make_request_timing(request_start, response_start),
};
}
// internally json() will read the full body, decode utf-8, and allocate a string,
// so splitting it up into text() and JSON.parse() has no performance impact
const body_text = await response.text();
const body_read_end = performance.now();
const { body_text, body_read_end, decompress_end } = await consumeResponseText(response);
const json = JSON.parse(body_text);

@@ -172,3 +169,3 @@ const deserialize_end = performance.now();

throw new TurbopufferError(json.error || json, {
status: response.status,
status: response.statusCode,
});

@@ -178,4 +175,4 @@ }

body: json,
headers: response.headers,
request_timing: make_request_timing(request_start, response_start, body_read_end, deserialize_end),
headers: convertHeadersType(response.headers),
request_timing: make_request_timing(request_start, response_start, body_read_end, decompress_end, deserialize_end),
};

@@ -201,9 +198,37 @@ }

}
function make_request_timing(request_start, response_start, body_read_end, deserialize_end) {
/**
 * Builds the RequestTiming record from the performance.now() timestamps
 * captured during a request. Later-stage timestamps are optional; any
 * duration whose endpoints were not captured is reported as 0.
 *
 * @param request_start    when the request was dispatched
 * @param response_start   when response headers arrived
 * @param body_read_end    when the raw body was fully read (optional)
 * @param decompress_end   when decompression finished (optional)
 * @param deserialize_end  when JSON parsing finished (optional)
 * @returns { response_time, body_read_time, decompress_time, deserialize_time }
 */
function make_request_timing(request_start, response_start, body_read_end, decompress_end, deserialize_end) {
    const timing = {
        response_time: response_start - request_start,
        body_read_time: 0,
        decompress_time: 0,
        deserialize_time: 0,
    };
    if (body_read_end) {
        timing.body_read_time = body_read_end - response_start;
        if (decompress_end) {
            timing.decompress_time = decompress_end - body_read_end;
        }
    }
    if (deserialize_end && decompress_end) {
        timing.deserialize_time = deserialize_end - decompress_end;
    }
    return timing;
}
/**
 * Normalizes undici's incoming header record (values may be string,
 * string[], or undefined) into a plain Record<string, string>, the shape
 * the public API promises.
 *
 * Builds a fresh object instead of mutating the caller-owned headers
 * record in place, and avoids `delete`, which deoptimizes object shapes
 * in V8. All call sites use only the return value, so the change is
 * transparent to callers.
 *
 * @param headers header record as produced by undici
 * @returns a new record with undefined entries dropped and multi-value
 *          (array) headers collapsed to their first value
 */
function convertHeadersType(headers) {
    const result = {};
    for (const key in headers) {
        const v = headers[key];
        if (v === undefined) {
            continue; // drop absent headers entirely
        }
        // Multi-value headers arrive as arrays; keep only the first value,
        // matching the previous in-place conversion's behavior.
        result[key] = Array.isArray(v) ? v[0] : v;
    }
    return result;
}
/**
 * Reads the response body to completion and returns it as text, along with
 * the timestamps needed for request timing: when the raw body finished
 * reading and when decompression (if any) finished.
 *
 * Fixes the loose `==` comparison on the Content-Encoding check (`===` per
 * house style) and flattens the else-after-return.
 *
 * @param response undici dispatcher response ({ headers, body })
 * @returns {Promise<{ body_text: string, body_read_end: number, decompress_end: number }>}
 */
async function consumeResponseText(response) {
    // undici exposes headers as a plain record with lowercased keys.
    if (response.headers["content-encoding"] === "gzip") {
        const body_buffer = await response.body.arrayBuffer();
        const body_read_end = performance.now();
        const gunzip_buffer = await gunzipAsync(body_buffer);
        // Buffer.toString decodes as UTF-8; made explicit for clarity.
        const body_text = gunzip_buffer.toString("utf-8");
        const decompress_end = performance.now();
        return { body_text, body_read_end, decompress_end };
    }
    const body_text = await response.body.text();
    const body_read_end = performance.now();
    // No decompression step: reuse the read timestamp so decompress_time is 0.
    return { body_text, body_read_end, decompress_end: body_read_end };
}
//# sourceMappingURL=httpClient.js.map

@@ -51,2 +51,3 @@ /**

deserialize_time: number;
decompress_time: number;
}

@@ -53,0 +54,0 @@ export interface NamespaceMetadata {

@@ -141,3 +141,3 @@ "use strict";

});
const serverTimingStr = response.headers.get("Server-Timing");
const serverTimingStr = response.headers["server-timing"];
const serverTiming = serverTimingStr

@@ -149,3 +149,3 @@ ? parseServerTiming(serverTimingStr)

metrics: {
approx_namespace_size: parseIntMetric(response.headers.get("X-turbopuffer-Approx-Namespace-Size")),
approx_namespace_size: parseIntMetric(response.headers["x-turbopuffer-approx-namespace-size"]),
cache_hit_ratio: parseFloatMetric(serverTiming["cache.hit_ratio"]),

@@ -157,2 +157,3 @@ cache_temperature: serverTiming["cache.temperature"],

body_read_time: response.request_timing.body_read_time,
decompress_time: response.request_timing.decompress_time,
deserialize_time: response.request_timing.deserialize_time,

@@ -193,5 +194,5 @@ },

id: this.id,
approx_count: parseInt(response.headers.get("X-turbopuffer-Approx-Num-Vectors")),
dimensions: parseInt(response.headers.get("X-turbopuffer-Dimensions")),
created_at: new Date(response.headers.get("X-turbopuffer-Created-At")),
approx_count: parseInt(response.headers["x-turbopuffer-approx-num-vectors"]),
dimensions: parseInt(response.headers["x-turbopuffer-dimensions"]),
created_at: new Date(response.headers["x-turbopuffer-created-at"]),
};

@@ -198,0 +199,0 @@ }

{
"name": "@turbopuffer/turbopuffer",
"version": "0.5.6",
"version": "0.5.7",
"description": "Official Typescript API client library for turbopuffer.com",

@@ -48,4 +48,4 @@ "scripts": {

"pako": "^2.1.0",
"undici": "^6.13.0"
"undici": "^6.19.8"
}
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc