Huge News! Announcing our $40M Series B, led by Abstract Ventures. Learn more.
Socket
Sign in · Demo · Install
Socket

@miniflare/r2

Package Overview
Dependencies
Maintainers
2
Versions
23
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@miniflare/r2 — npm package: compare versions

Comparing version 2.11.0 to 2.12.0

125

dist/src/index.d.ts

@@ -5,2 +5,3 @@ /// <reference types="node" />

import { Headers } from 'undici';
import { ParsedRange } from '@miniflare/shared';
import { Plugin } from '@miniflare/shared';

@@ -13,10 +14,41 @@ import { PluginContext } from '@miniflare/shared';

export declare function createHash(input: Uint8Array): string;
export declare class Checksums implements R2Checksums<ArrayBuffer> {
#private;
constructor(checksums: R2Checksums<string>);
get md5(): ArrayBuffer | undefined;
get sha1(): ArrayBuffer | undefined;
get sha256(): ArrayBuffer | undefined;
get sha384(): ArrayBuffer | undefined;
get sha512(): ArrayBuffer | undefined;
toJSON(): R2Checksums<string>;
}
export declare function createMD5Hash(input: Uint8Array): string;
export declare function createMultipartUpload(key: string, metadata: R2MultipartIndexMetadata, opts: InternalR2MultipartUploadOptions): Promise<R2MultipartUpload>;
export declare function createVersion(): string;
export declare function deleteMultipartParts(storage: Storage, key: string, uploadId: string, excludeKeys?: Set<string>): Promise<void>;
export declare function getMultipartValue(storage: Storage, key: string, multipart: R2MultipartReference, range: ParsedRange): ReadableStream<Uint8Array>;
export declare const HEX_REGEXP: RegExp;
/* Excluded from this release type: _INTERNAL_PREFIX */
export declare interface InternalR2BucketOptions {
blockGlobalAsyncIO?: boolean;
listRespectInclude?: boolean;
minMultipartUploadSize?: number;
}
export declare interface InternalR2MultipartUploadOptions {
storage: Storage;
blockGlobalAsyncIO?: boolean;
minMultipartUploadSize?: number;
}
export declare const MAX_KEY_SIZE = 1024;
export declare function parseHttpMetadata(httpMetadata?: R2HTTPMetadata | Headers): R2HTTPMetadata;

@@ -28,5 +60,27 @@

export declare const R2_HASH_ALGORITHMS: readonly [{
readonly name: "MD5";
readonly field: "md5";
readonly expectedBytes: 16;
}, {
readonly name: "SHA-1";
readonly field: "sha1";
readonly expectedBytes: 20;
}, {
readonly name: "SHA-256";
readonly field: "sha256";
readonly expectedBytes: 32;
}, {
readonly name: "SHA-384";
readonly field: "sha384";
readonly expectedBytes: 48;
}, {
readonly name: "SHA-512";
readonly field: "sha512";
readonly expectedBytes: 64;
}];
export declare class R2Bucket {
#private;
constructor(storage: Storage, { blockGlobalAsyncIO }?: InternalR2BucketOptions);
constructor(storage: Storage, { blockGlobalAsyncIO, listRespectInclude, minMultipartUploadSize, }?: InternalR2BucketOptions);
head(key: string): Promise<R2Object | null>;

@@ -39,6 +93,16 @@ /**

put(key: string, value: R2PutValueType, options?: R2PutOptions): Promise<R2Object | null>;
delete(key: string): Promise<void>;
delete(keys: string | string[]): Promise<void>;
list(listOptions?: R2ListOptions): Promise<R2Objects>;
createMultipartUpload(key: string, options?: R2MultipartOptions): Promise<R2MultipartUpload>;
resumeMultipartUpload(key: string, uploadId: string): Promise<R2MultipartUpload>;
}
export declare interface R2Checksums<T extends ArrayBuffer | string> {
md5?: T;
sha1?: T;
sha256?: T;
sha384?: T;
sha512?: T;
}
export declare interface R2Conditional {

@@ -60,5 +124,7 @@ etagMatches?: string | string[];

onlyIf?: R2Conditional | Headers;
range?: R2Range;
range?: R2Range | Headers;
}
export declare type R2HashAlgorithm = typeof R2_HASH_ALGORITHMS[number];
/**

@@ -95,2 +161,33 @@ * Metadata that's automatically rendered into R2 HTTP API endpoints.

export declare type R2MultipartIndexMetadata = R2MultipartPendingIndexMetadata | {
aborted: true;
} | {
completed: true;
};
export declare type R2MultipartOptions = Pick<R2PutOptions, "httpMetadata" | "customMetadata">;
export declare interface R2MultipartPendingIndexMetadata {
httpMetadata: R2ObjectMetadata["httpMetadata"];
customMetadata: R2ObjectMetadata["customMetadata"];
}
export declare interface R2MultipartReference {
uploadId: string;
parts: {
partNumber: number;
size: number;
}[];
}
export declare class R2MultipartUpload {
#private;
readonly key: string;
readonly uploadId: string;
constructor(key: string, uploadId: string, opts: InternalR2MultipartUploadOptions);
uploadPart(partNumber: number, value: ReadableStream | ArrayBuffer | ArrayBufferView | string | Blob_2): Promise<R2UploadedPart>;
abort(): Promise<void>;
complete(uploadedParts: R2UploadedPart[]): Promise<R2Object>;
}
/**

@@ -103,2 +200,3 @@ * R2Object is created when you PUT an object into an R2 bucket.

export declare class R2Object {
#private;
readonly key: string;

@@ -114,2 +212,3 @@ readonly version: string;

constructor(metadata: R2ObjectMetadata);
get checksums(): Checksums;
writeHttpMetadata(headers: Headers): void;

@@ -120,4 +219,4 @@ }

readonly body: ReadableStream<Uint8Array>;
readonly bodyUsed: boolean;
constructor(metadata: R2ObjectMetadata, value: Uint8Array);
constructor(metadata: R2ObjectMetadata, value: Uint8Array | ReadableStream<Uint8Array>);
get bodyUsed(): boolean;
arrayBuffer(): Promise<ArrayBuffer>;

@@ -139,2 +238,4 @@ text(): Promise<string>;

range?: R2Range;
checksums?: R2Checksums<string>;
multipart?: R2MultipartReference;
}

@@ -163,7 +264,6 @@

export declare interface R2PutOptions {
export declare interface R2PutOptions extends R2Checksums<ArrayBuffer | string> {
onlyIf?: R2Conditional | Headers;
httpMetadata?: R2HTTPMetadata | Headers;
customMetadata?: Record<string, string>;
md5?: ArrayBuffer | string;
}

@@ -179,6 +279,13 @@

export declare function testR2Conditional(conditional: R2Conditional, metadata?: R2ObjectMetadata): boolean;
export declare interface R2UploadedPart {
partNumber: number;
etag: string;
}
export declare function testR2Conditional(conditional: R2Conditional, metadata?: Pick<R2ObjectMetadata, "etag" | "uploaded">): boolean;
export declare function validateMultipartKey(method: string, key: string): void;
/* Excluded from this release type: _valueToArray */
export { }

@@ -36,13 +36,23 @@ var __create = Object.create;

__export(exports, {
Checksums: () => Checksums,
HEX_REGEXP: () => HEX_REGEXP,
MAX_KEY_SIZE: () => MAX_KEY_SIZE,
R2Bucket: () => R2Bucket,
R2MultipartUpload: () => R2MultipartUpload,
R2Object: () => R2Object,
R2ObjectBody: () => R2ObjectBody,
R2Plugin: () => R2Plugin,
R2_HASH_ALGORITHMS: () => R2_HASH_ALGORITHMS,
_INTERNAL_PREFIX: () => _INTERNAL_PREFIX,
_valueToArray: () => _valueToArray,
createHash: () => createHash,
createMD5Hash: () => createMD5Hash,
createMultipartUpload: () => createMultipartUpload,
createVersion: () => createVersion,
deleteMultipartParts: () => deleteMultipartParts,
getMultipartValue: () => getMultipartValue,
parseHttpMetadata: () => parseHttpMetadata,
parseOnlyIf: () => parseOnlyIf,
parseR2ObjectMetadata: () => parseR2ObjectMetadata,
testR2Conditional: () => testR2Conditional
testR2Conditional: () => testR2Conditional,
validateMultipartKey: () => validateMultipartKey
});

@@ -52,18 +62,65 @@

var import_buffer2 = __toModule(require("buffer"));
var import_crypto3 = __toModule(require("crypto"));
var import_consumers3 = __toModule(require("stream/consumers"));
var import_web3 = __toModule(require("stream/web"));
var import_util2 = __toModule(require("util"));
var import_core3 = __toModule(require("@miniflare/core"));
var import_shared3 = __toModule(require("@miniflare/shared"));
var import_undici2 = __toModule(require("undici"));
// packages/r2/src/multipart.ts
var import_assert2 = __toModule(require("assert"));
var import_buffer = __toModule(require("buffer"));
var import_crypto2 = __toModule(require("crypto"));
var import_consumers2 = __toModule(require("stream/consumers"));
var import_web2 = __toModule(require("stream/web"));
var import_util2 = __toModule(require("util"));
var import_util = __toModule(require("util"));
var import_core2 = __toModule(require("@miniflare/core"));
var import_shared2 = __toModule(require("@miniflare/shared"));
var import_undici2 = __toModule(require("undici"));
// packages/r2/src/r2Object.ts
var import_buffer = __toModule(require("buffer"));
var import_assert = __toModule(require("assert"));
var import_crypto = __toModule(require("crypto"));
var import_consumers = __toModule(require("stream/consumers"));
var import_web = __toModule(require("stream/web"));
var import_util = __toModule(require("util"));
var import_core = __toModule(require("@miniflare/core"));
var import_shared = __toModule(require("@miniflare/shared"));
var import_undici = __toModule(require("undici"));
var decoder = new import_util.TextDecoder();
function createHash(input) {
var MAX_KEY_SIZE = 1024;
var HEX_REGEXP = /^[A-Fa-f0-9]*$/;
var R2_HASH_ALGORITHMS = [
{ name: "MD5", field: "md5", expectedBytes: 16 },
{ name: "SHA-1", field: "sha1", expectedBytes: 20 },
{ name: "SHA-256", field: "sha256", expectedBytes: 32 },
{ name: "SHA-384", field: "sha384", expectedBytes: 48 },
{ name: "SHA-512", field: "sha512", expectedBytes: 64 }
];
/**
 * Decodes an optional hex string into an ArrayBuffer.
 * Returns undefined unchanged when no digest was stored.
 */
function maybeHexDecode(hex) {
  if (hex === void 0) return void 0;
  const decoded = Buffer.from(hex, "hex");
  return (0, import_shared.viewToBuffer)(decoded);
}
var Checksums = class {
  // Hex-encoded digests exactly as stored in object metadata.
  #checksums;

  /**
   * Wraps a map of hex digest strings, exposing each digest as an
   * ArrayBuffer getter (matching the R2Checksums<ArrayBuffer> shape).
   */
  constructor(checksums) {
    this.#checksums = checksums;
  }

  // Hex-decodes one stored digest; undefined when that algorithm is absent.
  #decode(field) {
    return maybeHexDecode(this.#checksums[field]);
  }

  get md5() {
    return this.#decode("md5");
  }
  get sha1() {
    return this.#decode("sha1");
  }
  get sha256() {
    return this.#decode("sha256");
  }
  get sha384() {
    return this.#decode("sha384");
  }
  get sha512() {
    return this.#decode("sha512");
  }

  // Serialises back to the raw hex form (used for structured clone/JSON).
  toJSON() {
    return this.#checksums;
  }
};
function createMD5Hash(input) {
return import_crypto.default.createHash("md5").update(input).digest("hex");

@@ -190,2 +247,3 @@ }

range;
#checksums;
constructor(metadata) {

@@ -201,3 +259,12 @@ this.key = metadata.key;

this.range = metadata.range;
const checksums = { ...metadata.checksums };
if (metadata.multipart === void 0) {
(0, import_assert.default)(metadata.etag.length === 32 && HEX_REGEXP.test(metadata.etag), "Expected `etag` to be an MD5 hash");
checksums.md5 = metadata.etag;
}
this.#checksums = new Checksums(checksums);
}
get checksums() {
return this.#checksums;
}
writeHttpMetadata(headers) {

@@ -212,8 +279,8 @@ for (const [key, value] of Object.entries(this.httpMetadata)) {

body;
bodyUsed = false;
constructor(metadata, value) {
super(metadata);
const setBodyUsed = () => {
this.bodyUsed = true;
};
if (value instanceof import_web.ReadableStream) {
this.body = value;
return;
}
this.body = new import_web.ReadableStream({

@@ -227,29 +294,342 @@ type: "bytes",

controller.byobRequest?.respond(0);
setBodyUsed();
}
});
}
get bodyUsed() {
return (0, import_core._isDisturbedStream)(this.body);
}
async arrayBuffer() {
if (this.bodyUsed)
throw new TypeError("Body already used.");
return (0, import_consumers.arrayBuffer)(this.body);
return import_consumers.default.arrayBuffer(this.body);
}
async text() {
return decoder.decode(await this.arrayBuffer());
if (this.bodyUsed)
throw new TypeError("Body already used.");
return import_consumers.default.text(this.body);
}
async json() {
return JSON.parse(await this.text());
if (this.bodyUsed)
throw new TypeError("Body already used.");
return import_consumers.default.json(this.body);
}
async blob() {
const ab = await this.arrayBuffer();
return new import_buffer.Blob([new Uint8Array(ab)]);
if (this.bodyUsed)
throw new TypeError("Body already used.");
return import_consumers.default.blob(this.body);
}
};
// packages/r2/src/multipart.ts
var _INTERNAL_PREFIX = "__MINIFLARE_INTERNAL__";
var MIN_MULTIPART_UPLOAD_SIZE = 5 * 1024 * 1024;
var encoder = new import_util.TextEncoder();
/**
 * Rejects keys that exceed MAX_KEY_SIZE UTF-8 bytes or that collide with
 * Miniflare's reserved internal prefix. "(10020)" mirrors R2's error code.
 */
function validateMultipartKey(method, key) {
  const tooLong = Buffer.byteLength(key) > MAX_KEY_SIZE;
  const reserved = key.startsWith(_INTERNAL_PREFIX);
  if (tooLong || reserved) {
    throw new TypeError(`${method}: The specified object name is not valid. (10020)`);
  }
}
/**
 * Ensures a part number lies in R2's allowed range, 1..10000 inclusive.
 * Throws TypeError otherwise.
 */
function validatePartNumber(partNumber) {
  const inRange = partNumber >= 1 && partNumber <= 1e4;
  if (!inRange) {
    throw new TypeError(`Part number must be between 1 and 10000 (inclusive). Actual value was: ${partNumber}`);
  }
}
/**
 * Generates a random base64url id. When the id is likely to end up in a
 * file name on Windows, fewer random bytes are used — presumably to stay
 * under Windows path-length limits (TODO confirm).
 */
function generateId(likelyOnFilesystem = false) {
  let size = 128;
  if (likelyOnFilesystem && process.platform === "win32") size = 32;
  return import_crypto2.default.randomBytes(size).toString("base64url");
}
/**
 * Builds an S3-style multipart etag: the MD5 of the concatenated raw part
 * digests, suffixed with "-<part count>".
 */
function generateMultipartEtag(md5Hexes) {
  const hash = md5Hexes
    .map((md5Hex) => Buffer.from(md5Hex, "hex"))
    .reduce((h, digest) => h.update(digest), import_crypto2.default.createHash("md5"));
  return `${hash.digest("hex")}-${md5Hexes.length}`;
}
var INDEX = "index";
/**
 * Storage key for one uploaded part, or — when `part` is omitted — for the
 * upload's "index" record that tracks the upload's lifecycle state.
 */
function buildKey(key, uploadId, part) {
  const suffix = part ?? INDEX;
  return [`${_INTERNAL_PREFIX}:multipart`, uploadId, key, suffix].join(":");
}
/**
 * True when the stream's total length is known up front: request/response
 * body streams and FixedLengthStream halves.
 */
function isKnownLengthStream(stream) {
  if ((0, import_core2._isBodyStream)(stream)) return true;
  return (0, import_core2._isFixedLengthStream)(stream);
}
/**
 * Starts a multipart upload: persists an empty-valued index record carrying
 * the upload's metadata, then returns a handle for uploading parts.
 */
async function createMultipartUpload(key, metadata, opts) {
  const uploadId = generateId(true);
  const entry = { value: new Uint8Array(), metadata };
  await opts.storage.put(buildKey(key, uploadId), entry);
  return new R2MultipartUpload(key, uploadId, opts);
}
/**
 * Half-open interval intersection test: [a.start, a.end) and
 * [b.start, b.end) overlap unless one ends at or before the other begins.
 */
function overlaps(a, b) {
  const disjoint = a.end <= b.start || b.end <= a.start;
  return !disjoint;
}
// Streams the requested byte range of a multipart object by stitching
// together slices of the stored parts, pulled lazily one part per read.
// `range` is a ParsedRange ({offset, length}); `multipart` holds the
// uploadId and the ordered list of part sizes.
function getMultipartValue(storage, key, multipart, range) {
// Treat the query as a half-open interval [start, end) in object space.
const queryRange = {
start: range.offset,
end: range.offset + range.length
};
// Walk parts in order, recording for each overlapping part the slice to
// emit (start/end are part-relative offsets).
const parts = [];
let start = 0;
for (const part of multipart.parts) {
const partRange = { start, end: start + part.size };
if (overlaps(partRange, queryRange)) {
parts.push({
partNumber: part.partNumber,
start: Math.max(partRange.start, queryRange.start) - partRange.start,
end: Math.min(partRange.end, queryRange.end) - partRange.start
});
}
start = partRange.end;
}
return new import_web2.ReadableStream({
type: "bytes",
async pull(controller) {
// Each pull consumes the next pending part slice.
const part = parts.shift();
if (part === void 0) {
// All slices emitted: wait for the input gate, close, and satisfy any
// outstanding BYOB request with zero bytes.
await (0, import_shared2.waitForOpenInputGate)();
controller.close();
controller.byobRequest?.respond(0);
} else {
const partKey = buildKey(key, multipart.uploadId, part.partNumber);
const value = await storage.getRange(partKey, { offset: part.start, length: part.end - part.start }, true);
// NOTE(review): assumes the part still exists in storage at read time —
// presumably guaranteed by how parts are deleted only after the index
// is updated; verify against callers.
(0, import_assert2.default)(value !== void 0);
await (0, import_shared2.waitForOpenInputGate)();
if (value.value.byteLength > 0)
controller.enqueue(value.value);
}
}
});
}
/**
 * Deletes every stored part of a multipart upload, keeping the index record
 * itself and any keys listed in `excludeKeys`. All of an upload's keys
 * share the index key's prefix, so listing by that prefix (minus the
 * trailing "index") finds them all.
 */
async function deleteMultipartParts(storage, key, uploadId, excludeKeys) {
  const indexKey = buildKey(key, uploadId);
  const partPrefix = indexKey.slice(0, -INDEX.length);
  const { keys } = await storage.list({ prefix: partPrefix });
  const partKeys = keys
    .map(({ name }) => name)
    .filter((name) => name !== indexKey && !(excludeKeys?.has(name) ?? false));
  await storage.deleteMany(partKeys);
}
// Handle for an in-progress multipart upload. Parts and a lifecycle "index"
// record are stored under internal keys (see buildKey); the index metadata
// encodes the upload's state: pending (httpMetadata/customMetadata),
// { aborted: true }, or { completed: true }.
var R2MultipartUpload = class {
#storage;
#blockGlobalAsyncIO;
#minMultipartUploadSize;
key;
uploadId;
constructor(key, uploadId, opts) {
this.#storage = opts.storage;
this.#blockGlobalAsyncIO = opts.blockGlobalAsyncIO ?? false;
this.#minMultipartUploadSize = opts.minMultipartUploadSize ?? MIN_MULTIPART_UPLOAD_SIZE;
// `key`/`uploadId` are exposed as read-only accessors that throw on
// assignment, matching the workers runtime's behaviour for these props.
Object.defineProperties(this, {
key: {
enumerable: true,
get() {
return key;
},
set() {
throw new TypeError("Cannot assign to read only property 'key' of object '#<R2MultipartUpload>'");
}
},
uploadId: {
enumerable: true,
get() {
return uploadId;
},
set() {
throw new TypeError("Cannot assign to read only property 'uploadId' of object '#<R2MultipartUpload>'");
}
}
});
}
// Common per-call setup: optionally enforce being inside a request, and
// count this operation as an internal subrequest on the request context.
#prepareCtx() {
if (this.#blockGlobalAsyncIO)
(0, import_shared2.assertInRequest)();
const ctx = (0, import_shared2.getRequestContext)();
ctx?.incrementInternalSubrequests();
return ctx;
}
// Reads the upload's index record and classifies its lifecycle state.
// Only a pending upload reports exists: true (with its stored metadata).
async #state() {
const meta = await this.#storage.head(buildKey(this.key, this.uploadId));
if (meta?.metadata === void 0) {
return { exists: false, aborted: false, completed: false };
}
if ("aborted" in meta.metadata) {
return { exists: false, aborted: true, completed: false };
}
if ("completed" in meta.metadata) {
return { exists: false, aborted: false, completed: true };
}
return { exists: true, meta: meta.metadata };
}
// Uploads one part. Coerces partNumber to a number (NaN -> 0, which then
// fails validation) and `value` to a Uint8Array, then stores the part with
// its size, MD5 and a freshly generated etag.
async uploadPart(partNumber, value) {
const ctx = this.#prepareCtx();
if (arguments.length === 0) {
throw new TypeError("Failed to execute 'uploadPart' on 'R2MultipartUpload': parameter 1 is not of type 'integer'.");
}
if (typeof partNumber !== "number") {
partNumber = parseInt(String(partNumber));
}
if (isNaN(partNumber))
partNumber = 0;
let valueArray;
if (typeof value === "string") {
valueArray = encoder.encode(value);
} else if (value instanceof ArrayBuffer) {
valueArray = new Uint8Array(value);
} else if (ArrayBuffer.isView(value)) {
valueArray = (0, import_shared2.viewToArray)(value);
} else if (value instanceof import_buffer.Blob) {
valueArray = new Uint8Array(await value.arrayBuffer());
} else if (value instanceof import_web2.ReadableStream) {
// Streams are only accepted when their total length is known up front.
if (!isKnownLengthStream(value)) {
throw new TypeError("Provided readable stream must have a known length (request/response body or readable half of FixedLengthStream)");
}
valueArray = new Uint8Array(await (0, import_consumers2.arrayBuffer)(value));
} else {
throw new TypeError("Failed to execute 'uploadPart' on 'R2MultipartUpload': parameter 2 is not of type 'ReadableStream or ArrayBuffer or ArrayBufferView or string or Blob'.");
}
validatePartNumber(partNumber);
validateMultipartKey("uploadPart", this.key);
if (!(await this.#state()).exists) {
throw new Error("uploadPart: The specified multipart upload does not exist. (10024)");
}
const partKey = buildKey(this.key, this.uploadId, partNumber);
const etag = generateId();
await this.#storage.put(partKey, {
value: valueArray,
metadata: {
size: valueArray.byteLength,
md5: createMD5Hash(valueArray),
etag
}
});
await (0, import_shared2.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
return { partNumber, etag };
}
// Aborts the upload: deletes stored parts and marks the index record
// { aborted: true }. Aborting an already aborted/completed upload is a
// no-op; a missing index record is treated as an internal error.
async abort() {
const ctx = this.#prepareCtx();
validateMultipartKey("abortMultipartUpload", this.key);
const state = await this.#state();
if (!state.exists) {
if (state.aborted || state.completed) {
await (0, import_shared2.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
return;
} else {
throw new Error("abortMultipartUpload: We encountered an internal error. Please try again. (10001)");
}
}
await deleteMultipartParts(this.#storage, this.key, this.uploadId);
const indexKey = buildKey(this.key, this.uploadId);
await this.#storage.put(indexKey, {
value: new Uint8Array(),
metadata: { aborted: true }
});
await (0, import_shared2.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
}
// Completes the upload from the caller-supplied list of uploaded parts,
// validating part numbers/etags and size constraints, then writes the
// final object record and cleans up unused parts.
async complete(uploadedParts) {
const ctx = this.#prepareCtx();
if (!Array.isArray(uploadedParts)) {
throw new TypeError("Failed to execute 'complete' on 'R2MultipartUpload': parameter 1 is not of type 'Array'.");
}
// Coerce each entry to { partNumber: number, etag: string } (NaN -> 0).
uploadedParts = uploadedParts.map((part, i) => {
if (typeof part !== "object") {
throw new TypeError(`Incorrect type for array element ${i}: the provided value is not of type 'UploadedPart'.`);
}
part = { partNumber: part.partNumber, etag: part.etag };
if (typeof part.partNumber !== "number") {
part.partNumber = parseInt(String(part.partNumber));
}
if (isNaN(part.partNumber))
part.partNumber = 0;
part.etag = String(part.etag);
return part;
});
for (const part of uploadedParts) {
validatePartNumber(part.partNumber);
}
validateMultipartKey("completeMultipartUpload", this.key);
const state = await this.#state();
if (!state.exists) {
throw new Error(state.completed ? "completeMultipartUpload: The specified multipart upload does not exist. (10024)" : "completeMultipartUpload: We encountered an internal error. Please try again. (10001)");
}
// Duplicate part numbers are rejected.
const partNumberSet = new Set();
for (const { partNumber } of uploadedParts) {
if (partNumberSet.has(partNumber)) {
throw new Error("completeMultipartUpload: We encountered an internal error. Please try again. (10001)");
}
partNumberSet.add(partNumber);
}
// Look up each claimed part's stored metadata and check etags match.
const partMetas = await Promise.all(uploadedParts.map(({ partNumber }) => {
const partKey = buildKey(this.key, this.uploadId, partNumber);
return this.#storage.head(partKey);
}));
const parts = partMetas.map((partMeta, i) => {
const uploadedPart = uploadedParts[i];
if (partMeta?.metadata === void 0 || partMeta.metadata.etag !== uploadedPart.etag) {
throw new Error("completeMultipartUpload: One or more of the specified parts could not be found. (10025)");
}
return { ...uploadedPart, ...partMeta.metadata };
});
// All parts except the last must meet the minimum size. NOTE(review):
// this first check runs before sorting, so "last" here is positional in
// the caller's order; the post-sort loop below repeats the size check.
for (const part of parts.slice(0, -1)) {
if (part.size < this.#minMultipartUploadSize) {
throw new Error("completeMultipartUpload: Your proposed upload is smaller than the minimum allowed object size.");
}
}
parts.sort((a, b) => a.partNumber - b.partNumber);
// After sorting: all non-final parts must share one size, and the final
// part must not be larger than that size.
let partSize;
for (const part of parts.slice(0, -1)) {
if (partSize === void 0)
partSize = part.size;
if (part.size < this.#minMultipartUploadSize || part.size !== partSize) {
throw new Error("completeMultipartUpload: There was a problem with the multipart upload. (10048)");
}
}
if (partSize !== void 0 && parts[parts.length - 1].size > partSize) {
throw new Error("completeMultipartUpload: There was a problem with the multipart upload. (10048)");
}
const existingMeta = await this.#storage.head(this.key);
const indexKey = buildKey(this.key, this.uploadId);
const totalSize = parts.reduce((acc, { size }) => acc + size, 0);
const etag = generateMultipartEtag(parts.map(({ md5 }) => md5));
const metadata = {
key: this.key,
version: createVersion(),
size: totalSize,
etag,
httpEtag: `"${etag}"`,
uploaded: new Date(),
httpMetadata: state.meta.httpMetadata,
customMetadata: state.meta.customMetadata,
checksums: {},
multipart: {
uploadId: this.uploadId,
parts: parts.map(({ partNumber, size }) => ({ partNumber, size }))
}
};
// Atomically write the final object record and mark the upload completed.
await (0, import_shared2.waitForOpenOutputGate)();
await this.#storage.putMany([
[this.key, { value: new Uint8Array(), metadata }],
[indexKey, { value: new Uint8Array(), metadata: { completed: true } }]
]);
await (0, import_shared2.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
// Delete parts that were uploaded but not referenced by the completion,
// and parts of any previous multipart object this key replaced.
const used = new Set(parts.map(({ partNumber }) => buildKey(this.key, this.uploadId, partNumber)));
await deleteMultipartParts(this.#storage, this.key, this.uploadId, used);
if (existingMeta?.metadata?.multipart !== void 0) {
await deleteMultipartParts(this.#storage, this.key, existingMeta.metadata.multipart.uploadId);
}
return new R2Object(metadata);
}
};
// packages/r2/src/bucket.ts
var MAX_LIST_KEYS = 1e3;
var MAX_KEY_SIZE = 1024;
var MAX_VALUE_SIZE = 5 * 1e3 * 1e3 * 1e3 - 5 * 1e3 * 1e3;
var UNPAIRED_SURROGATE_PAIR_REGEX = /^(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])$/;
var encoder = new import_util2.TextEncoder();
var encoder2 = new import_util2.TextEncoder();
function throwR2Error(method, status, message) {

@@ -259,3 +639,3 @@ throw new Error(`R2 ${method} failed: (${status}) ${message}`);

function validateKey(method, key) {
const keyLength = encoder.encode(key).byteLength;
const keyLength = encoder2.encode(key).byteLength;
if (UNPAIRED_SURROGATE_PAIR_REGEX.test(key)) {

@@ -267,2 +647,6 @@ throwR2Error(method, 400, "Key contains an illegal unicode value(s).");

}
if (key.startsWith(_INTERNAL_PREFIX)) {
throwR2Error(method, 400, `Key cannot start with "${_INTERNAL_PREFIX}".`);
}
return key;
}

@@ -295,2 +679,4 @@ function validateOnlyIf(onlyIf, method) {

}
if (range instanceof import_undici2.Headers)
return;
const { offset, length, suffix } = range;

@@ -330,4 +716,43 @@ if (offset !== void 0) {

}
/**
 * Normalises the caller-supplied checksum for one algorithm into a Buffer,
 * validating its type and length. Returns undefined when the field wasn't
 * provided, otherwise { alg, hash: Buffer }.
 */
function validatePutHash(options, alg) {
  const hash = options[alg.field];
  if (hash === void 0) return;
  let buffer;
  if (hash instanceof ArrayBuffer) {
    buffer = Buffer.from(hash);
  } else if (ArrayBuffer.isView(hash)) {
    buffer = Buffer.from((0, import_shared3.viewToArray)(hash));
  } else if (typeof hash === "string") {
    // Strings must be exact-length hex before decoding.
    const expectedHex = alg.expectedBytes * 2;
    if (hash.length !== expectedHex) {
      throw new TypeError(`${alg.name} is ${expectedHex} hex characters, not ${hash.length}`);
    }
    if (!HEX_REGEXP.test(hash)) {
      throw new TypeError(`Provided ${alg.name} wasn't a valid hex string`);
    }
    buffer = Buffer.from(hash, "hex");
  } else {
    throw new TypeError(`Incorrect type for the '${alg.field}' field on 'PutOptions': the provided value is not of type 'ArrayBuffer or ArrayBufferView or string'.`);
  }
  if (buffer.byteLength !== alg.expectedBytes) {
    throw new TypeError(`${alg.name} is ${alg.expectedBytes} bytes, not ${buffer.byteLength}`);
  }
  return { alg, hash: buffer };
}
/**
 * Validates every supported checksum field on put() options, enforcing that
 * at most one algorithm is specified. Returns the single validated hash, or
 * undefined when none was given.
 */
function validatePutHashes(options) {
  let result;
  for (const alg of R2_HASH_ALGORITHMS) {
    const validated = validatePutHash(options, alg);
    if (validated === void 0) continue;
    if (result !== void 0) {
      throw new TypeError("You cannot specify multiple hashing algorithms.");
    }
    result = validated;
  }
  return result;
}
function validatePutOptions(options) {
const { onlyIf = {}, httpMetadata, customMetadata, md5 } = options;
const { onlyIf = {}, httpMetadata, customMetadata } = options;
validateOnlyIf(onlyIf, "PUT");

@@ -345,5 +770,3 @@ validateHttpMetadata(httpMetadata);

}
if (md5 !== void 0 && !(md5 instanceof ArrayBuffer) && typeof md5 !== "string") {
throwR2Error("PUT", 400, "md5 must be a string, ArrayBuffer, or undefined.");
}
return validatePutHashes(options);
}

@@ -385,9 +808,9 @@ function validateListOptions(options) {

if (typeof value === "string") {
return encoder.encode(value);
} else if (value instanceof import_web2.ReadableStream) {
return new Uint8Array(await (0, import_consumers2.arrayBuffer)(value));
return encoder2.encode(value);
} else if (value instanceof import_web3.ReadableStream) {
return new Uint8Array(await (0, import_consumers3.arrayBuffer)(value));
} else if (value instanceof ArrayBuffer) {
return new Uint8Array(value);
} else if (ArrayBuffer.isView(value)) {
return (0, import_shared2.viewToArray)(value);
return (0, import_shared3.viewToArray)(value);
} else if (value === null) {

@@ -401,26 +824,44 @@ return new Uint8Array();

}
/**
 * Converts a `Range` header into an R2 {offset, length} range. Anything
 * other than exactly one parseable range falls back to {} (whole object).
 */
function rangeHeaderToR2Range(headers, size) {
  const rangeHeader = headers.get("Range");
  if (rangeHeader === null) return {};
  const ranges = (0, import_core3.parseRanges)(rangeHeader, size);
  if (ranges === void 0 || ranges.length !== 1) return {};
  // HTTP ranges are inclusive of both ends, hence the +1 on length.
  const [start, end] = ranges[0];
  return { offset: start, length: end - start + 1 };
}
/**
 * Error message for a non-string key passed to an R2Bucket method.
 */
function buildKeyTypeError(method) {
  const suffix = "' on 'R2Bucket': parameter 1 is not of type 'string'.";
  return "Failed to execute '" + method + suffix;
}
var R2Bucket = class {
#storage;
#blockGlobalAsyncIO;
constructor(storage, { blockGlobalAsyncIO = false } = {}) {
#listRespectInclude;
#multipartOpts;
constructor(storage, {
blockGlobalAsyncIO = false,
listRespectInclude = true,
minMultipartUploadSize
} = {}) {
this.#storage = storage;
this.#blockGlobalAsyncIO = blockGlobalAsyncIO;
this.#listRespectInclude = listRespectInclude;
this.#multipartOpts = {
storage,
blockGlobalAsyncIO,
minMultipartUploadSize
};
}
#prepareCtx(method, key) {
#prepareCtx() {
if (this.#blockGlobalAsyncIO)
(0, import_shared2.assertInRequest)();
const ctx = (0, import_shared2.getRequestContext)();
(0, import_shared3.assertInRequest)();
const ctx = (0, import_shared3.getRequestContext)();
ctx?.incrementInternalSubrequests();
if (method !== "LIST" && typeof key !== "string") {
throw new TypeError(`Failed to execute '${method.toLowerCase()}' on 'R2Bucket': parameter 1 is not of type 'string'.`);
}
return ctx;
}
async #head(key, ctx) {
if (ctx === void 0)
ctx = this.#prepareCtx("HEAD", key);
validateKey("HEAD", key);
async #head(key) {
const stored = await this.#storage.head(key);
await (0, import_shared2.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
if (stored?.metadata === void 0)

@@ -430,46 +871,78 @@ return null;

parseR2ObjectMetadata(metadata);
return new R2Object(metadata);
return metadata;
}
async head(key) {
return this.#head(key);
const ctx = this.#prepareCtx();
if (arguments.length === 0) {
throw new TypeError(buildKeyTypeError("head"));
}
key = String(key);
validateKey("HEAD", key);
const meta = await this.#head(key);
await (0, import_shared3.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
return meta === null ? null : new R2Object(meta);
}
async get(key, options) {
const ctx = this.#prepareCtx("GET", key);
const ctx = this.#prepareCtx();
options = options ?? {};
const { range = {} } = options;
let { range = {} } = options;
if (arguments.length === 0) {
throw new TypeError(buildKeyTypeError("get"));
}
key = String(key);
validateKey("GET", key);
validateGetOptions(options);
const onlyIf = parseOnlyIf(options.onlyIf);
const meta = await this.#head(key, ctx);
if (meta === null)
const meta = await this.#head(key);
if (meta === null) {
await (0, import_shared3.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
return null;
if (!testR2Conditional(onlyIf, meta) || meta?.size === 0) {
}
if (!testR2Conditional(onlyIf, meta)) {
await (0, import_shared3.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
return new R2Object(meta);
}
let stored;
if (range instanceof import_undici2.Headers) {
range = rangeHeaderToR2Range(range, meta.size);
}
let value;
try {
stored = await this.#storage.getRange(key, range);
if (meta.size === 0) {
value = new Uint8Array();
} else if (meta.multipart !== void 0) {
const parsedRange = (0, import_shared3.parseRange)(range, meta.size);
value = getMultipartValue(this.#storage, key, meta.multipart, parsedRange);
meta.range = parsedRange;
} else {
const stored = await this.#storage.getRange(key, range);
if (stored === void 0)
return null;
value = stored.value;
if ("range" in stored && stored.range !== void 0) {
meta.range = stored.range;
}
}
} catch {
throwR2Error("GET", 400, "The requested range is not satisfiable.");
}
await (0, import_shared2.waitForOpenInputGate)();
await (0, import_shared3.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
if (stored?.metadata === void 0)
return null;
const { value, metadata } = stored;
parseR2ObjectMetadata(metadata);
if ("range" in stored && stored.range !== void 0) {
metadata.range = stored.range;
}
return new R2ObjectBody(metadata, value);
return new R2ObjectBody(meta, value);
}
async put(key, value, options = {}) {
const ctx = this.#prepareCtx("PUT", key);
const ctx = this.#prepareCtx();
if (arguments.length === 0) {
throw new TypeError(buildKeyTypeError("put"));
}
key = String(key);
validateKey("PUT", key);
validatePutOptions(options);
const specifiedHash = validatePutOptions(options);
const { customMetadata = {} } = options;
let { md5, onlyIf, httpMetadata } = options;
let { onlyIf, httpMetadata } = options;
onlyIf = parseOnlyIf(onlyIf);
httpMetadata = parseHttpMetadata(httpMetadata);
const meta = await this.#head(key, ctx) ?? void 0;
const meta = await this.#head(key) ?? void 0;
if (!testR2Conditional(onlyIf, meta))

@@ -481,11 +954,11 @@ return null;

}
const md5Hash = createHash(toStore);
if (md5 !== void 0) {
if (md5 instanceof ArrayBuffer) {
md5 = Buffer.from(new Uint8Array(md5)).toString("hex");
const checksums = {};
if (specifiedHash !== void 0) {
const computedHash = import_crypto3.default.createHash(specifiedHash.alg.field).update(toStore).digest();
if (!specifiedHash.hash.equals(computedHash)) {
throw new Error(`put: The ${specifiedHash.alg.name} checksum you specified did not match what we received.`);
}
if (md5 !== md5Hash) {
throwR2Error("PUT", 400, "The Content-MD5 you specified did not match what we received.");
}
checksums[specifiedHash.alg.field] = computedHash.toString("hex");
}
const md5Hash = createMD5Hash(toStore);
const metadata = {

@@ -499,5 +972,6 @@ key,

httpMetadata,
customMetadata
customMetadata,
checksums
};
await (0, import_shared2.waitForOpenOutputGate)();
await (0, import_shared3.waitForOpenOutputGate)();
await this.#storage.put(key, {

@@ -507,16 +981,32 @@ value: toStore,

});
await (0, import_shared2.waitForOpenInputGate)();
if (meta?.multipart !== void 0) {
await deleteMultipartParts(this.#storage, key, meta.multipart.uploadId);
}
await (0, import_shared3.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
return new R2Object(metadata);
}
async delete(key) {
const ctx = this.#prepareCtx("DELETE", key);
validateKey("DELETE", key);
await (0, import_shared2.waitForOpenOutputGate)();
await this.#storage.delete(key);
await (0, import_shared2.waitForOpenInputGate)();
async delete(keys) {
const ctx = this.#prepareCtx();
if (arguments.length === 0) {
throw new TypeError(buildKeyTypeError("delete"));
}
if (!Array.isArray(keys))
keys = [keys];
keys = keys.map((key) => validateKey("DELETE", String(key)));
await (0, import_shared3.waitForOpenOutputGate)();
const keyMetas = await Promise.all(keys.map((key) => this.#head(key)));
await this.#storage.deleteMany(keys);
const deletePartsPromises = keys.map((key, i) => {
const keyMeta = keyMetas[i];
if (keyMeta?.multipart !== void 0) {
return deleteMultipartParts(this.#storage, key, keyMeta.multipart.uploadId);
}
});
await Promise.all(deletePartsPromises);
await (0, import_shared3.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
}
async list(listOptions = {}) {
const ctx = this.#prepareCtx("LIST");
const ctx = this.#prepareCtx();
const delimitedPrefixes = new Set();

@@ -534,2 +1024,3 @@ validateListOptions(listOptions);

prefix,
excludePrefix: _INTERNAL_PREFIX,
limit,

@@ -543,6 +1034,8 @@ cursor,

const objects = res.keys.map((k) => k.metadata).filter((metadata) => metadata !== void 0).map((metadata) => {
if (!include.includes("httpMetadata"))
metadata.httpMetadata = {};
if (!include.includes("customMetadata"))
metadata.customMetadata = {};
if (this.#listRespectInclude) {
if (!include.includes("httpMetadata"))
metadata.httpMetadata = {};
if (!include.includes("customMetadata"))
metadata.customMetadata = {};
}
parseR2ObjectMetadata(metadata);

@@ -558,3 +1051,3 @@ return new R2Object(metadata);

}
await (0, import_shared2.waitForOpenInputGate)();
await (0, import_shared3.waitForOpenInputGate)();
ctx?.advanceCurrentTime();

@@ -569,7 +1062,41 @@ const cursorLength = res.cursor.length > 0;

}
async createMultipartUpload(key, options = {}) {
const ctx = this.#prepareCtx();
if (arguments.length === 0) {
throw new TypeError(buildKeyTypeError("createMultipartUpload"));
}
key = String(key);
validateMultipartKey("createMultipartUpload", key);
if (typeof options !== "object") {
throw new TypeError("Failed to execute 'createMultipartUpload' on 'R2Bucket': parameter 2 is not of type 'MultipartOptions'.");
}
if (options.customMetadata !== void 0 && typeof options.customMetadata !== "object") {
throw new TypeError("Incorrect type for the 'customMetadata' field on 'MultipartOptions': the provided value is not of type 'object'.");
}
if (options.httpMetadata !== void 0 && typeof options.httpMetadata !== "object") {
throw new TypeError("Incorrect type for the 'httpMetadata' field on 'MultipartOptions': the provided value is not of type 'HttpMetadata or Headers'.");
}
const customMetadata = options.customMetadata ?? {};
const httpMetadata = parseHttpMetadata(options.httpMetadata);
const upload = await createMultipartUpload(key, { customMetadata, httpMetadata }, this.#multipartOpts);
await (0, import_shared3.waitForOpenInputGate)();
ctx?.advanceCurrentTime();
return upload;
}
async resumeMultipartUpload(key, uploadId) {
if (arguments.length === 0) {
throw new TypeError(buildKeyTypeError("resumeMultipartUpload"));
}
if (arguments.length === 1) {
throw new TypeError("Failed to execute 'resumeMultipartUpload' on 'R2Bucket': parameter 2 is not of type 'string'.");
}
key = String(key);
uploadId = String(uploadId);
return new R2MultipartUpload(key, uploadId, this.#multipartOpts);
}
};
// packages/r2/src/plugin.ts
var import_shared3 = __toModule(require("@miniflare/shared"));
var R2Plugin = class extends import_shared3.Plugin {
var import_shared4 = __toModule(require("@miniflare/shared"));
var R2Plugin = class extends import_shared4.Plugin {
r2Buckets;

@@ -581,7 +1108,8 @@ r2Persist;

this.assignOptions(options);
this.#persist = (0, import_shared3.resolveStoragePersist)(ctx.rootPath, this.r2Persist);
this.#persist = (0, import_shared4.resolveStoragePersist)(ctx.rootPath, this.r2Persist);
}
getBucket(storage, bucket, blockGlobalAsyncIO = false) {
return new R2Bucket(storage.storage(bucket, this.#persist), {
blockGlobalAsyncIO
blockGlobalAsyncIO,
listRespectInclude: this.ctx.compat.isEnabled("r2_list_honor_include")
});

@@ -599,4 +1127,4 @@ }

__decorateClass([
(0, import_shared3.Option)({
type: import_shared3.OptionType.ARRAY,
(0, import_shared4.Option)({
type: import_shared4.OptionType.ARRAY,
name: "r2",

@@ -610,4 +1138,4 @@ alias: "r",

__decorateClass([
(0, import_shared3.Option)({
type: import_shared3.OptionType.BOOLEAN_STRING,
(0, import_shared4.Option)({
type: import_shared4.OptionType.BOOLEAN_STRING,
description: "Persist R2 data (to optional path)",

@@ -620,14 +1148,24 @@ logName: "R2 Persistence",

0 && (module.exports = {
Checksums,
HEX_REGEXP,
MAX_KEY_SIZE,
R2Bucket,
R2MultipartUpload,
R2Object,
R2ObjectBody,
R2Plugin,
R2_HASH_ALGORITHMS,
_INTERNAL_PREFIX,
_valueToArray,
createHash,
createMD5Hash,
createMultipartUpload,
createVersion,
deleteMultipartParts,
getMultipartValue,
parseHttpMetadata,
parseOnlyIf,
parseR2ObjectMetadata,
testR2Conditional
testR2Conditional,
validateMultipartKey
});
//# sourceMappingURL=index.js.map

8

package.json
{
"name": "@miniflare/r2",
"version": "2.11.0",
"version": "2.12.0",
"description": "Workers R2 module for Miniflare: a fun, full-featured, fully-local simulator for Cloudflare Workers",

@@ -38,8 +38,8 @@ "keywords": [

"dependencies": {
"@miniflare/shared": "2.11.0",
"undici": "5.9.1"
"@miniflare/shared": "2.12.0",
"undici": "5.11.0"
},
"devDependencies": {
"@miniflare/shared-test": "2.11.0"
"@miniflare/shared-test": "2.12.0"
}
}

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc