miniflare
Comparing versions 0.0.0-701d41ee8 and 0.0.0-7028c3705
@@ -19,3 +19,3 @@ // src/workers/assets/assets-kv.worker.ts
), newResponse = new Response(response.body, response);
return newResponse.headers.append(
return contentType !== null && newResponse.headers.append(
"cf-kv-metadata",
@@ -22,0 +22,0 @@ `{"contentType": "${contentType}"}`
@@ -29,5 +29,5 @@ var __create = Object.create;
// ../../node_modules/.pnpm/http-cache-semantics@4.1.0/node_modules/http-cache-semantics/index.js
// ../../node_modules/.pnpm/http-cache-semantics@4.1.1/node_modules/http-cache-semantics/index.js
var require_http_cache_semantics = __commonJS({
"../../node_modules/.pnpm/http-cache-semantics@4.1.0/node_modules/http-cache-semantics/index.js"(exports, module) {
"../../node_modules/.pnpm/http-cache-semantics@4.1.1/node_modules/http-cache-semantics/index.js"(exports, module) {
"use strict";
@@ -41,2 +41,3 @@ var statusCodeCacheableByDefault = /* @__PURE__ */ new Set([
301,
308,
404,
@@ -94,8 +95,7 @@ 405,
let cc = {};
if (!header)
return cc;
let parts = header.trim().split(/\s*,\s*/);
if (!header) return cc;
let parts = header.trim().split(/,/);
for (let part of parts) {
let [k, v] = part.split(/\s*=\s*/, 2);
cc[k] = v === void 0 ? !0 : v.replace(/^"|"$/g, "");
let [k, v] = part.split(/=/, 2);
cc[k.trim()] = v === void 0 ? !0 : v.trim().replace(/^"|"$/g, "");
}
@@ -176,4 +176,3 @@ return cc;
for (let name of fields)
if (req.headers[name] !== this._reqHeaders[name])
return !1;
if (req.headers[name] !== this._reqHeaders[name]) return !1;
return !0;
@@ -271,6 +270,4 @@ }
_fromObject(obj) {
if (this._responseTime)
throw Error("Reinitialized");
if (!obj || obj.v !== 1)
throw Error("Invalid serialization");
if (this._responseTime) throw Error("Reinitialized");
if (!obj || obj.v !== 1) throw Error("Invalid serialization");
this._responseTime = obj.t, this._isShared = obj.sh, this._cacheHeuristic = obj.ch, this._immutableMinTtl = obj.imm !== void 0 ? obj.imm : 24 * 3600 * 1e3, this._status = obj.st, this._resHeaders = obj.resh, this._rescc = obj.rescc, this._method = obj.m, this._url = obj.u, this._host = obj.h, this._noAuthorization = obj.a, this._reqHeaders = obj.reqh, this._reqcc = obj.reqcc;
@@ -313,4 +310,3 @@ }
}
} else
this._resHeaders["last-modified"] && !headers["if-modified-since"] && (headers["if-modified-since"] = this._resHeaders["last-modified"]);
} else this._resHeaders["last-modified"] && !headers["if-modified-since"] && (headers["if-modified-since"] = this._resHeaders["last-modified"]);
return headers;
@@ -391,3 +387,4 @@ }
MAX_VALUE_SIZE_TEST: 1024,
MAX_METADATA_SIZE: 1024
MAX_METADATA_SIZE: 1024,
MAX_BULK_SIZE: 25 * 1024 * 1024
};
@@ -484,4 +481,3 @@ var SITES_NO_CACHE_PREFIX = "$__MINIFLARE_SITES__$/";
let result = {};
for (let [key, value] of headers)
result[key.toLowerCase()] = value;
for (let [key, value] of headers) result[key.toLowerCase()] = value;
return result;
@@ -527,4 +523,3 @@ }
}
var CR = "\r".charCodeAt(0), LF = `
`.charCodeAt(0), STATUS_REGEXP = /^HTTP\/\d(?:\.\d)? (?<rawStatusCode>\d+) (?<statusText>.*)$/;
var CR = 13, LF = 10, STATUS_REGEXP = /^HTTP\/\d(?:\.\d)? (?<rawStatusCode>\d+) (?<statusText>.*)$/;
async function parseHttpResponse(stream) {
@@ -535,4 +530,3 @@ let buffer = Buffer2.alloc(0), blankLineIndex = -1;
(_value, index) => buffer[index] === CR && buffer[index + 1] === LF && buffer[index + 2] === CR && buffer[index + 3] === LF
), blankLineIndex !== -1)
break;
), blankLineIndex !== -1) break;
assert(blankLineIndex !== -1, "Expected to find blank line in HTTP message");
@@ -585,4 +579,3 @@ let rawStatusHeaders = buffer.subarray(0, blankLineIndex).toString(), [rawStatus, ...rawHeaders] = rawStatusHeaders.split(`\r
let cacheKey = getCacheKey(req);
if (isSitesRequest(req))
throw new CacheMiss();
if (isSitesRequest(req)) throw new CacheMiss();
let resHeaders, resRanges, cached = await this.storage.get(cacheKey, ({ size, headers }) => {
@@ -599,4 +592,3 @@ resHeaders = new Headers(headers);
});
if (cached?.metadata === void 0)
throw new CacheMiss();
if (cached?.metadata === void 0) throw new CacheMiss();
return assert(resHeaders !== void 0), resHeaders.set("CF-Cache-Status", "HIT"), resRanges ??= [], getMatchResponse(req.headers, {
@@ -613,4 +605,3 @@ status: cached.metadata.status,
let cacheKey = getCacheKey(req);
if (isSitesRequest(req))
throw new CacheMiss();
if (isSitesRequest(req)) throw new CacheMiss();
assert(req.body !== null);
@@ -652,4 +643,3 @@ let res = await parseHttpResponse(req.body), body = res.body;
let cacheKey = getCacheKey(req);
if (!await this.storage.delete(cacheKey))
throw new PurgeFailure();
if (!await this.storage.delete(cacheKey)) throw new PurgeFailure();
return new Response(null);
@@ -656,0 +646,0 @@ };
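For reference, the `Cache-Control` parsing changed by the http-cache-semantics 4.1.1 bump above can be sketched standalone: directives are now split on bare commas and each key/value trimmed afterwards, rather than trimming inside the split regexps. A minimal illustrative sketch (the demo header is not from the bundle):

```js
// Parse a Cache-Control header into a { directive: value-or-true } map,
// mirroring the split-then-trim approach in the diffed code above.
function parseCacheControl(header) {
  const cc = {};
  if (!header) return cc;
  for (const part of header.trim().split(/,/)) {
    const [k, v] = part.split(/=/, 2);
    cc[k.trim()] = v === undefined ? true : v.trim().replace(/^"|"$/g, "");
  }
  return cc;
}

console.log(parseCacheControl('max-age=300, stale-while-revalidate="60"'));
// → { "max-age": "300", "stale-while-revalidate": "60" }
```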
@@ -31,4 +31,3 @@ var __defProp = Object.defineProperty;
for (let { name: table, sql } of tables) {
if (filterTables.size > 0 && !filterTables.has(table))
continue;
if (filterTables.size > 0 && !filterTables.has(table)) continue;
if (table === "sqlite_sequence")
@@ -47,4 +46,3 @@ noSchema || (yield "DELETE FROM sqlite_sequence;");
}
if (noData)
continue;
if (noData) continue;
let columns_cursor = db.exec(`PRAGMA table_info="${table}"`), columns = Array.from(columns_cursor), select = `SELECT ${columns.map((c) => c.name).join(", ")}
@@ -55,3 +53,3 @@ FROM "${table}";`, rows_cursor = db.exec(select);
let colType = columns[i].type, cellType = typeof cell;
return cell === null ? "NULL" : colType === "INTEGER" || cellType === "number" ? cell : colType === "TEXT" || cellType === "string" ? outputQuotedEscapedString(cell) : cell instanceof ArrayBuffer ? `X'${Array.prototype.map.call(new Uint8Array(cell), (b) => b.toString(16)).join("")}'` : (console.log({ colType, cellType, cell, column: columns[i] }), "ERROR");
return cell === null ? "NULL" : colType === "INTEGER" || cellType === "number" ? cell : colType === "TEXT" || cellType === "string" ? outputQuotedEscapedString(cell) : cell instanceof ArrayBuffer ? `X'${Array.prototype.map.call(new Uint8Array(cell), (b) => b.toString(16).padStart(2, "0")).join("")}'` : (console.log({ colType, cellType, cell, column: columns[i] }), "ERROR");
});
@@ -239,3 +237,3 @@ yield `INSERT INTO ${sqliteQuote(table)} VALUES(${formattedCells.join(",")});`;
params: z.array(D1ValueSchema).nullable().optional()
}), D1QueriesSchema = z.union([D1QuerySchema, z.array(D1QuerySchema)]), D1_EXPORT_PRAGMA = "PRAGMA miniflare_d1_export(?,?,?);", D1ResultsFormatSchema = z.enum(["ARRAY_OF_OBJECTS", "ROWS_AND_COLUMNS", "NONE"]).catch("ARRAY_OF_OBJECTS"), served_by = "miniflare.db", D1Error = class extends HttpError {
}), D1QueriesSchema = z.union([D1QuerySchema, z.array(D1QuerySchema)]), D1_EXPORT_PRAGMA = "PRAGMA miniflare_d1_export(?,?,?);", D1ResultsFormatSchema = z.enum(["ARRAY_OF_OBJECTS", "ROWS_AND_COLUMNS", "NONE"]).catch("ARRAY_OF_OBJECTS"), served_by = "miniflare.db", D1_SESSION_COMMIT_TOKEN_HTTP_HEADER = "x-cf-d1-session-commit-token", D1Error = class extends HttpError {
constructor(cause) {
@@ -328,3 +326,7 @@ super(500);
);
return Response.json(this.#txn(queries, resultsFormat));
return Response.json(this.#txn(queries, resultsFormat), {
headers: {
[D1_SESSION_COMMIT_TOKEN_HTTP_HEADER]: await this.state.storage.getCurrentBookmark()
}
});
};
@@ -331,0 +333,0 @@ #isExportPragma(queries) {
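One fix worth noting in the D1 export hunk above is `padStart(2, "0")` when hex-encoding BLOB bytes: without padding, any byte below 0x10 emits a single hex digit and corrupts the `X'...'` literal. A minimal sketch with illustrative bytes:

```js
// Hex-encode bytes for a SQLite X'...' BLOB literal. Bytes below 0x10
// must be zero-padded, otherwise the hex string decodes to fewer bytes.
const bytes = new Uint8Array([0x01, 0xab, 0x0f]);

const unpadded = Array.prototype.map.call(bytes, (b) => b.toString(16)).join("");
// "1abf" (wrong: reads back as only two bytes)

const padded = Array.prototype.map
  .call(bytes, (b) => b.toString(16).padStart(2, "0"))
  .join("");
// "01ab0f" (correct)

console.log(`X'${padded}'`);
```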
@@ -19,2 +19,3 @@ var __defProp = Object.defineProperty;
MiniflareDurableObject,
POST,
PUT
@@ -31,3 +32,4 @@ } from "miniflare:shared";
MAX_VALUE_SIZE_TEST: 1024,
MAX_METADATA_SIZE: 1024
MAX_METADATA_SIZE: 1024,
MAX_BULK_SIZE: 25 * 1024 * 1024
}, KVParams = {
@@ -45,2 +47,3 @@ URL_ENCODED: "urlencoded",
};
var MAX_BULK_GET_KEYS = 100;
@@ -51,4 +54,3 @@ // src/workers/kv/validator.worker.ts
function decodeKey({ key }, query) {
if (query.get(KVParams.URL_ENCODED)?.toLowerCase() !== "true")
return key;
if (query.get(KVParams.URL_ENCODED)?.toLowerCase() !== "true") return key;
try {
@@ -178,2 +180,26 @@ return decodeURIComponent(key);
}
async function processKeyValue(obj, type = "text", withMetadata = !1) {
let decoder = new TextDecoder(), decodedValue = "";
if (obj?.value) {
for await (let chunk of obj?.value)
decodedValue += decoder.decode(chunk, { stream: !0 });
decodedValue += decoder.decode();
}
let val = null, size = decodedValue.length;
try {
val = obj?.value ? type === "json" ? JSON.parse(decodedValue) : decodedValue : null;
} catch {
throw new HttpError2(
400,
`At least one of the requested keys corresponds to a non-${type} value`
);
}
return val && withMetadata ? [
{
value: val,
metadata: obj?.metadata ?? null
},
size
] : [val, size];
}
var KVNamespaceObject = class extends MiniflareDurableObject {
@@ -185,7 +211,43 @@ #storage;
get = async (req, params, url) => {
if (req.method === "POST" && req.body != null) {
let decodedBody = "", decoder = new TextDecoder();
for await (let chunk of req.body)
decodedBody += decoder.decode(chunk, { stream: !0 });
decodedBody += decoder.decode();
let parsedBody = JSON.parse(decodedBody), keys = parsedBody.keys, type = parsedBody?.type;
if (type && type !== "text" && type !== "json") {
let errorStr = `"${type}" is not a valid type. Use "json" or "text"`;
return new Response(errorStr, { status: 400, statusText: errorStr });
}
let obj = {};
if (keys.length > MAX_BULK_GET_KEYS) {
let errorStr = `You can request a maximum of ${MAX_BULK_GET_KEYS} keys`;
return new Response(errorStr, { status: 400, statusText: errorStr });
}
if (keys.length < 1) {
let errorStr = "You must request a minimum of 1 key";
return new Response(errorStr, { status: 400, statusText: errorStr });
}
let totalBytes = 0;
for (let key2 of keys) {
validateGetOptions(key2, { cacheTtl: parsedBody?.cacheTtl });
let entry2 = await this.storage.get(key2), [value, size] = await processKeyValue(
entry2,
parsedBody?.type,
parsedBody?.withMetadata
);
totalBytes += size, obj[key2] = value;
}
let maxValueSize = this.beingTested ? KVLimits.MAX_VALUE_SIZE_TEST : KVLimits.MAX_BULK_SIZE;
if (totalBytes > maxValueSize)
throw new HttpError2(
413,
`Total size of request exceeds the limit of ${maxValueSize / 1024 / 1024}MB`
);
return new Response(JSON.stringify(obj));
}
let key = decodeKey(params, url.searchParams), cacheTtlParam = url.searchParams.get(KVParams.CACHE_TTL), cacheTtl = cacheTtlParam === null ? void 0 : parseInt(cacheTtlParam);
validateGetOptions(key, { cacheTtl });
let entry = await this.storage.get(key);
if (entry === null)
throw new HttpError2(404, "Not Found");
if (entry === null) throw new HttpError2(404, "Not Found");
let headers = new Headers();
@@ -253,3 +315,4 @@ return entry.expiration !== void 0 && headers.set(
__decorateClass([
GET("/:key")
GET("/:key"),
POST("/bulk/get")
], KVNamespaceObject.prototype, "get", 2), __decorateClass([
@@ -256,0 +319,0 @@ PUT("/:key")
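The new `POST /bulk/get` route above backs KV bulk reads. From the Worker side this surfaces as passing an array of keys to `get()`; a rough usage sketch, assuming a binding named `TEST_NAMESPACE` and that the runtime returns a `Map` keyed by the requested names (binding and key names are illustrative):

```js
export default {
  async fetch(request, env) {
    // Bulk read: up to MAX_BULK_GET_KEYS (100) keys per call.
    const values = await env.TEST_NAMESPACE.get(["key-a", "key-b"], {
      type: "json", // "text" (default) or "json", per the validation above
    });
    // Each requested key maps to its decoded value, or null if missing.
    return Response.json(Object.fromEntries(values));
  },
};
```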
@@ -12,3 +12,4 @@ // src/workers/kv/sites.worker.ts
MAX_VALUE_SIZE_TEST: 1024,
MAX_METADATA_SIZE: 1024
MAX_METADATA_SIZE: 1024,
MAX_BULK_SIZE: 25 * 1024 * 1024
}, KVParams = {
@@ -106,6 +107,4 @@ URL_ENCODED: "urlencoded",
let aElement = a[i], bElement = b[i];
if (aElement < bElement)
return -1;
if (aElement > bElement)
return 1;
if (aElement < bElement) return -1;
if (aElement > bElement) return 1;
}
@@ -121,4 +120,3 @@ return a.length - b.length;
keys.sort((a, b) => arrayCompare(a.encodedName, b.encodedName));
for (let key of keys)
delete key.encodedName;
for (let key of keys) delete key.encodedName;
let startAfter = cursor === void 0 ? "" : base64Decode(cursor), startIndex = 0;
@@ -125,0 +123,0 @@ startAfter !== "" && (startIndex = keys.findIndex(({ name }) => name === startAfter), startIndex === -1 && (startIndex = keys.length), startIndex++);
@@ -57,8 +57,10 @@ var __defProp = Object.defineProperty;
maxBatchSize: z.number().min(0).max(100).optional(),
maxBatchTimeout: z.number().min(0).max(30).optional(),
maxBatchTimeout: z.number().min(0).max(60).optional(),
// seconds
maxRetires: z.number().min(0).max(100).optional(),
// deprecated
maxRetries: z.number().min(0).max(100).optional(),
deadLetterQueue: z.ostring(),
retryDelay: QueueMessageDelaySchema
}), QueueConsumerSchema = /* @__PURE__ */ z.intersection(
}).transform((queue) => (queue.maxRetires !== void 0 && (queue.maxRetries = queue.maxRetires), queue)), QueueConsumerSchema = /* @__PURE__ */ z.intersection(
QueueConsumerOptionsSchema,
@@ -79,3 +81,3 @@ z.object({ workerName: z.string() })
// src/workers/queues/broker.worker.ts
var MAX_MESSAGE_SIZE_BYTES = 128 * 1e3, MAX_MESSAGE_BATCH_COUNT = 100, MAX_MESSAGE_BATCH_SIZE = (256 + 32) * 1e3, DEFAULT_BATCH_SIZE = 5, DEFAULT_BATCH_TIMEOUT = 1, DEFAULT_RETRIES = 2, exceptionQueueResponse = {
var MAX_MESSAGE_SIZE_BYTES = 128 * 1e3, MAX_MESSAGE_BATCH_COUNT = 100, MAX_MESSAGE_BATCH_SIZE = 288 * 1e3, DEFAULT_BATCH_SIZE = 5, DEFAULT_BATCH_TIMEOUT = 1, DEFAULT_RETRIES = 2, exceptionQueueResponse = {
outcome: "exception",
@@ -109,4 +111,3 @@ retryBatch: { retry: !1 },
let format = headers.get("X-Msg-Delay-Secs");
if (!format)
return;
if (!format) return;
let result = QueueMessageDelaySchema.safeParse(Number(format));
@@ -204,3 +205,3 @@ if (!result.success)
assert(consumer !== void 0);
let batchSize = consumer.maxBatchSize ?? DEFAULT_BATCH_SIZE, maxAttempts = (consumer.maxRetires ?? DEFAULT_RETRIES) + 1, maxAttemptsS = maxAttempts === 1 ? "" : "s", batch = this.#messages.splice(0, batchSize), startTime = Date.now(), endTime, response;
let batchSize = consumer.maxBatchSize ?? DEFAULT_BATCH_SIZE, maxAttempts = (consumer.maxRetries ?? DEFAULT_RETRIES) + 1, maxAttemptsS = maxAttempts === 1 ? "" : "s", batch = this.#messages.splice(0, batchSize), startTime = Date.now(), endTime, response;
try {
@@ -225,10 +226,9 @@ response = await this.#dispatchBatch(consumer.workerName, batch), endTime = Date.now();
this.timers.setTimeout(fn, delay * 1e3);
} else
consumer.deadLetterQueue !== void 0 ? (await this.logWithLevel(
LogLevel.WARN,
`Moving message "${message.id}" on queue "${this.name}" to dead letter queue "${consumer.deadLetterQueue}" after ${maxAttempts} failed attempt${maxAttemptsS}...`
), toDeadLetterQueue.push(message)) : await this.logWithLevel(
LogLevel.WARN,
`Dropped message "${message.id}" on queue "${this.name}" after ${maxAttempts} failed attempt${maxAttemptsS}!`
);
} else consumer.deadLetterQueue !== void 0 ? (await this.logWithLevel(
LogLevel.WARN,
`Moving message "${message.id}" on queue "${this.name}" to dead letter queue "${consumer.deadLetterQueue}" after ${maxAttempts} failed attempt${maxAttemptsS}...`
), toDeadLetterQueue.push(message)) : await this.logWithLevel(
LogLevel.WARN,
`Dropped message "${message.id}" on queue "${this.name}" after ${maxAttempts} failed attempt${maxAttemptsS}!`
);
let acked = batch.length - failedMessages;
@@ -256,4 +256,3 @@ if (await this.logWithLevel(
if (this.#pendingFlush !== void 0) {
if (this.#pendingFlush.immediate || batchHasSpace)
return;
if (this.#pendingFlush.immediate || batchHasSpace) return;
this.timers.clearTimeout(this.#pendingFlush.timeout), this.#pendingFlush = void 0;
@@ -273,4 +272,3 @@ }
message = async (req) => {
if (this.#maybeConsumer === void 0)
return new Response();
if (this.#maybeConsumer === void 0) return new Response();
validateMessageSize(req.headers);
@@ -284,4 +282,3 @@ let contentType = validateContentType(req.headers), delay = validateMessageDelay(req.headers) ?? this.#maybeProducer?.deliveryDelay, body = Buffer2.from(await req.arrayBuffer());
batch = async (req) => {
if (this.#maybeConsumer === void 0)
return new Response();
if (this.#maybeConsumer === void 0) return new Response();
validateBatchSize(req.headers);
@@ -288,0 +285,0 @@ let delay = validateMessageDelay(req.headers) ?? this.#maybeProducer?.deliveryDelay, body = QueuesBatchRequestSchema.parse(await req.json());
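The consumer schema above keeps the misspelled `maxRetires` field as a deprecated alias and copies it onto `maxRetries` in a zod `.transform()`. The alias pattern in isolation (fields abbreviated; a sketch, not the full schema):

```js
import { z } from "zod";

// Accept both the deprecated misspelling and the corrected field, then
// normalise onto `maxRetries` so downstream code reads a single name.
const ConsumerOptionsSchema = z
  .object({
    maxRetires: z.number().min(0).max(100).optional(), // deprecated
    maxRetries: z.number().min(0).max(100).optional(),
  })
  .transform((queue) => {
    if (queue.maxRetires !== undefined) queue.maxRetries = queue.maxRetires;
    return queue;
  });

console.log(ConsumerOptionsSchema.parse({ maxRetires: 3 }));
// → { maxRetires: 3, maxRetries: 3 }
```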
@@ -494,4 +494,3 @@ var __defProp = Object.defineProperty;
for (let i = 0; i < x.length; i++)
if (x.charCodeAt(i) >= 256)
return x.length * 2;
if (x.charCodeAt(i) >= 256) return x.length * 2;
return x.length;
@@ -521,13 +520,10 @@ }
let ranges = parseRanges(options.rangeHeader, size);
if (ranges?.length === 1)
return ranges[0];
if (ranges?.length === 1) return ranges[0];
} else if (options.range !== void 0) {
let { offset, length, suffix } = options.range;
if (suffix !== void 0) {
if (suffix <= 0)
throw new InvalidRange();
if (suffix <= 0) throw new InvalidRange();
suffix > size && (suffix = size), offset = size - suffix, length = suffix;
}
if (offset === void 0 && (offset = 0), length === void 0 && (length = size - offset), offset < 0 || offset > size || length <= 0)
throw new InvalidRange();
if (offset === void 0 && (offset = 0), length === void 0 && (length = size - offset), offset < 0 || offset > size || length <= 0) throw new InvalidRange();
return offset + length > size && (length = size - offset), { start: offset, end: offset + length - 1 };
@@ -542,4 +538,3 @@ }
metadataSize(customMetadata) {
if (customMetadata === void 0)
return this;
if (customMetadata === void 0) return this;
let metadataLength = 0;
@@ -574,4 +569,3 @@ for (let [key, value] of Object.entries(customMetadata))
async transform(chunk, controller) {
for (let hash of hashes)
await hash.writer.write(chunk);
for (let hash of hashes) await hash.writer.write(chunk);
controller.enqueue(chunk);
@@ -600,4 +594,3 @@ },
let hash = createHash("md5");
for (let md5Hex of md5Hexes)
hash.update(md5Hex, "hex");
for (let md5Hex of md5Hexes) hash.update(md5Hex, "hex");
return `${hash.digest("hex")}-${md5Hexes.length}`;
@@ -610,4 +603,3 @@ }
let metadataSize = parseInt(req.headers.get(R2Headers.METADATA_SIZE));
if (Number.isNaN(metadataSize))
throw new InvalidMetadata();
if (Number.isNaN(metadataSize)) throw new InvalidMetadata();
assert2(req.body !== null);
@@ -619,4 +611,3 @@ let body = req.body, [metadataBuffer, value] = await readPrefix(body, metadataSize), metadataJson = decoder.decode(metadataBuffer);
let header = req.headers.get(R2Headers.REQUEST);
if (header === null)
throw new InvalidMetadata();
if (header === null) throw new InvalidMetadata();
return R2BindingRequestSchema.parse(JSON.parse(header));
@@ -730,6 +721,4 @@ }
let partRows = stmtDeletePartsByKey({ object_key: key });
for (let partRow of partRows)
oldBlobIds.push(partRow.blob_id);
} else
maybeOldBlobId !== void 0 && oldBlobIds.push(maybeOldBlobId);
for (let partRow of partRows) oldBlobIds.push(partRow.blob_id);
} else maybeOldBlobId !== void 0 && oldBlobIds.push(maybeOldBlobId);
}
@@ -799,4 +788,3 @@ return oldBlobIds;
for (let { part } of selectedParts) {
if (partNumberSet.has(part))
throw new InternalError();
if (partNumberSet.has(part)) throw new InternalError();
partNumberSet.add(part);
@@ -826,6 +814,4 @@ }
let partRows2 = stmtDeletePartsByKey({ object_key: key });
for (let partRow of partRows2)
oldBlobIds.push(partRow.blob_id);
} else
maybeOldBlobId !== void 0 && oldBlobIds.push(maybeOldBlobId);
for (let partRow of partRows2) oldBlobIds.push(partRow.blob_id);
} else maybeOldBlobId !== void 0 && oldBlobIds.push(maybeOldBlobId);
let totalSize = parts.reduce((acc, { size }) => acc + size, 0), etag = generateMultipartEtag(
@@ -852,4 +838,3 @@ parts.map(({ checksum_md5 }) => checksum_md5)
let partRows = stmtDeleteUnlinkedPartsByUploadId({ upload_id });
for (let partRow of partRows)
oldBlobIds.push(partRow.blob_id);
for (let partRow of partRows) oldBlobIds.push(partRow.blob_id);
return stmtUpdateUploadState({
@@ -946,4 +931,3 @@ upload_id,
let row = get(this.#stmts.getByKey({ key }));
if (row === void 0)
throw new NoSuchKey();
if (row === void 0) throw new NoSuchKey();
let range = { offset: 0, length: row.size };
@@ -955,4 +939,3 @@ return new InternalR2Object(row, range);
let result = this.#stmts.getPartsByKey(key);
if (result === void 0)
throw new NoSuchKey();
if (result === void 0) throw new NoSuchKey();
let { row, parts } = result, defaultR2Range = { offset: 0, length: row.size };
@@ -976,4 +959,3 @@ try {
value = this.#assembleMultipartValue(parts, range ?? defaultRange);
} else if (value = await this.blob.get(row.blob_id, range), value === null)
throw new NoSuchKey();
} else if (value = await this.blob.get(row.blob_id, range), value === null) throw new NoSuchKey();
return new InternalR2ObjectBody(row, value, r2Range);
@@ -1004,4 +986,3 @@ }
if (oldBlobIds !== void 0)
for (let blobId2 of oldBlobIds)
this.#backgroundDelete(blobId2);
for (let blobId2 of oldBlobIds) this.#backgroundDelete(blobId2);
return new InternalR2Object(row);
@@ -1011,7 +992,5 @@ }
Array.isArray(keys) || (keys = [keys]);
for (let key of keys)
validate.key(key);
for (let key of keys) validate.key(key);
let oldBlobIds = this.#stmts.deleteByKeys(keys);
for (let blobId of oldBlobIds)
this.#backgroundDelete(blobId);
for (let blobId of oldBlobIds) this.#backgroundDelete(blobId);
}
@@ -1096,4 +1075,3 @@ #listWithoutDelimiterQuery(excludeHttp, excludeCustom) {
);
for (let blobId of oldBlobIds)
this.#backgroundDelete(blobId);
for (let blobId of oldBlobIds) this.#backgroundDelete(blobId);
return new InternalR2Object(newRow);
@@ -1104,4 +1082,3 @@ }
let oldBlobIds = this.#stmts.abortMultipartUpload(key, uploadId);
for (let blobId of oldBlobIds)
this.#backgroundDelete(blobId);
for (let blobId of oldBlobIds) this.#backgroundDelete(blobId);
}
@@ -1108,0 +1085,0 @@ get = async (req) => {
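The `generateMultipartEtag` helper visible above follows the common S3-style convention: MD5 the concatenated raw MD5 digests of each part, then append the part count. A standalone Node sketch (part contents are illustrative):

```js
import { createHash } from "node:crypto";

// S3-style multipart ETag: md5(concat(md5(part_i))) + "-" + partCount.
// Each part's hex digest is fed back in as raw bytes ("hex" encoding).
function multipartEtag(md5Hexes) {
  const hash = createHash("md5");
  for (const md5Hex of md5Hexes) hash.update(md5Hex, "hex");
  return `${hash.digest("hex")}-${md5Hexes.length}`;
}

const partHashes = ["part-1", "part-2"].map((p) =>
  createHash("md5").update(p).digest("hex")
);
console.log(multipartEtag(partHashes)); // "<hex digest>-2"
```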
@@ -8,3 +8,6 @@ // src/workers/shared/blob.worker.ts
function viewToBuffer(view) {
return view.buffer.slice(view.byteOffset, view.byteOffset + view.byteLength);
return view.buffer.slice(
view.byteOffset,
view.byteOffset + view.byteLength
);
}
@@ -50,4 +53,3 @@ function base64Encode(value) {
let headers = range === void 0 ? {} : rangeHeaders(range), res = await fetcher.fetch(url, { headers });
if (res.status === 404)
return null;
if (res.status === 404) return null;
if (assert(res.ok && res.body !== null), range !== void 0 && res.status !== 206) {
@@ -86,4 +88,3 @@ let contentLength = parseInt(res.headers.get("Content-Length"));
let res = await fetcher.fetch(url, { method: "HEAD" });
if (res.status === 404)
return null;
if (res.status === 404) return null;
assert(res.ok);
@@ -151,7 +152,5 @@ let contentLength = parseInt(res.headers.get("Content-Length"));
async delete(id) {
if (this.#stickyBlobs)
return;
if (this.#stickyBlobs) return;
let idURL = this.idURL(id);
if (idURL === null)
return;
if (idURL === null) return;
let res = await this.#fetcher.fetch(idURL, { method: "DELETE" });
@@ -215,4 +214,3 @@ assert(res.ok || res.status === 404);
let result;
for (let row of cursor)
result ??= row;
for (let row of cursor) result ??= row;
return result;
@@ -294,4 +292,3 @@ }
let row = get(this.#stmts.getByKey(key));
if (row === void 0)
return null;
if (row === void 0) return null;
if (this.#hasExpired(row))
@@ -331,4 +328,3 @@ return drain(this.#stmts.deleteByKey({ key })), this.#backgroundDelete(row.blob_id), null;
}), rows = Array.from(rowsCursor), expiredRows = this.#stmts.deleteExpired({ now });
for (let row of expiredRows)
this.#backgroundDelete(row.blob_id);
for (let row of expiredRows) this.#backgroundDelete(row.blob_id);
let hasMoreRows = rows.length === opts.limit + 1;
@@ -343,8 +339,4 @@ rows.splice(opts.limit, 1);
function testRegExps(matcher, value) {
for (let exclude of matcher.exclude)
if (exclude.test(value))
return !1;
for (let include of matcher.include)
if (include.test(value))
return !0;
for (let exclude of matcher.exclude) if (exclude.test(value)) return !1;
for (let include of matcher.include) if (include.test(value)) return !0;
return !1;
@@ -378,4 +370,3 @@ }
let url = new URL(req.url), methodRoutes = this.#routes?.get(req.method);
if (methodRoutes === void 0)
return new Response(null, { status: 405 });
if (methodRoutes === void 0) return new Response(null, { status: 405 });
let handlers = this;
@@ -385,4 +376,3 @@ try {
let match = path.exec(url.pathname);
if (match !== null)
return await handlers[key](req, match.groups, url);
if (match !== null) return await handlers[key](req, match.groups, url);
}
@@ -431,9 +421,7 @@ return new Response(null, { status: 404 });
clearTimeout(handle) {
if (typeof handle == "number")
return clearTimeout(handle);
if (typeof handle == "number") return clearTimeout(handle);
this.#fakePendingTimeouts.delete(handle[kFakeTimerHandle]);
}
queueMicrotask(closure) {
if (this.#fakeTimestamp === void 0)
return queueMicrotask(closure);
if (this.#fakeTimestamp === void 0) return queueMicrotask(closure);
let result = closure();
@@ -503,4 +491,3 @@ result instanceof Promise && (this.#fakeRunningTasks.add(result), result.finally(() => this.#fakeRunningTasks.delete(result)));
get blob() {
if (this.#blob !== void 0)
return this.#blob;
if (this.#blob !== void 0) return this.#blob;
let maybeBlobsService = this.env[SharedBindings.MAYBE_SERVICE_BLOBS], stickyBlobs = !!this.env[SharedBindings.MAYBE_JSON_ENABLE_STICKY_BLOBS];
@@ -548,4 +535,3 @@ return assert5(
let controlOp = req?.cf?.miniflare?.controlOp;
if (controlOp !== void 0)
return this.#handleControlOp(controlOp);
if (controlOp !== void 0) return this.#handleControlOp(controlOp);
}
@@ -574,28 +560,21 @@ let name = req.cf?.miniflare?.name;
let prefixMatch = rangePrefixRegexp.exec(rangeHeader);
if (prefixMatch === null)
return;
if (rangeHeader = rangeHeader.substring(prefixMatch[0].length), rangeHeader.trimStart() === "")
return [];
if (prefixMatch === null) return;
if (rangeHeader = rangeHeader.substring(prefixMatch[0].length), rangeHeader.trimStart() === "") return [];
let ranges = rangeHeader.split(","), result = [];
for (let range of ranges) {
let match = rangeRegexp.exec(range);
if (match === null)
return;
if (match === null) return;
let { start, end } = match.groups;
if (start !== void 0 && end !== void 0) {
let rangeStart = parseInt(start), rangeEnd = parseInt(end);
if (rangeStart > rangeEnd || rangeStart >= length)
return;
if (rangeStart > rangeEnd || rangeStart >= length) return;
rangeeEnd >= length && (rangeEnd = length - 1), result.push({ start: rangeStart, end: rangeEnd });
} else if (start !== void 0 && end === void 0) {
let rangeStart = parseInt(start);
if (rangeStart >= length)
return;
if (rangeStart >= length) return;
result.push({ start: rangeStart, end: length - 1 });
} else if (start === void 0 && end !== void 0) {
let suffix = parseInt(end);
if (suffix >= length)
return [];
if (suffix === 0)
continue;
if (suffix >= length) return [];
if (suffix === 0) continue;
result.push({ start: length - suffix, end: length - 1 });
@@ -635,4 +614,3 @@ } else
let resolve;
for (; (resolve = this.drainQueue.shift()) !== void 0; )
resolve();
for (; (resolve = this.drainQueue.shift()) !== void 0; ) resolve();
}
@@ -666,4 +644,3 @@ }
let resolve;
for (; (resolve = this.resolveQueue.shift()) !== void 0; )
resolve();
for (; (resolve = this.resolveQueue.shift()) !== void 0; ) resolve();
}
@@ -670,0 +647,0 @@ }
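The range parsing above implements standard `Range: bytes=` semantics: explicit start-end pairs are clamped to the resource length, open-ended ranges run to end-of-file, suffix ranges count back from the end, and any malformed or unsatisfiable range invalidates the whole header. A condensed sketch under those rules (regexps simplified relative to the bundle):

```js
// Parse "bytes=..." for a resource of `length` bytes. Returns an array of
// { start, end } ranges, [] when ranges can be ignored, or undefined when
// the header is malformed/unsatisfiable and should be dropped entirely.
function parseRanges(rangeHeader, length) {
  const prefix = /^ *bytes *= */.exec(rangeHeader);
  if (prefix === null) return;
  const rest = rangeHeader.slice(prefix[0].length);
  if (rest.trim() === "") return [];
  const result = [];
  for (const range of rest.split(",")) {
    const match = /^ *(\d*) *- *(\d*) *$/.exec(range);
    if (match === null) return;
    const [, start, end] = match;
    if (start !== "" && end !== "") {
      const s = parseInt(start), e = parseInt(end);
      if (s > e || s >= length) return;
      result.push({ start: s, end: Math.min(e, length - 1) });
    } else if (start !== "") {
      const s = parseInt(start);
      if (s >= length) return;
      result.push({ start: s, end: length - 1 });
    } else if (end !== "") {
      const suffix = parseInt(end);
      if (suffix >= length) return [];
      if (suffix === 0) continue;
      result.push({ start: length - suffix, end: length - 1 });
    } else return;
  }
  return result;
}

console.log(parseRanges("bytes=0-4,-3", 10));
// → [{ start: 0, end: 4 }, { start: 7, end: 9 }]
```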
{
"name": "miniflare",
"version": "0.0.0-701d41ee8",
"version": "0.0.0-7028c3705",
"description": "Fun, full-featured, fully-local simulator for Cloudflare Workers",
@@ -34,18 +34,18 @@ "keywords": [
"@cspotcode/source-map-support": "0.8.1",
"acorn": "^8.8.0",
"acorn-walk": "^8.2.0",
"capnp-ts": "^0.7.0",
"exit-hook": "^2.2.1",
"glob-to-regexp": "^0.4.1",
"stoppable": "^1.1.0",
"undici": "^5.28.4",
"workerd": "1.20240925.0",
"ws": "^8.17.1",
"youch": "^3.2.2",
"zod": "^3.22.3"
"acorn": "8.14.0",
"acorn-walk": "8.3.2",
"exit-hook": "2.2.1",
"glob-to-regexp": "0.4.1",
"sharp": "^0.33.5",
"stoppable": "1.1.0",
"undici": "^5.28.5",
"workerd": "1.20250525.0",
"ws": "8.18.0",
"youch": "3.3.4",
"zod": "3.22.3"
},
"devDependencies": {
"@ava/typescript": "^4.1.0",
"@cloudflare/workers-types": "^4.20240925.0",
"@microsoft/api-extractor": "^7.47.0",
"@cloudflare/workers-types": "^4.20250525.0",
"@microsoft/api-extractor": "7.49.1",
"@types/debug": "^4.1.7",
@@ -56,4 +56,3 @@ "@types/estree": "^1.0.0",
"@types/mime": "^3.0.4",
"@types/node": "20.8.3",
"@types/rimraf": "^4.0.5",
"@types/node": "^20.17.32",
"@types/stoppable": "^1.1.1",
@@ -65,25 +64,30 @@ "@types/which": "^2.0.1",
"ava": "^6.0.1",
"capnpc-ts": "^0.7.0",
"capnp-es": "^0.0.7",
"concurrently": "^8.2.2",
"devalue": "^4.3.0",
"devtools-protocol": "^0.0.1182435",
"esbuild": "0.17.19",
"eslint": "^8.49.0",
"esbuild": "0.25.4",
"eslint": "^8.57.1",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-es": "^4.1.0",
"eslint-plugin-import": "2.26.x",
"eslint-plugin-prettier": "^5.0.1",
"expect-type": "^0.15.0",
"get-port": "^7.1.0",
"heap-js": "^2.5.0",
"http-cache-semantics": "^4.1.0",
"kleur": "^4.1.5",
"mime": "^3.0.0",
"postal-mime": "^2.4.3",
"pretty-bytes": "^6.0.0",
"rimraf": "^5.0.1",
"rimraf": "^6.0.1",
"source-map": "^0.6.1",
"ts-dedent": "^2.2.0",
"typescript": "^5.7.2",
"which": "^2.0.2",
"@cloudflare/workers-shared": "0.0.0-701d41ee8",
"@cloudflare/kv-asset-handler": "0.3.4"
"@cloudflare/kv-asset-handler": "0.4.0",
"@cloudflare/workers-shared": "0.0.0-7028c3705",
"@cloudflare/workflows-shared": "0.3.4"
},
"engines": {
"node": ">=16.13"
"node": ">=18.0.0"
},
@@ -101,3 +105,3 @@ "volta": {
"build": "node scripts/build.mjs && pnpm run types:build",
"capnp:workerd": "capnpc -o ts src/runtime/config/workerd.capnp",
"capnp:workerd": "node scripts/build-capnp.mjs",
"check:lint": "eslint --max-warnings=0 \"{src,test}/**/*.ts\" \"scripts/**/*.{js,mjs}\" \"types/**/*.ts\"",
@@ -108,3 +112,4 @@ "check:type": "tsc",
"lint:fix": "pnpm run check:lint --fix",
"test": "node scripts/build.mjs && ava && rimraf ./.tmp",
"test": "ava",
"posttest": "rimraf ./.tmp",
"test:ci": "pnpm run test",
@@ -111,0 +116,0 @@ "types:build": "node scripts/types.mjs tsconfig.json && node scripts/types.mjs src/workers/tsconfig.json"
@@ -110,7 +110,9 @@ # 🔥 Miniflare
- If this is `undefined` or `false`, data will be stored in-memory and only
- If this is `undefined`, it defaults to `true` if `defaultPersistRoot` is set
or otherwise defaults to `false`.
- If this is `false`, data will be stored in-memory and only
persist between `Miniflare#setOptions()` calls, not restarts nor
`new Miniflare` instances.
- If this is `true`, data will be stored on the file-system, in the `$PWD/.mf`
directory.
- If this is `true`, data will be stored in a subdirectory of the `defaultPersistRoot` path if `defaultPersistRoot` is set,
or otherwise in a subdirectory of `$PWD/.mf`.
- If this looks like a URL, then:
@@ -218,2 +220,16 @@ - If the protocol is `memory:`, data will be stored in-memory as above.
### `interface WorkflowOptions`
- `name: string`
The name of the Workflow.
- `className: string`
The name of the class exported from the Worker that implements the `WorkflowEntrypoint`.
- `scriptName?: string`
The name of the script that includes the `WorkflowEntrypoint`. Optional; defaults to the current script if not set.
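Combined with the `workflows?: WorkflowOptions[]` option documented under "Workflows" below, a minimal configuration might look like the following sketch (script path and names are illustrative):

```js
import { Miniflare } from "miniflare";

const mf = new Miniflare({
  modules: true,
  scriptPath: "./worker.js", // module exporting the entrypoint class
  workflows: [
    {
      name: "my-workflow", // Workflow name
      className: "MyWorkflow", // class implementing WorkflowEntrypoint
      // scriptName omitted: defaults to the current script
    },
  ],
});
```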
#### Core
@@ -317,3 +333,3 @@
- `serviceBindings?: Record<string, string | typeof kCurrentWorker | { name: string | typeof kCurrentWorker, entrypoint?: string } | { network: Network } | { external: ExternalServer } | { disk: DiskDirectory } | (request: Request, instance: Miniflare) => Awaitable<Response>>`
- `serviceBindings?: Record<string, string | typeof kCurrentWorker | { name: string | typeof kCurrentWorker, entrypoint?: string } | { network: Network } | { external: ExternalServer } | { disk: DiskDirectory } | { node: (req: http.IncomingMessage, res: http.ServerResponse, miniflare: Miniflare) => Awaitable<void> } | (request: Request, miniflare: Miniflare) => Awaitable<Response>>`
@@ -348,5 +364,6 @@ Record mapping binding name to service designators to inject as
directory.
- If the designator is a function, requests will be dispatched to your custom
handler. This allows you to access data and functions defined in Node.js
from your Worker. Note `instance` will be the `Miniflare` instance
- If the designator is an object of the form `{ node: (req: http.IncomingMessage, res: http.ServerResponse, miniflare: Miniflare) => Awaitable<void> }`, requests will be dispatched to your custom Node handler. This allows you to access data and functions defined in Node.js from your Worker using Node.js `req` and `res` objects. Note, `miniflare` will be the `Miniflare` instance dispatching the request.
- If the designator is a function with the signature `(request: Request, miniflare: Miniflare) => Response`, requests will be dispatched to your custom
fetch handler. This allows you to access data and functions defined in Node.js
from your Worker using fetch `Request` and `Response` objects. Note, `miniflare` will be the `Miniflare` instance
dispatching the request.
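For instance, the two designator shapes side by side (binding names and handler contents are illustrative):

```js
import { Miniflare } from "miniflare";

const mf = new Miniflare({
  modules: true,
  script: `export default {
    fetch(request, env) { return env.API.fetch(request); }
  }`,
  serviceBindings: {
    // Node-style designator: classic (req, res) handler
    API: {
      node: (req, res, miniflare) => {
        res.writeHead(200, { "Content-Type": "text/plain" });
        res.end("handled in Node.js");
      },
    },
    // Fetch-style designator: web Request in, Response out
    FETCH_API: async (request, miniflare) => new Response("handled via fetch"),
  },
});
```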
@@ -462,3 +479,3 @@
- `outboundService?: string | { network: Network } | { external: ExternalServer } | { disk: DiskDirectory } | (request: Request) => Awaitable<Response>`
- `outboundService?: string | { network: Network } | { external: ExternalServer } | { disk: DiskDirectory } | { node: (req: http.IncomingMessage, res: http.ServerResponse, miniflare: Miniflare) => Awaitable<void> } | (request: Request, miniflare: Miniflare) => Awaitable<Response>`
@@ -484,2 +501,25 @@ Dispatch this Worker's global `fetch()` and `connect()` requests to the
- `defaultPersistRoot?: string`
Specifies the default directory where Miniflare will write persisted data when persistence is enabled.
```js
// Without `defaultPersistRoot`
new Miniflare({
  kvPersist: undefined, // → "/(tmp)/kv"
  d1Persist: true, // → "$PWD/.mf/d1"
  r2Persist: false, // → "/(tmp)/r2"
  cachePersist: "/my-cache", // → "/my-cache"
});

// With `defaultPersistRoot`
new Miniflare({
  defaultPersistRoot: "/storage",
  kvPersist: undefined, // → "/storage/kv"
  d1Persist: true, // → "/storage/d1"
  r2Persist: false, // → "/(tmp)/r2"
  cachePersist: "/my-cache", // → "/my-cache"
});
```
#### Cache
@@ -583,2 +623,25 @@
#### Assets
- `directory?: string`
Path to serve Workers static asset files from.
- `binding?: string`
Binding name to inject as a `Fetcher` binding to allow access to static assets from within the Worker.
- `assetOptions?: { html_handling?: HTMLHandlingOptions, not_found_handling?: NotFoundHandlingOptions }`
Configuration for file-based asset routing. See the [docs](https://developers.cloudflare.com/workers/static-assets/routing/#routing-configuration) for the available options.
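A sketch of wiring these up, assuming the fields sit under a per-worker `assets` key and using Wrangler-style handling values (the directory, binding name, and option values are illustrative):

```js
import { Miniflare } from "miniflare";

const mf = new Miniflare({
  modules: true,
  script: `export default {
    fetch(request, env) {
      // Serve static files through the injected Fetcher binding
      return env.ASSETS.fetch(request);
    }
  }`,
  assets: {
    directory: "./public",
    binding: "ASSETS",
    assetOptions: {
      html_handling: "auto-trailing-slash",
      not_found_handling: "404-page",
    },
  },
});
```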
#### Pipelines
- `pipelines?: Record<string, PipelineOptions> | string[]`
Record mapping binding name to a Pipeline. Different Workers may bind to the same Pipeline with different binding
names. If a `string[]` of Pipeline names is given, the binding and Pipeline name are assumed to be the same.
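For example, using the shorthand `string[]` form where binding name and Pipeline name coincide (the name and payload are illustrative; the binding is assumed to expose the Pipelines `send()` method):

```js
import { Miniflare } from "miniflare";

const mf = new Miniflare({
  modules: true,
  script: `export default {
    async fetch(request, env) {
      // env.MY_PIPELINE is the Pipeline bound below
      await env.MY_PIPELINE.send([{ hello: "world" }]);
      return new Response("sent");
    }
  }`,
  pipelines: ["MY_PIPELINE"], // binding name == Pipeline name
});
```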
#### Workflows
- `workflows?: WorkflowOptions[]`
Configuration for one or more Workflows in your project.
#### Analytics Engine, Sending Email, Vectorize and Workers for Platforms
@@ -724,2 +787,6 @@
- `workflowsPersist?: Persistence`
Where to persist data stored in Workflows. See docs for `Persistence`.
#### Analytics Engine, Browser Rendering, Sending Email, Vectorize, Workers AI and Workers for Platforms
@@ -726,0 +793,0 @@
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package

URL strings
Supply chain risk: Package contains fragments of external URLs or IP addresses, which the package may be accessing at runtime.
Found 1 instance in 1 package

Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package