miniflare

miniflare - npm Package Compare versions

Comparing version 0.0.0-b34b24629 to 0.0.0-b35265a79

dist/src/workers/workflows/binding.worker.js

dist/src/workers/cache/cache.worker.js

@@ -29,5 +29,5 @@ var __create = Object.create;

// ../../node_modules/.pnpm/http-cache-semantics@4.1.0/node_modules/http-cache-semantics/index.js
// ../../node_modules/.pnpm/http-cache-semantics@4.1.1/node_modules/http-cache-semantics/index.js
var require_http_cache_semantics = __commonJS({
"../../node_modules/.pnpm/http-cache-semantics@4.1.0/node_modules/http-cache-semantics/index.js"(exports, module) {
"../../node_modules/.pnpm/http-cache-semantics@4.1.1/node_modules/http-cache-semantics/index.js"(exports, module) {
"use strict";

@@ -41,2 +41,3 @@ var statusCodeCacheableByDefault = /* @__PURE__ */ new Set([

301,
308,
404,

@@ -94,8 +95,7 @@ 405,

let cc = {};
if (!header)
return cc;
let parts = header.trim().split(/\s*,\s*/);
if (!header) return cc;
let parts = header.trim().split(/,/);
for (let part of parts) {
let [k, v] = part.split(/\s*=\s*/, 2);
cc[k] = v === void 0 ? !0 : v.replace(/^"|"$/g, "");
let [k, v] = part.split(/=/, 2);
cc[k.trim()] = v === void 0 ? !0 : v.trim().replace(/^"|"$/g, "");
}

@@ -176,4 +176,3 @@ return cc;

for (let name of fields)
if (req.headers[name] !== this._reqHeaders[name])
return !1;
if (req.headers[name] !== this._reqHeaders[name]) return !1;
return !0;

@@ -271,6 +270,4 @@ }

_fromObject(obj) {
if (this._responseTime)
throw Error("Reinitialized");
if (!obj || obj.v !== 1)
throw Error("Invalid serialization");
if (this._responseTime) throw Error("Reinitialized");
if (!obj || obj.v !== 1) throw Error("Invalid serialization");
this._responseTime = obj.t, this._isShared = obj.sh, this._cacheHeuristic = obj.ch, this._immutableMinTtl = obj.imm !== void 0 ? obj.imm : 24 * 3600 * 1e3, this._status = obj.st, this._resHeaders = obj.resh, this._rescc = obj.rescc, this._method = obj.m, this._url = obj.u, this._host = obj.h, this._noAuthorization = obj.a, this._reqHeaders = obj.reqh, this._reqcc = obj.reqcc;

@@ -313,4 +310,3 @@ }

}
} else
this._resHeaders["last-modified"] && !headers["if-modified-since"] && (headers["if-modified-since"] = this._resHeaders["last-modified"]);
} else this._resHeaders["last-modified"] && !headers["if-modified-since"] && (headers["if-modified-since"] = this._resHeaders["last-modified"]);
return headers;

@@ -483,4 +479,3 @@ }

let result = {};
for (let [key, value] of headers)
result[key.toLowerCase()] = value;
for (let [key, value] of headers) result[key.toLowerCase()] = value;
return result;

@@ -526,4 +521,3 @@ }

}
var CR = "\r".charCodeAt(0), LF = `
`.charCodeAt(0), STATUS_REGEXP = /^HTTP\/\d(?:\.\d)? (?<rawStatusCode>\d+) (?<statusText>.*)$/;
var CR = 13, LF = 10, STATUS_REGEXP = /^HTTP\/\d(?:\.\d)? (?<rawStatusCode>\d+) (?<statusText>.*)$/;
async function parseHttpResponse(stream) {

@@ -534,4 +528,3 @@ let buffer = Buffer2.alloc(0), blankLineIndex = -1;

(_value, index) => buffer[index] === CR && buffer[index + 1] === LF && buffer[index + 2] === CR && buffer[index + 3] === LF
), blankLineIndex !== -1)
break;
), blankLineIndex !== -1) break;
assert(blankLineIndex !== -1, "Expected to find blank line in HTTP message");

@@ -584,4 +577,3 @@ let rawStatusHeaders = buffer.subarray(0, blankLineIndex).toString(), [rawStatus, ...rawHeaders] = rawStatusHeaders.split(`\r

let cacheKey = getCacheKey(req);
if (isSitesRequest(req))
throw new CacheMiss();
if (isSitesRequest(req)) throw new CacheMiss();
let resHeaders, resRanges, cached = await this.storage.get(cacheKey, ({ size, headers }) => {

@@ -598,4 +590,3 @@ resHeaders = new Headers(headers);

});
if (cached?.metadata === void 0)
throw new CacheMiss();
if (cached?.metadata === void 0) throw new CacheMiss();
return assert(resHeaders !== void 0), resHeaders.set("CF-Cache-Status", "HIT"), resRanges ??= [], getMatchResponse(req.headers, {

@@ -612,4 +603,3 @@ status: cached.metadata.status,

let cacheKey = getCacheKey(req);
if (isSitesRequest(req))
throw new CacheMiss();
if (isSitesRequest(req)) throw new CacheMiss();
assert(req.body !== null);

@@ -651,4 +641,3 @@ let res = await parseHttpResponse(req.body), body = res.body;

let cacheKey = getCacheKey(req);
if (!await this.storage.delete(cacheKey))
throw new PurgeFailure();
if (!await this.storage.delete(cacheKey)) throw new PurgeFailure();
return new Response(null);

@@ -655,0 +644,0 @@ };

@@ -70,4 +70,3 @@ // ../../node_modules/.pnpm/kleur@4.1.5/node_modules/kleur/colors.mjs

function isCompressedByCloudflareFL(contentTypeHeader) {
if (!contentTypeHeader)
return !0;
if (!contentTypeHeader) return !0;
let [contentType] = contentTypeHeader.split(";");

@@ -203,15 +202,10 @@ return compressedByCloudflareFL.has(contentType);

for (let route of routes) {
if (route.protocol && route.protocol !== url.protocol)
continue;
if (route.protocol && route.protocol !== url.protocol) continue;
if (route.allowHostnamePrefix) {
if (!url.hostname.endsWith(route.hostname))
continue;
} else if (url.hostname !== route.hostname)
continue;
if (!url.hostname.endsWith(route.hostname)) continue;
} else if (url.hostname !== route.hostname) continue;
let path = url.pathname + url.search;
if (route.allowPathSuffix) {
if (!path.startsWith(route.path))
continue;
} else if (path !== route.path)
continue;
if (!path.startsWith(route.path)) continue;
} else if (path !== route.path) continue;
return route.target;

@@ -290,4 +284,3 @@ }

function unflatten(parsed, revivers) {
if (typeof parsed == "number")
return hydrate(parsed, !0);
if (typeof parsed == "number") return hydrate(parsed, !0);
if (!Array.isArray(parsed) || parsed.length === 0)

@@ -300,16 +293,9 @@ throw new Error("Invalid input");

function hydrate(index, standalone = !1) {
if (index === -1)
return;
if (index === -3)
return NaN;
if (index === -4)
return 1 / 0;
if (index === -5)
return -1 / 0;
if (index === -6)
return -0;
if (standalone)
throw new Error("Invalid input");
if (index in hydrated)
return hydrated[index];
if (index === -1) return;
if (index === -3) return NaN;
if (index === -4) return 1 / 0;
if (index === -5) return -1 / 0;
if (index === -6) return -0;
if (standalone) throw new Error("Invalid input");
if (index in hydrated) return hydrated[index];
let value = values[index];

@@ -387,14 +373,8 @@ if (!value || typeof value != "object")

throw new DevalueError("Cannot stringify a function", keys);
if (indexes.has(thing))
return indexes.get(thing);
if (thing === void 0)
return -1;
if (Number.isNaN(thing))
return -3;
if (thing === 1 / 0)
return -4;
if (thing === -1 / 0)
return -5;
if (thing === 0 && 1 / thing < 0)
return -6;
if (indexes.has(thing)) return indexes.get(thing);
if (thing === void 0) return -1;
if (Number.isNaN(thing)) return -3;
if (thing === 1 / 0) return -4;
if (thing === -1 / 0) return -5;
if (thing === 0 && 1 / thing < 0) return -6;
let index2 = p++;

@@ -563,4 +543,3 @@ indexes.set(thing, index2);

Headers(val) {
if (val instanceof impl.Headers)
return Object.fromEntries(val);
if (val instanceof impl.Headers) return Object.fromEntries(val);
},

@@ -751,4 +730,3 @@ Request(val) {

let hostHeader = request.headers.get("Host");
if (hostHeader == null)
return new Response(null, { status: 400 });
if (hostHeader == null) return new Response(null, { status: 400 });
try {

@@ -762,4 +740,3 @@ let host = new URL(`http://${hostHeader}`);

let secretHex = request.headers.get(CoreHeaders.OP_SECRET);
if (secretHex == null)
return new Response(null, { status: 401 });
if (secretHex == null) return new Response(null, { status: 401 });
let expectedSecret = this.env[CoreBindings.DATA_PROXY_SECRET], secretBuffer = Buffer2.from(secretHex, "hex");

@@ -769,4 +746,3 @@ if (secretBuffer.byteLength !== expectedSecret.byteLength || !crypto.subtle.timingSafeEqual(secretBuffer, expectedSecret))

let opHeader = request.headers.get(CoreHeaders.OP), targetHeader = request.headers.get(CoreHeaders.OP_TARGET), keyHeader = request.headers.get(CoreHeaders.OP_KEY), allowAsync = request.headers.get(CoreHeaders.OP_SYNC) === null, argsSizeHeader = request.headers.get(CoreHeaders.OP_STRINGIFIED_SIZE), contentLengthHeader = request.headers.get("Content-Length");
if (targetHeader === null)
return new Response(null, { status: 400 });
if (targetHeader === null) return new Response(null, { status: 400 });
if (opHeader === ProxyOps.FREE) {

@@ -790,4 +766,3 @@ for (let targetValue of targetHeader.split(",")) {

} else if (opHeader === ProxyOps.GET_OWN_DESCRIPTOR) {
if (keyHeader === null)
return new Response(null, { status: 400 });
if (keyHeader === null) return new Response(null, { status: 400 });
let descriptor = Object.getOwnPropertyDescriptor(target, keyHeader);

@@ -966,7 +941,5 @@ descriptor !== void 0 && (result = {

function ensureAcceptableEncoding(clientAcceptEncoding, response) {
if (clientAcceptEncoding === null)
return response;
if (clientAcceptEncoding === null) return response;
let encodings = parseAcceptEncoding(clientAcceptEncoding);
if (encodings.length === 0)
return response;
if (encodings.length === 0) return response;
let contentEncoding = response.headers.get("Content-Encoding"), contentType = response.headers.get("Content-Type");

@@ -993,4 +966,3 @@ if (!isCompressedByCloudflareFL(contentType) || contentEncoding !== null && contentEncoding !== "gzip" && contentEncoding !== "br")

function maybeLogRequest(req, res, env, ctx, startTime) {
if (env[CoreBindings.JSON_LOG_LEVEL] < LogLevel.INFO)
return;
if (env[CoreBindings.JSON_LOG_LEVEL] < LogLevel.INFO) return;
let url = new URL(req.url), statusText = (res.statusText.trim() || STATUS_CODES[res.status]) ?? "", lines = [

@@ -1025,4 +997,3 @@ `${bold(req.method)} ${url.pathname} `,

let startTime = Date.now();
if (request.headers.get(CoreHeaders.OP) !== null)
return handleProxy(request, env);
if (request.headers.get(CoreHeaders.OP) !== null) return handleProxy(request, env);
let disablePrettyErrorPage = request.headers.get(CoreHeaders.DISABLE_PRETTY_ERROR) !== null, clientAcceptEncoding = request.headers.get("Accept-Encoding");

@@ -1029,0 +1000,0 @@ try {

@@ -31,4 +31,3 @@ var __defProp = Object.defineProperty;

for (let { name: table, sql } of tables) {
if (filterTables.size > 0 && !filterTables.has(table))
continue;
if (filterTables.size > 0 && !filterTables.has(table)) continue;
if (table === "sqlite_sequence")

@@ -47,4 +46,3 @@ noSchema || (yield "DELETE FROM sqlite_sequence;");

}
if (noData)
continue;
if (noData) continue;
let columns_cursor = db.exec(`PRAGMA table_info="${table}"`), columns = Array.from(columns_cursor), select = `SELECT ${columns.map((c) => c.name).join(", ")}

@@ -51,0 +49,0 @@ FROM "${table}";`, rows_cursor = db.exec(select);

@@ -48,4 +48,3 @@ var __defProp = Object.defineProperty;

function decodeKey({ key }, query) {
if (query.get(KVParams.URL_ENCODED)?.toLowerCase() !== "true")
return key;
if (query.get(KVParams.URL_ENCODED)?.toLowerCase() !== "true") return key;
try {

@@ -184,4 +183,3 @@ return decodeURIComponent(key);

let entry = await this.storage.get(key);
if (entry === null)
throw new HttpError2(404, "Not Found");
if (entry === null) throw new HttpError2(404, "Not Found");
let headers = new Headers();

@@ -188,0 +186,0 @@ return entry.expiration !== void 0 && headers.set(

@@ -105,6 +105,4 @@ // src/workers/kv/sites.worker.ts

let aElement = a[i], bElement = b[i];
if (aElement < bElement)
return -1;
if (aElement > bElement)
return 1;
if (aElement < bElement) return -1;
if (aElement > bElement) return 1;
}

@@ -120,4 +118,3 @@ return a.length - b.length;

keys.sort((a, b) => arrayCompare(a.encodedName, b.encodedName));
for (let key of keys)
delete key.encodedName;
for (let key of keys) delete key.encodedName;
let startAfter = cursor === void 0 ? "" : base64Decode(cursor), startIndex = 0;

@@ -124,0 +121,0 @@ startAfter !== "" && (startIndex = keys.findIndex(({ name }) => name === startAfter), startIndex === -1 && (startIndex = keys.length), startIndex++);

@@ -60,5 +60,7 @@ var __defProp = Object.defineProperty;

maxRetires: z.number().min(0).max(100).optional(),
// deprecated
maxRetries: z.number().min(0).max(100).optional(),
deadLetterQueue: z.ostring(),
retryDelay: QueueMessageDelaySchema
}), QueueConsumerSchema = /* @__PURE__ */ z.intersection(
}).transform((queue) => (queue.maxRetires !== void 0 && (queue.maxRetries = queue.maxRetires), queue)), QueueConsumerSchema = /* @__PURE__ */ z.intersection(
QueueConsumerOptionsSchema,

@@ -79,3 +81,3 @@ z.object({ workerName: z.string() })

// src/workers/queues/broker.worker.ts
var MAX_MESSAGE_SIZE_BYTES = 128 * 1e3, MAX_MESSAGE_BATCH_COUNT = 100, MAX_MESSAGE_BATCH_SIZE = (256 + 32) * 1e3, DEFAULT_BATCH_SIZE = 5, DEFAULT_BATCH_TIMEOUT = 1, DEFAULT_RETRIES = 2, exceptionQueueResponse = {
var MAX_MESSAGE_SIZE_BYTES = 128 * 1e3, MAX_MESSAGE_BATCH_COUNT = 100, MAX_MESSAGE_BATCH_SIZE = 288 * 1e3, DEFAULT_BATCH_SIZE = 5, DEFAULT_BATCH_TIMEOUT = 1, DEFAULT_RETRIES = 2, exceptionQueueResponse = {
outcome: "exception",

@@ -109,4 +111,3 @@ retryBatch: { retry: !1 },

let format = headers.get("X-Msg-Delay-Secs");
if (!format)
return;
if (!format) return;
let result = QueueMessageDelaySchema.safeParse(Number(format));

@@ -204,3 +205,3 @@ if (!result.success)

assert(consumer !== void 0);
let batchSize = consumer.maxBatchSize ?? DEFAULT_BATCH_SIZE, maxAttempts = (consumer.maxRetires ?? DEFAULT_RETRIES) + 1, maxAttemptsS = maxAttempts === 1 ? "" : "s", batch = this.#messages.splice(0, batchSize), startTime = Date.now(), endTime, response;
let batchSize = consumer.maxBatchSize ?? DEFAULT_BATCH_SIZE, maxAttempts = (consumer.maxRetries ?? DEFAULT_RETRIES) + 1, maxAttemptsS = maxAttempts === 1 ? "" : "s", batch = this.#messages.splice(0, batchSize), startTime = Date.now(), endTime, response;
try {

@@ -225,10 +226,9 @@ response = await this.#dispatchBatch(consumer.workerName, batch), endTime = Date.now();

this.timers.setTimeout(fn, delay * 1e3);
} else
consumer.deadLetterQueue !== void 0 ? (await this.logWithLevel(
LogLevel.WARN,
`Moving message "${message.id}" on queue "${this.name}" to dead letter queue "${consumer.deadLetterQueue}" after ${maxAttempts} failed attempt${maxAttemptsS}...`
), toDeadLetterQueue.push(message)) : await this.logWithLevel(
LogLevel.WARN,
`Dropped message "${message.id}" on queue "${this.name}" after ${maxAttempts} failed attempt${maxAttemptsS}!`
);
} else consumer.deadLetterQueue !== void 0 ? (await this.logWithLevel(
LogLevel.WARN,
`Moving message "${message.id}" on queue "${this.name}" to dead letter queue "${consumer.deadLetterQueue}" after ${maxAttempts} failed attempt${maxAttemptsS}...`
), toDeadLetterQueue.push(message)) : await this.logWithLevel(
LogLevel.WARN,
`Dropped message "${message.id}" on queue "${this.name}" after ${maxAttempts} failed attempt${maxAttemptsS}!`
);
let acked = batch.length - failedMessages;

@@ -256,4 +256,3 @@ if (await this.logWithLevel(

if (this.#pendingFlush !== void 0) {
if (this.#pendingFlush.immediate || batchHasSpace)
return;
if (this.#pendingFlush.immediate || batchHasSpace) return;
this.timers.clearTimeout(this.#pendingFlush.timeout), this.#pendingFlush = void 0;

@@ -273,4 +272,3 @@ }

message = async (req) => {
if (this.#maybeConsumer === void 0)
return new Response();
if (this.#maybeConsumer === void 0) return new Response();
validateMessageSize(req.headers);

@@ -284,4 +282,3 @@ let contentType = validateContentType(req.headers), delay = validateMessageDelay(req.headers) ?? this.#maybeProducer?.deliveryDelay, body = Buffer2.from(await req.arrayBuffer());

batch = async (req) => {
if (this.#maybeConsumer === void 0)
return new Response();
if (this.#maybeConsumer === void 0) return new Response();
validateBatchSize(req.headers);

@@ -288,0 +285,0 @@ let delay = validateMessageDelay(req.headers) ?? this.#maybeProducer?.deliveryDelay, body = QueuesBatchRequestSchema.parse(await req.json());

@@ -494,4 +494,3 @@ var __defProp = Object.defineProperty;

for (let i = 0; i < x.length; i++)
if (x.charCodeAt(i) >= 256)
return x.length * 2;
if (x.charCodeAt(i) >= 256) return x.length * 2;
return x.length;

@@ -521,13 +520,10 @@ }

let ranges = parseRanges(options.rangeHeader, size);
if (ranges?.length === 1)
return ranges[0];
if (ranges?.length === 1) return ranges[0];
} else if (options.range !== void 0) {
let { offset, length, suffix } = options.range;
if (suffix !== void 0) {
if (suffix <= 0)
throw new InvalidRange();
if (suffix <= 0) throw new InvalidRange();
suffix > size && (suffix = size), offset = size - suffix, length = suffix;
}
if (offset === void 0 && (offset = 0), length === void 0 && (length = size - offset), offset < 0 || offset > size || length <= 0)
throw new InvalidRange();
if (offset === void 0 && (offset = 0), length === void 0 && (length = size - offset), offset < 0 || offset > size || length <= 0) throw new InvalidRange();
return offset + length > size && (length = size - offset), { start: offset, end: offset + length - 1 };

@@ -542,4 +538,3 @@ }

metadataSize(customMetadata) {
if (customMetadata === void 0)
return this;
if (customMetadata === void 0) return this;
let metadataLength = 0;

@@ -574,4 +569,3 @@ for (let [key, value] of Object.entries(customMetadata))

async transform(chunk, controller) {
for (let hash of hashes)
await hash.writer.write(chunk);
for (let hash of hashes) await hash.writer.write(chunk);
controller.enqueue(chunk);

@@ -600,4 +594,3 @@ },

let hash = createHash("md5");
for (let md5Hex of md5Hexes)
hash.update(md5Hex, "hex");
for (let md5Hex of md5Hexes) hash.update(md5Hex, "hex");
return `${hash.digest("hex")}-${md5Hexes.length}`;

@@ -610,4 +603,3 @@ }

let metadataSize = parseInt(req.headers.get(R2Headers.METADATA_SIZE));
if (Number.isNaN(metadataSize))
throw new InvalidMetadata();
if (Number.isNaN(metadataSize)) throw new InvalidMetadata();
assert2(req.body !== null);

@@ -619,4 +611,3 @@ let body = req.body, [metadataBuffer, value] = await readPrefix(body, metadataSize), metadataJson = decoder.decode(metadataBuffer);

let header = req.headers.get(R2Headers.REQUEST);
if (header === null)
throw new InvalidMetadata();
if (header === null) throw new InvalidMetadata();
return R2BindingRequestSchema.parse(JSON.parse(header));

@@ -730,6 +721,4 @@ }

let partRows = stmtDeletePartsByKey({ object_key: key });
for (let partRow of partRows)
oldBlobIds.push(partRow.blob_id);
} else
maybeOldBlobId !== void 0 && oldBlobIds.push(maybeOldBlobId);
for (let partRow of partRows) oldBlobIds.push(partRow.blob_id);
} else maybeOldBlobId !== void 0 && oldBlobIds.push(maybeOldBlobId);
}

@@ -799,4 +788,3 @@ return oldBlobIds;

for (let { part } of selectedParts) {
if (partNumberSet.has(part))
throw new InternalError();
if (partNumberSet.has(part)) throw new InternalError();
partNumberSet.add(part);

@@ -826,6 +814,4 @@ }

let partRows2 = stmtDeletePartsByKey({ object_key: key });
for (let partRow of partRows2)
oldBlobIds.push(partRow.blob_id);
} else
maybeOldBlobId !== void 0 && oldBlobIds.push(maybeOldBlobId);
for (let partRow of partRows2) oldBlobIds.push(partRow.blob_id);
} else maybeOldBlobId !== void 0 && oldBlobIds.push(maybeOldBlobId);
let totalSize = parts.reduce((acc, { size }) => acc + size, 0), etag = generateMultipartEtag(

@@ -852,4 +838,3 @@ parts.map(({ checksum_md5 }) => checksum_md5)

let partRows = stmtDeleteUnlinkedPartsByUploadId({ upload_id });
for (let partRow of partRows)
oldBlobIds.push(partRow.blob_id);
for (let partRow of partRows) oldBlobIds.push(partRow.blob_id);
return stmtUpdateUploadState({

@@ -946,4 +931,3 @@ upload_id,

let row = get(this.#stmts.getByKey({ key }));
if (row === void 0)
throw new NoSuchKey();
if (row === void 0) throw new NoSuchKey();
let range = { offset: 0, length: row.size };

@@ -955,4 +939,3 @@ return new InternalR2Object(row, range);

let result = this.#stmts.getPartsByKey(key);
if (result === void 0)
throw new NoSuchKey();
if (result === void 0) throw new NoSuchKey();
let { row, parts } = result, defaultR2Range = { offset: 0, length: row.size };

@@ -976,4 +959,3 @@ try {

value = this.#assembleMultipartValue(parts, range ?? defaultRange);
} else if (value = await this.blob.get(row.blob_id, range), value === null)
throw new NoSuchKey();
} else if (value = await this.blob.get(row.blob_id, range), value === null) throw new NoSuchKey();
return new InternalR2ObjectBody(row, value, r2Range);

@@ -1004,4 +986,3 @@ }

if (oldBlobIds !== void 0)
for (let blobId2 of oldBlobIds)
this.#backgroundDelete(blobId2);
for (let blobId2 of oldBlobIds) this.#backgroundDelete(blobId2);
return new InternalR2Object(row);

@@ -1011,7 +992,5 @@ }

Array.isArray(keys) || (keys = [keys]);
for (let key of keys)
validate.key(key);
for (let key of keys) validate.key(key);
let oldBlobIds = this.#stmts.deleteByKeys(keys);
for (let blobId of oldBlobIds)
this.#backgroundDelete(blobId);
for (let blobId of oldBlobIds) this.#backgroundDelete(blobId);
}

@@ -1096,4 +1075,3 @@ #listWithoutDelimiterQuery(excludeHttp, excludeCustom) {

);
for (let blobId of oldBlobIds)
this.#backgroundDelete(blobId);
for (let blobId of oldBlobIds) this.#backgroundDelete(blobId);
return new InternalR2Object(newRow);

@@ -1104,4 +1082,3 @@ }

let oldBlobIds = this.#stmts.abortMultipartUpload(key, uploadId);
for (let blobId of oldBlobIds)
this.#backgroundDelete(blobId);
for (let blobId of oldBlobIds) this.#backgroundDelete(blobId);
}

@@ -1108,0 +1085,0 @@ get = async (req) => {

@@ -49,4 +49,3 @@ // src/workers/shared/blob.worker.ts

let headers = range === void 0 ? {} : rangeHeaders(range), res = await fetcher.fetch(url, { headers });
if (res.status === 404)
return null;
if (res.status === 404) return null;
if (assert(res.ok && res.body !== null), range !== void 0 && res.status !== 206) {

@@ -85,4 +84,3 @@ let contentLength = parseInt(res.headers.get("Content-Length"));

let res = await fetcher.fetch(url, { method: "HEAD" });
if (res.status === 404)
return null;
if (res.status === 404) return null;
assert(res.ok);

@@ -150,7 +148,5 @@ let contentLength = parseInt(res.headers.get("Content-Length"));

async delete(id) {
if (this.#stickyBlobs)
return;
if (this.#stickyBlobs) return;
let idURL = this.idURL(id);
if (idURL === null)
return;
if (idURL === null) return;
let res = await this.#fetcher.fetch(idURL, { method: "DELETE" });

@@ -214,4 +210,3 @@ assert(res.ok || res.status === 404);

let result;
for (let row of cursor)
result ??= row;
for (let row of cursor) result ??= row;
return result;

@@ -293,4 +288,3 @@ }

let row = get(this.#stmts.getByKey(key));
if (row === void 0)
return null;
if (row === void 0) return null;
if (this.#hasExpired(row))

@@ -330,4 +324,3 @@ return drain(this.#stmts.deleteByKey({ key })), this.#backgroundDelete(row.blob_id), null;

}), rows = Array.from(rowsCursor), expiredRows = this.#stmts.deleteExpired({ now });
for (let row of expiredRows)
this.#backgroundDelete(row.blob_id);
for (let row of expiredRows) this.#backgroundDelete(row.blob_id);
let hasMoreRows = rows.length === opts.limit + 1;

@@ -342,8 +335,4 @@ rows.splice(opts.limit, 1);

function testRegExps(matcher, value) {
for (let exclude of matcher.exclude)
if (exclude.test(value))
return !1;
for (let include of matcher.include)
if (include.test(value))
return !0;
for (let exclude of matcher.exclude) if (exclude.test(value)) return !1;
for (let include of matcher.include) if (include.test(value)) return !0;
return !1;

@@ -377,4 +366,3 @@ }

let url = new URL(req.url), methodRoutes = this.#routes?.get(req.method);
if (methodRoutes === void 0)
return new Response(null, { status: 405 });
if (methodRoutes === void 0) return new Response(null, { status: 405 });
let handlers = this;

@@ -384,4 +372,3 @@ try {

let match = path.exec(url.pathname);
if (match !== null)
return await handlers[key](req, match.groups, url);
if (match !== null) return await handlers[key](req, match.groups, url);
}

@@ -430,9 +417,7 @@ return new Response(null, { status: 404 });

clearTimeout(handle) {
if (typeof handle == "number")
return clearTimeout(handle);
if (typeof handle == "number") return clearTimeout(handle);
this.#fakePendingTimeouts.delete(handle[kFakeTimerHandle]);
}
queueMicrotask(closure) {
if (this.#fakeTimestamp === void 0)
return queueMicrotask(closure);
if (this.#fakeTimestamp === void 0) return queueMicrotask(closure);
let result = closure();

@@ -502,4 +487,3 @@ result instanceof Promise && (this.#fakeRunningTasks.add(result), result.finally(() => this.#fakeRunningTasks.delete(result)));

get blob() {
if (this.#blob !== void 0)
return this.#blob;
if (this.#blob !== void 0) return this.#blob;
let maybeBlobsService = this.env[SharedBindings.MAYBE_SERVICE_BLOBS], stickyBlobs = !!this.env[SharedBindings.MAYBE_JSON_ENABLE_STICKY_BLOBS];

@@ -547,4 +531,3 @@ return assert5(

let controlOp = req?.cf?.miniflare?.controlOp;
if (controlOp !== void 0)
return this.#handleControlOp(controlOp);
if (controlOp !== void 0) return this.#handleControlOp(controlOp);
}

@@ -573,28 +556,21 @@ let name = req.cf?.miniflare?.name;

let prefixMatch = rangePrefixRegexp.exec(rangeHeader);
if (prefixMatch === null)
return;
if (rangeHeader = rangeHeader.substring(prefixMatch[0].length), rangeHeader.trimStart() === "")
return [];
if (prefixMatch === null) return;
if (rangeHeader = rangeHeader.substring(prefixMatch[0].length), rangeHeader.trimStart() === "") return [];
let ranges = rangeHeader.split(","), result = [];
for (let range of ranges) {
let match = rangeRegexp.exec(range);
if (match === null)
return;
if (match === null) return;
let { start, end } = match.groups;
if (start !== void 0 && end !== void 0) {
let rangeStart = parseInt(start), rangeEnd = parseInt(end);
if (rangeStart > rangeEnd || rangeStart >= length)
return;
if (rangeStart > rangeEnd || rangeStart >= length) return;
rangeEnd >= length && (rangeEnd = length - 1), result.push({ start: rangeStart, end: rangeEnd });
} else if (start !== void 0 && end === void 0) {
let rangeStart = parseInt(start);
if (rangeStart >= length)
return;
if (rangeStart >= length) return;
result.push({ start: rangeStart, end: length - 1 });
} else if (start === void 0 && end !== void 0) {
let suffix = parseInt(end);
if (suffix >= length)
return [];
if (suffix === 0)
continue;
if (suffix >= length) return [];
if (suffix === 0) continue;
result.push({ start: length - suffix, end: length - 1 });

@@ -634,4 +610,3 @@ } else

let resolve;
for (; (resolve = this.drainQueue.shift()) !== void 0; )
resolve();
for (; (resolve = this.drainQueue.shift()) !== void 0; ) resolve();
}

@@ -665,4 +640,3 @@ }

let resolve;
for (; (resolve = this.resolveQueue.shift()) !== void 0; )
resolve();
for (; (resolve = this.resolveQueue.shift()) !== void 0; ) resolve();
}

@@ -669,0 +643,0 @@ }

{
"name": "miniflare",
"version": "0.0.0-b34b24629",
"version": "0.0.0-b35265a79",
"description": "Fun, full-featured, fully-local simulator for Cloudflare Workers",

@@ -41,4 +41,4 @@ "keywords": [

"undici": "^5.28.4",
"workerd": "1.20240925.0",
"ws": "^8.17.1",
"workerd": "1.20241106.1",
"ws": "^8.18.0",
"youch": "^3.2.2",

@@ -49,3 +49,3 @@ "zod": "^3.22.3"

"@ava/typescript": "^4.1.0",
"@cloudflare/workers-types": "^4.20240925.0",
"@cloudflare/workers-types": "^4.20241106.0",
"@microsoft/api-extractor": "^7.47.0",

@@ -57,3 +57,3 @@ "@types/debug": "^4.1.7",

"@types/mime": "^3.0.4",
"@types/node": "20.8.3",
"@types/node": "^18.19.59",
"@types/rimraf": "^4.0.5",

@@ -70,3 +70,3 @@ "@types/stoppable": "^1.1.1",

"devtools-protocol": "^0.0.1182435",
"esbuild": "0.17.19",
"esbuild": "0.24.0",
"eslint": "^8.49.0",

@@ -78,2 +78,3 @@ "eslint-config-prettier": "^9.0.0",

"expect-type": "^0.15.0",
"heap-js": "^2.5.0",
"http-cache-semantics": "^4.1.0",

@@ -86,7 +87,8 @@ "kleur": "^4.1.5",

"which": "^2.0.2",
"@cloudflare/kv-asset-handler": "0.3.4",
"@cloudflare/workers-shared": "0.0.0-b34b24629"
"@cloudflare/workflows-shared": "0.2.0",
"@cloudflare/workers-shared": "0.8.0",
"@cloudflare/kv-asset-handler": "0.3.4"
},
"engines": {
"node": ">=16.13"
"node": ">=18.0.0"
},

@@ -93,0 +95,0 @@ "volta": {

@@ -217,2 +217,16 @@ # 🔥 Miniflare

### `interface WorkflowOptions`
- `name: string`
  The name of the Workflow.
- `className: string`
  The name of the class exported from the Worker that implements the `WorkflowEntrypoint`.
- `scriptName?: string`
  The name of the script that includes the `WorkflowEntrypoint`. Optional; defaults to the current script if not set.
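
To see how these fields fit together, here is a minimal, hedged sketch of registering a Workflow with Miniflare; the script path, Workflow name, and class name are illustrative, and the worker is assumed to export a `WorkflowEntrypoint` subclass named `MyWorkflow`:

```js
import { Miniflare } from "miniflare";

const mf = new Miniflare({
  name: "worker", // the script name that `scriptName` below refers to
  modules: true,
  scriptPath: "./dist/worker.js", // assumed to export `class MyWorkflow extends WorkflowEntrypoint`
  workflows: [
    {
      name: "MY_WORKFLOW", // the name of the Workflow
      className: "MyWorkflow", // exported class implementing WorkflowEntrypoint
      scriptName: "worker", // optional; defaults to the current script
    },
  ],
});
```
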
#### Core

@@ -589,2 +603,7 @@

#### Workflows
- `workflows?: WorkflowOptions[]`
  Configuration for one or more Workflows in your project.
#### Analytics Engine, Sending Email, Vectorize and Workers for Platforms

@@ -730,2 +749,6 @@

- `workflowsPersist?: Persistence`
  Where to persist data stored in Workflows. See docs for `Persistence`.
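
As with the other `*Persist` options, this is expected to accept a `Persistence` value such as a directory path; a hedged sketch (the path is arbitrary):

```js
import { Miniflare } from "miniflare";

const mf = new Miniflare({
  modules: true,
  scriptPath: "./dist/worker.js", // illustrative path
  workflows: [{ name: "MY_WORKFLOW", className: "MyWorkflow" }],
  workflowsPersist: "./.mf/workflows", // keep Workflow state on disk across restarts
});
```
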
#### Analytics Engine, Browser Rendering, Sending Email, Vectorize, Workers AI and Workers for Platforms

@@ -732,0 +755,0 @@

Sorry, the diffs of the remaining changed files are either too big to display or not supported yet.
